/* SPDX-License-Identifier: GPL-2.0-or-later
 * Copyright 2007 Blender Foundation. All rights reserved. */

/** \file
 * \ingroup nodes
 */

#include <cstring>

#include "DNA_light_types.h"
#include "DNA_linestyle_types.h"
#include "DNA_material_types.h"
#include "DNA_node_types.h"
#include "DNA_scene_types.h"
#include "DNA_space_types.h"
#include "DNA_workspace_types.h"
#include "DNA_world_types.h"

#include "BLI_array.hh"
#include "BLI_linklist.h"
#include "BLI_listbase.h"
#include "BLI_threads.h"
#include "BLI_utildefines.h"
#include "BLI_vector.hh"

#include "BLT_translation.h"

#include "BKE_context.h"
#include "BKE_lib_id.h"
#include "BKE_linestyle.h"
#include "BKE_node.h"
#include "BKE_node_tree_update.h"
#include "BKE_scene.h"

#include "RNA_access.h"
#include "RNA_prototypes.h"

#include "GPU_material.h"

#include "RE_texture.h"

#include "UI_resources.h"

#include "NOD_common.h"

#include "node_common.h"
#include "node_exec.h"
#include "node_shader_util.hh"
#include "node_util.h"

using blender::Array;
using blender::Vector;

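/* The shader tree is editable when no engine is set (older files), when Cycles is active,
 * or when the active render engine does not use its own custom shading nodes. */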
static bool shader_tree_poll(const bContext *C, bNodeTreeType *UNUSED(treetype))
{
  Scene *scene = CTX_data_scene(C);
  const char *engine_id = scene->r.engine;

  /* Allow empty engine string too,
   * this is from older versions that didn't have registerable engines yet. */
  return (engine_id[0] == '\0' || STREQ(engine_id, RE_engine_id_CYCLES) ||
          !BKE_scene_use_shading_nodes_custom(scene));
}

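/* Resolve the node tree shown in the editor from the current context: the active object's
 * material or light, the active Freestyle line style, or the scene world, depending on the
 * "shader from" mode of the space. */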
static void shader_get_from_context(const bContext *C,
                                    bNodeTreeType *UNUSED(treetype),
                                    bNodeTree **r_ntree,
                                    ID **r_id,
                                    ID **r_from)
{
  SpaceNode *snode = CTX_wm_space_node(C);
  Scene *scene = CTX_data_scene(C);
  ViewLayer *view_layer = CTX_data_view_layer(C);
  Object *ob = OBACT(view_layer);

  if (snode->shaderfrom == SNODE_SHADER_OBJECT) {
    if (ob) {
      *r_from = &ob->id;
      if (ob->type == OB_LAMP) {
        *r_id = static_cast<ID *>(ob->data);
        *r_ntree = ((Light *)ob->data)->nodetree;
      }
      else {
        Material *ma = BKE_object_material_get(ob, ob->actcol);
        if (ma) {
          *r_id = &ma->id;
          *r_ntree = ma->nodetree;
        }
      }
    }
  }
#ifdef WITH_FREESTYLE
  else if (snode->shaderfrom == SNODE_SHADER_LINESTYLE) {
    FreestyleLineStyle *linestyle = BKE_linestyle_active_from_view_layer(view_layer);
    if (linestyle) {
      *r_from = nullptr;
      *r_id = &linestyle->id;
      *r_ntree = linestyle->nodetree;
    }
  }
#endif
  else { /* SNODE_SHADER_WORLD */
    if (scene->world) {
      *r_from = nullptr;
      *r_id = &scene->world->id;
      *r_ntree = scene->world->nodetree;
    }
  }
}

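/* Call `func` once for every node class exposed by shader node trees. */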
static void foreach_nodeclass(Scene *UNUSED(scene), void *calldata, bNodeClassCallback func)
{
  func(calldata, NODE_CLASS_INPUT, N_("Input"));
  func(calldata, NODE_CLASS_OUTPUT, N_("Output"));
  func(calldata, NODE_CLASS_SHADER, N_("Shader"));
  func(calldata, NODE_CLASS_TEXTURE, N_("Texture"));
  func(calldata, NODE_CLASS_OP_COLOR, N_("Color"));
  func(calldata, NODE_CLASS_OP_VECTOR, N_("Vector"));
  func(calldata, NODE_CLASS_CONVERTER, N_("Converter"));
  func(calldata, NODE_CLASS_SCRIPT, N_("Script"));
  func(calldata, NODE_CLASS_GROUP, N_("Group"));
  func(calldata, NODE_CLASS_INTERFACE, N_("Interface"));
  func(calldata, NODE_CLASS_LAYOUT, N_("Layout"));
}

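/* Prepare the localized (evaluation) copy of the tree. */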
static void localize(bNodeTree *localtree, bNodeTree *UNUSED(ntree))
{
  /* Replace muted nodes and reroute nodes with their internal links. */
  LISTBASE_FOREACH_MUTABLE (bNode *, node, &localtree->nodes) {
    if (node->flag & NODE_MUTED || node->type == NODE_REROUTE) {
      nodeInternalRelink(localtree, node);
      ntreeFreeLocalNode(localtree, node);
    }
  }
}

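/* Tree update callback: re-tag the active outputs and update reroute nodes. */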
static void update(bNodeTree *ntree)
{
  ntreeSetOutput(ntree);

  ntree_update_reroute_nodes(ntree);
}

static bool shader_validate_link(eNodeSocketDatatype from, eNodeSocketDatatype to)
{
  /* Can't connect shader into other socket types, other way around is fine
   * since it will be interpreted as emission. */
  if (from == SOCK_SHADER) {
    return to == SOCK_SHADER;
  }
  return true;
}

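/* Only a subset of the static socket types is supported by shader node trees. */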
static bool shader_node_tree_socket_type_valid(bNodeTreeType *UNUSED(ntreetype),
                                               bNodeSocketType *socket_type)
{
  return nodeIsStaticSocketType(socket_type) &&
         ELEM(socket_type->type, SOCK_FLOAT, SOCK_VECTOR, SOCK_RGBA, SOCK_SHADER);
}

bNodeTreeType *ntreeType_Shader;

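/* Register the shader node tree type and its callbacks. */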
void register_node_tree_type_sh()
{
  bNodeTreeType *tt = ntreeType_Shader = MEM_cnew<bNodeTreeType>("shader node tree type");

  tt->type = NTREE_SHADER;
  strcpy(tt->idname, "ShaderNodeTree");
  strcpy(tt->ui_name, N_("Shader Editor"));
  tt->ui_icon = ICON_NODE_MATERIAL;
  strcpy(tt->ui_description, N_("Shader nodes"));

  tt->foreach_nodeclass = foreach_nodeclass;
  tt->localize = localize;
  tt->update = update;
  tt->poll = shader_tree_poll;
  tt->get_from_context = shader_get_from_context;
  tt->validate_link = shader_validate_link;
  tt->valid_socket_type = shader_node_tree_socket_type_valid;

  tt->rna_ext.srna = &RNA_ShaderNodeTree;

  ntreeTypeAdd(tt);
}

/* GPU material from shader nodes */

bNode *ntreeShaderOutputNode(bNodeTree *ntree, int target)
{
  /* Make sure we only have single node tagged as output. */
  ntreeSetOutput(ntree);

  /* Find output node that matches type and target. If there are
   * multiple, we prefer exact target match and active nodes. */
  bNode *output_node = nullptr;

  LISTBASE_FOREACH (bNode *, node, &ntree->nodes) {
    if (!ELEM(node->type, SH_NODE_OUTPUT_MATERIAL, SH_NODE_OUTPUT_WORLD, SH_NODE_OUTPUT_LIGHT)) {
      continue;
    }

    if (node->custom1 == SHD_OUTPUT_ALL) {
      if (output_node == nullptr) {
        output_node = node;
      }
      else if (output_node->custom1 == SHD_OUTPUT_ALL) {
        if ((node->flag & NODE_DO_OUTPUT) && !(output_node->flag & NODE_DO_OUTPUT)) {
          output_node = node;
        }
      }
    }
    else if (node->custom1 == target) {
      if (output_node == nullptr) {
        output_node = node;
      }
      else if (output_node->custom1 == SHD_OUTPUT_ALL) {
        output_node = node;
      }
      else if ((node->flag & NODE_DO_OUTPUT) && !(output_node->flag & NODE_DO_OUTPUT)) {
        output_node = node;
      }
    }
  }

  return output_node;
}

/* Find socket with a specified identifier. */
static bNodeSocket *ntree_shader_node_find_socket(ListBase *sockets, const char *identifier)
{
  LISTBASE_FOREACH (bNodeSocket *, sock, sockets) {
    if (STREQ(sock->identifier, identifier)) {
      return sock;
    }
  }
  return nullptr;
}

/* Find input socket with a specified identifier. */
static bNodeSocket *ntree_shader_node_find_input(bNode *node, const char *identifier)
{
  return ntree_shader_node_find_socket(&node->inputs, identifier);
}

/* Find output socket with a specified identifier. */
static bNodeSocket *ntree_shader_node_find_output(bNode *node, const char *identifier)
{
  return ntree_shader_node_find_socket(&node->outputs, identifier);
}

/* Find input socket at a specific position. */
static bNodeSocket *ntree_shader_node_input_get(bNode *node, int n)
{
  return reinterpret_cast<bNodeSocket *>(BLI_findlink(&node->inputs, n));
}

/* Find output socket at a specific position. */
static bNodeSocket *ntree_shader_node_output_get(bNode *node, int n)
{
  return reinterpret_cast<bNodeSocket *>(BLI_findlink(&node->outputs, n));
}

/* Return true on success. */
static bool ntree_shader_expand_socket_default(bNodeTree *localtree,
                                               bNode *node,
                                               bNodeSocket *socket)
{
  bNode *value_node;
  bNodeSocket *value_socket;
  bNodeSocketValueVector *src_vector;
  bNodeSocketValueRGBA *src_rgba, *dst_rgba;
  bNodeSocketValueFloat *src_float, *dst_float;
  bNodeSocketValueInt *src_int;

  switch (socket->type) {
    case SOCK_VECTOR:
      value_node = nodeAddStaticNode(nullptr, localtree, SH_NODE_RGB);
      value_socket = ntree_shader_node_find_output(value_node, "Color");
      BLI_assert(value_socket != nullptr);
      src_vector = static_cast<bNodeSocketValueVector *>(socket->default_value);
      dst_rgba = static_cast<bNodeSocketValueRGBA *>(value_socket->default_value);
      copy_v3_v3(dst_rgba->value, src_vector->value);
      dst_rgba->value[3] = 1.0f; /* should never be read */
      break;
    case SOCK_RGBA:
      value_node = nodeAddStaticNode(nullptr, localtree, SH_NODE_RGB);
      value_socket = ntree_shader_node_find_output(value_node, "Color");
      BLI_assert(value_socket != nullptr);
      src_rgba = static_cast<bNodeSocketValueRGBA *>(socket->default_value);
      dst_rgba = static_cast<bNodeSocketValueRGBA *>(value_socket->default_value);
      copy_v4_v4(dst_rgba->value, src_rgba->value);
      break;
    case SOCK_INT:
      /* HACK: Support as float. */
      value_node = nodeAddStaticNode(nullptr, localtree, SH_NODE_VALUE);
      value_socket = ntree_shader_node_find_output(value_node, "Value");
      BLI_assert(value_socket != nullptr);
      src_int = static_cast<bNodeSocketValueInt *>(socket->default_value);
      dst_float = static_cast<bNodeSocketValueFloat *>(value_socket->default_value);
      dst_float->value = (float)(src_int->value);
      break;
    case SOCK_FLOAT:
      value_node = nodeAddStaticNode(nullptr, localtree, SH_NODE_VALUE);
      value_socket = ntree_shader_node_find_output(value_node, "Value");
      BLI_assert(value_socket != nullptr);
      src_float = static_cast<bNodeSocketValueFloat *>(socket->default_value);
      dst_float = static_cast<bNodeSocketValueFloat *>(value_socket->default_value);
      dst_float->value = src_float->value;
      break;
    default:
      return false;
  }
  nodeAddLink(localtree, value_node, value_socket, node, socket);
  return true;
}

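/* Inside a group, remove links that connect the group input socket `isock` to sockets with a
 * hidden value, so that the socket default override can take effect. Recurses into nested
 * groups. */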
static void ntree_shader_unlink_hidden_value_sockets(bNode *group_node, bNodeSocket *isock)
{
  bNodeTree *group_ntree = (bNodeTree *)group_node->id;
  bool removed_link = false;

  LISTBASE_FOREACH (bNode *, node, &group_ntree->nodes) {
    const bool is_group = ELEM(node->type, NODE_GROUP, NODE_CUSTOM_GROUP) && (node->id != nullptr);

    LISTBASE_FOREACH (bNodeSocket *, sock, &node->inputs) {
      if (!is_group && (sock->flag & SOCK_HIDE_VALUE) == 0) {
        continue;
      }
      /* If socket is linked to a group input node and sockets id match. */
      if (sock && sock->link && sock->link->fromnode->type == NODE_GROUP_INPUT) {
        if (STREQ(isock->identifier, sock->link->fromsock->identifier)) {
          if (is_group) {
            /* Recursively unlink sockets within the nested group. */
            ntree_shader_unlink_hidden_value_sockets(node, sock);
          }
          else {
            nodeRemLink(group_ntree, sock->link);
            removed_link = true;
          }
        }
      }
    }
  }

  if (removed_link) {
    BKE_ntree_update_main_tree(G.main, group_ntree, nullptr);
  }
}

/* Node groups, once expanded, lose their input socket values.
 * To fix this, link value/RGBA nodes into the sockets and copy the group socket values. */
static void ntree_shader_groups_expand_inputs(bNodeTree *localtree)
{
  bool link_added = false;

  LISTBASE_FOREACH (bNode *, node, &localtree->nodes) {
    const bool is_group = ELEM(node->type, NODE_GROUP, NODE_CUSTOM_GROUP) && (node->id != nullptr);
    const bool is_group_output = node->type == NODE_GROUP_OUTPUT && (node->flag & NODE_DO_OUTPUT);

    if (is_group) {
      /* Do it recursively. */
      ntree_shader_groups_expand_inputs((bNodeTree *)node->id);
    }

    if (is_group || is_group_output) {
      LISTBASE_FOREACH (bNodeSocket *, socket, &node->inputs) {
        if (socket->link != nullptr && !(socket->link->flag & NODE_LINK_MUTED)) {
          bNodeLink *link = socket->link;
          /* Fix the case where the socket is actually converting the data. (see T71374)
           * We only do the case of lossy conversion to float. */
          if ((socket->type == SOCK_FLOAT) && (link->fromsock->type != link->tosock->type)) {
            if (link->fromsock->type == SOCK_RGBA) {
              bNode *tmp = nodeAddStaticNode(nullptr, localtree, SH_NODE_RGBTOBW);
              nodeAddLink(localtree,
                          link->fromnode,
                          link->fromsock,
                          tmp,
                          static_cast<bNodeSocket *>(tmp->inputs.first));
              nodeAddLink(
                  localtree, tmp, static_cast<bNodeSocket *>(tmp->outputs.first), node, socket);
            }
            else if (link->fromsock->type == SOCK_VECTOR) {
              bNode *tmp = nodeAddStaticNode(nullptr, localtree, SH_NODE_VECTOR_MATH);
              tmp->custom1 = NODE_VECTOR_MATH_DOT_PRODUCT;
              bNodeSocket *dot_input1 = static_cast<bNodeSocket *>(tmp->inputs.first);
              bNodeSocket *dot_input2 = static_cast<bNodeSocket *>(dot_input1->next);
              bNodeSocketValueVector *input2_socket_value = static_cast<bNodeSocketValueVector *>(
                  dot_input2->default_value);
              copy_v3_fl(input2_socket_value->value, 1.0f / 3.0f);
              nodeAddLink(localtree, link->fromnode, link->fromsock, tmp, dot_input1);
              nodeAddLink(
                  localtree, tmp, static_cast<bNodeSocket *>(tmp->outputs.last), node, socket);
            }
          }
          continue;
        }

        if (is_group) {
          /* Detect the case where an input is plugged into a hidden value socket.
           * In this case we should just remove the link to trigger the socket default override. */
          ntree_shader_unlink_hidden_value_sockets(node, socket);
        }

        if (ntree_shader_expand_socket_default(localtree, node, socket)) {
          link_added = true;
        }
      }
    }
  }

  if (link_added) {
    BKE_ntree_update_main_tree(G.main, localtree, nullptr);
  }
}

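/* Remove muted links, recursively descending into node groups. */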
static void ntree_shader_groups_remove_muted_links(bNodeTree *ntree)
{
  LISTBASE_FOREACH (bNode *, node, &ntree->nodes) {
    if (node->type == NODE_GROUP) {
      if (node->id != nullptr) {
        ntree_shader_groups_remove_muted_links(reinterpret_cast<bNodeTree *>(node->id));
      }
    }
  }
  LISTBASE_FOREACH_MUTABLE (bNodeLink *, link, &ntree->links) {
    if (link->flag & NODE_LINK_MUTED) {
      nodeRemLink(ntree, link);
    }
  }
}

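/* Merge the content of one group node instance into the parent tree. */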
static void flatten_group_do(bNodeTree *ntree, bNode *gnode)
{
  LinkNode *group_interface_nodes = nullptr;
  bNodeTree *ngroup = (bNodeTree *)gnode->id;

  /* Add the nodes into the ntree */
  LISTBASE_FOREACH_MUTABLE (bNode *, node, &ngroup->nodes) {
    /* Remove interface nodes.
     * This also removes remaining links to and from interface nodes.
     * We must delay removal since sockets will reference this node. see: T52092 */
    if (ELEM(node->type, NODE_GROUP_INPUT, NODE_GROUP_OUTPUT)) {
      BLI_linklist_prepend(&group_interface_nodes, node);
    }
    /* migrate node */
    BLI_remlink(&ngroup->nodes, node);
    BLI_addtail(&ntree->nodes, node);
    /* ensure unique node name in the node tree */
    /* This is very slow and it has no use for GPU nodetree. (see T70609) */
    // nodeUniqueName(ntree, node);
  }

  /* Save first and last link to iterate over flattened group links. */
  bNodeLink *glinks_first = static_cast<bNodeLink *>(ntree->links.last);

  /* Add internal links to the ntree */
  LISTBASE_FOREACH_MUTABLE (bNodeLink *, link, &ngroup->links) {
    BLI_remlink(&ngroup->links, link);
    BLI_addtail(&ntree->links, link);
  }

  bNodeLink *glinks_last = static_cast<bNodeLink *>(ntree->links.last);

  /* restore external links to and from the gnode */
  if (glinks_first != nullptr) {
    /* input links */
    for (bNodeLink *link = glinks_first->next; link != glinks_last->next; link = link->next) {
      if (link->fromnode->type == NODE_GROUP_INPUT) {
        const char *identifier = link->fromsock->identifier;
        /* find external links to this input */
        for (bNodeLink *tlink = static_cast<bNodeLink *>(ntree->links.first);
             tlink != glinks_first->next;
             tlink = tlink->next) {
          if (tlink->tonode == gnode && STREQ(tlink->tosock->identifier, identifier)) {
            nodeAddLink(ntree, tlink->fromnode, tlink->fromsock, link->tonode, link->tosock);
          }
        }
      }
    }
    /* Also iterate over the new links to cover passthrough links. */
    glinks_last = static_cast<bNodeLink *>(ntree->links.last);
    /* output links */
    for (bNodeLink *tlink = static_cast<bNodeLink *>(ntree->links.first);
         tlink != glinks_first->next;
         tlink = tlink->next) {
      if (tlink->fromnode == gnode) {
        const char *identifier = tlink->fromsock->identifier;
        /* find internal links to this output */
        for (bNodeLink *link = glinks_first->next; link != glinks_last->next; link = link->next) {
          /* only use active output node */
          if (link->tonode->type == NODE_GROUP_OUTPUT && (link->tonode->flag & NODE_DO_OUTPUT)) {
            if (STREQ(link->tosock->identifier, identifier)) {
              nodeAddLink(ntree, link->fromnode, link->fromsock, tlink->tonode, tlink->tosock);
            }
          }
        }
      }
    }
  }

  while (group_interface_nodes) {
    bNode *node = static_cast<bNode *>(BLI_linklist_pop(&group_interface_nodes));
    ntreeFreeLocalNode(ntree, node);
  }

  BKE_ntree_update_tag_all(ntree);
}

/* Flatten group to only have a simple single tree */
static void ntree_shader_groups_flatten(bNodeTree *localtree)
{
  /* This is effectively recursive as the flattened groups will add
   * nodes at the end of the list, which will also get evaluated. */
  for (bNode *node = static_cast<bNode *>(localtree->nodes.first), *node_next; node;
       node = node_next) {
    if (ELEM(node->type, NODE_GROUP, NODE_CUSTOM_GROUP) && node->id != nullptr) {
      flatten_group_do(localtree, node);
      /* Continue even on new flattened nodes. */
      node_next = node->next;
      /* delete the group instance and its localtree. */
      bNodeTree *ngroup = (bNodeTree *)node->id;
      ntreeFreeLocalNode(localtree, node);
      ntreeFreeTree(ngroup);
      BLI_assert(!ngroup->id.py_instance); /* Or call #BKE_libblock_free_data_py. */
      MEM_freeN(ngroup);
    }
    else {
      node_next = node->next;
    }
  }

  BKE_ntree_update_main_tree(G.main, localtree, nullptr);
}

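/* User data for the branch tagging iterator below. */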
struct branchIterData {
  bool (*node_filter)(const bNode *node);
  int node_count;
};

static bool ntree_branch_count_and_tag_nodes(bNode *fromnode, bNode *tonode, void *userdata)
{
  branchIterData *iter = (branchIterData *)userdata;
  if (fromnode->tmp_flag == -1 && (iter->node_filter == nullptr || iter->node_filter(fromnode))) {
    fromnode->tmp_flag = iter->node_count;
    iter->node_count++;
  }
  if (tonode->tmp_flag == -1 && (iter->node_filter == nullptr || iter->node_filter(tonode))) {
    tonode->tmp_flag = iter->node_count;
    iter->node_count++;
  }
  return true;
}

/* Create a copy of a branch starting from a given node.
 * callback is executed once for every copied node.
 * Returns input node copy. */
static bNode *ntree_shader_copy_branch(bNodeTree *ntree,
                                       bNode *start_node,
                                       bool (*node_filter)(const bNode *node),
                                       void (*callback)(bNode *node, int user_data),
                                       int user_data)
{
  /* Init tmp flag. */
  LISTBASE_FOREACH (bNode *, node, &ntree->nodes) {
    node->tmp_flag = -1;
  }
  /* Count and tag all nodes inside the displacement branch of the tree. */
  start_node->tmp_flag = 0;
  branchIterData iter_data;
  iter_data.node_filter = node_filter;
  iter_data.node_count = 1;
  nodeChainIterBackwards(ntree, start_node, ntree_branch_count_and_tag_nodes, &iter_data, 1);
  /* Make a full copy of the branch */
  Array<bNode *> nodes_copy(iter_data.node_count);
  LISTBASE_FOREACH (bNode *, node, &ntree->nodes) {
    if (node->tmp_flag >= 0) {
      int id = node->tmp_flag;
      nodes_copy[id] = blender::bke::node_copy(
          ntree, *node, LIB_ID_CREATE_NO_USER_REFCOUNT | LIB_ID_CREATE_NO_MAIN, false);
      nodes_copy[id]->tmp_flag = -2; /* Copy */
      /* Make sure to clear all sockets links as they are invalid. */
      LISTBASE_FOREACH (bNodeSocket *, sock, &nodes_copy[id]->inputs) {
        sock->link = nullptr;
      }
      LISTBASE_FOREACH (bNodeSocket *, sock, &nodes_copy[id]->outputs) {
        sock->link = nullptr;
      }
    }
  }
  /* Recreate links between copied nodes. */
  LISTBASE_FOREACH (bNodeLink *, link, &ntree->links) {
    if (link->fromnode->tmp_flag >= 0 && link->tonode->tmp_flag >= 0) {
      bNode *fromnode = nodes_copy[link->fromnode->tmp_flag];
      bNode *tonode = nodes_copy[link->tonode->tmp_flag];
      bNodeSocket *fromsock = ntree_shader_node_find_output(fromnode, link->fromsock->identifier);
      bNodeSocket *tosock = ntree_shader_node_find_input(tonode, link->tosock->identifier);
      nodeAddLink(ntree, fromnode, fromsock, tonode, tosock);
    }
  }
  /* Per node callback. */
  if (callback) {
    for (int i = 0; i < iter_data.node_count; i++) {
      callback(nodes_copy[i], user_data);
    }
  }
  bNode *start_node_copy = nodes_copy[start_node->tmp_flag];
  return start_node_copy;
}

/* Generate emission node to convert regular data to closure sockets.
 * Returns validity of the tree.
 */
static bool ntree_shader_implicit_closure_cast(bNodeTree *ntree)
{
  bool modified = false;
  LISTBASE_FOREACH_MUTABLE (bNodeLink *, link, &ntree->links) {
    if ((link->fromsock->type != SOCK_SHADER) && (link->tosock->type == SOCK_SHADER)) {
      bNode *emission_node = nodeAddStaticNode(NULL, ntree, SH_NODE_EMISSION);
      bNodeSocket *in_sock = ntree_shader_node_find_input(emission_node, "Color");
      bNodeSocket *out_sock = ntree_shader_node_find_output(emission_node, "Emission");
      nodeAddLink(ntree, link->fromnode, link->fromsock, emission_node, in_sock);
      nodeAddLink(ntree, emission_node, out_sock, link->tonode, link->tosock);
      nodeRemLink(ntree, link);
      modified = true;
    }
    else if ((link->fromsock->type == SOCK_SHADER) && (link->tosock->type != SOCK_SHADER)) {
      /* Meh. Not directly visible to the user. But better than nothing. */
      fprintf(stderr, "Shader Nodetree Error: Invalid implicit socket conversion\n");
      BKE_ntree_update_main_tree(G.main, ntree, nullptr);
      return false;
    }
  }
  if (modified) {
    BKE_ntree_update_main_tree(G.main, ntree, nullptr);
  }
  return true;
}

/* Socket already has a link to it. Add weights together. */
static void ntree_weight_tree_merge_weight(bNodeTree *ntree,
                                           bNode *UNUSED(fromnode),
                                           bNodeSocket *fromsock,
                                           bNode **tonode,
                                           bNodeSocket **tosock)
{
  bNode *addnode = nodeAddStaticNode(NULL, ntree, SH_NODE_MATH);
  addnode->custom1 = NODE_MATH_ADD;
  addnode->tmp_flag = -2; /* Copy */
  bNodeSocket *addsock_out = ntree_shader_node_output_get(addnode, 0);
  bNodeSocket *addsock_in0 = ntree_shader_node_input_get(addnode, 0);
  bNodeSocket *addsock_in1 = ntree_shader_node_input_get(addnode, 1);
  bNodeLink *oldlink = fromsock->link;
  nodeAddLink(ntree, oldlink->fromnode, oldlink->fromsock, addnode, addsock_in0);
  nodeAddLink(ntree, *tonode, *tosock, addnode, addsock_in1);
  nodeRemLink(ntree, oldlink);
  *tonode = addnode;
  *tosock = addsock_out;
}

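/* Tag and count the nodes that belong to the weight tree (output, add/mix shader and
 * shader-to-rgb nodes). Mix shader nodes reserve 4 slots for their replacement nodes. */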
static bool ntree_weight_tree_tag_nodes(bNode *fromnode, bNode *tonode, void *userdata)
{
  int *node_count = (int *)userdata;
  bool to_node_from_weight_tree = ELEM(tonode->type,
                                       SH_NODE_ADD_SHADER,
                                       SH_NODE_MIX_SHADER,
                                       SH_NODE_OUTPUT_WORLD,
                                       SH_NODE_OUTPUT_MATERIAL,
                                       SH_NODE_SHADERTORGB);
  if (tonode->tmp_flag == -1 && to_node_from_weight_tree) {
    tonode->tmp_flag = *node_count;
    *node_count += (tonode->type == SH_NODE_MIX_SHADER) ? 4 : 1;
  }
  if (fromnode->tmp_flag == -1 && ELEM(fromnode->type, SH_NODE_ADD_SHADER, SH_NODE_MIX_SHADER)) {
    fromnode->tmp_flag = *node_count;
    *node_count += (fromnode->type == SH_NODE_MIX_SHADER) ? 4 : 1;
  }
  return to_node_from_weight_tree;
}

/* Invert evaluation order of the weight tree (add & mix closure nodes) to feed the closure nodes
 * with their respective weights. */
static void ntree_shader_weight_tree_invert(bNodeTree *ntree, bNode *output_node)
{
  bNodeLink *displace_link = NULL;
  bNodeSocket *displace_output = ntree_shader_node_find_input(output_node, "Displacement");
  if (displace_output && displace_output->link) {
    /* Remove any displacement link to avoid tagging it later on. */
    displace_link = displace_output->link;
    displace_output->link = NULL;
  }
  bNodeLink *thickness_link = NULL;
  bNodeSocket *thickness_output = ntree_shader_node_find_input(output_node, "Thickness");
  if (thickness_output && thickness_output->link) {
    /* Remove any thickness link to avoid tagging it later on. */
    thickness_link = thickness_output->link;
    thickness_output->link = NULL;
  }
  /* Init tmp flag. */
  LISTBASE_FOREACH (bNode *, node, &ntree->nodes) {
    node->tmp_flag = -1;
  }
  /* Tag nodes from the weight tree. Only tag output node and mix/add shader nodes. */
  output_node->tmp_flag = 0;
  int node_count = 1;
  nodeChainIterBackwards(ntree, output_node, ntree_weight_tree_tag_nodes, &node_count, 0);
  /* Make a mirror copy of the weight tree. */
  Array<bNode *> nodes_copy(node_count);
  LISTBASE_FOREACH (bNode *, node, &ntree->nodes) {
    if (node->tmp_flag >= 0) {
      int id = node->tmp_flag;

      switch (node->type) {
        case SH_NODE_SHADERTORGB:
        case SH_NODE_OUTPUT_WORLD:
        case SH_NODE_OUTPUT_MATERIAL: {
          /* Start the tree with full weight. */
          nodes_copy[id] = nodeAddStaticNode(NULL, ntree, SH_NODE_VALUE);
          nodes_copy[id]->tmp_flag = -2; /* Copy */
          ((bNodeSocketValueFloat *)ntree_shader_node_output_get(nodes_copy[id], 0)->default_value)
              ->value = 1.0f;
          break;
        }
        case SH_NODE_ADD_SHADER: {
          /* Simple passthrough node. Each original input will get the same weight. */
          /* TODO(fclem): Better use some kind of reroute node? */
          nodes_copy[id] = nodeAddStaticNode(NULL, ntree, SH_NODE_MATH);
          nodes_copy[id]->custom1 = NODE_MATH_ADD;
          nodes_copy[id]->tmp_flag = -2; /* Copy */
          ((bNodeSocketValueFloat *)ntree_shader_node_input_get(nodes_copy[id], 0)->default_value)
              ->value = 0.0f;
          break;
        }
        case SH_NODE_MIX_SHADER: {
          /* We need multiple nodes to emulate the mix node in reverse. */
          bNode *fromnode, *tonode;
          bNodeSocket *fromsock, *tosock;
          int id_start = id;
          /* output = (factor * input_weight) */
          nodes_copy[id] = nodeAddStaticNode(NULL, ntree, SH_NODE_MATH);
          nodes_copy[id]->custom1 = NODE_MATH_MULTIPLY;
          nodes_copy[id]->tmp_flag = -2; /* Copy */
          id++;
          /* output = ((1.0 - factor) * input_weight) <=> (input_weight - factor * input_weight) */
          nodes_copy[id] = nodeAddStaticNode(NULL, ntree, SH_NODE_MATH);
          nodes_copy[id]->custom1 = NODE_MATH_SUBTRACT;
          nodes_copy[id]->tmp_flag = -2; /* Copy */
          id++;
          /* Node sanitizes the input mix factor by clamping it. */
          nodes_copy[id] = nodeAddStaticNode(NULL, ntree, SH_NODE_MATH);
          nodes_copy[id]->custom1 = NODE_MATH_ADD;
          nodes_copy[id]->custom2 = SHD_MATH_CLAMP;
          nodes_copy[id]->tmp_flag = -2; /* Copy */
          ((bNodeSocketValueFloat *)ntree_shader_node_input_get(nodes_copy[id], 0)->default_value)
              ->value = 0.0f;
          /* Copy default value if no link present. */
          bNodeSocket *fac_sock = ntree_shader_node_find_input(node, "Fac");
          if (!fac_sock->link) {
            float default_value = ((bNodeSocketValueFloat *)fac_sock->default_value)->value;
            bNodeSocket *dst_sock = ntree_shader_node_input_get(nodes_copy[id], 1);
            ((bNodeSocketValueFloat *)dst_sock->default_value)->value = default_value;
          }
          id++;
          /* Reroute the weight input to the 3 processing nodes. Simplify linking later-on. */
          /* TODO(fclem): Better use some kind of reroute node? */
          nodes_copy[id] = nodeAddStaticNode(NULL, ntree, SH_NODE_MATH);
          nodes_copy[id]->custom1 = NODE_MATH_ADD;
          nodes_copy[id]->tmp_flag = -2; /* Copy */
          ((bNodeSocketValueFloat *)ntree_shader_node_input_get(nodes_copy[id], 0)->default_value)
              ->value = 0.0f;
          id++;
          /* Link between nodes for the subtraction. */
          fromnode = nodes_copy[id_start];
          tonode = nodes_copy[id_start + 1];
          fromsock = ntree_shader_node_output_get(fromnode, 0);
          tosock = ntree_shader_node_input_get(tonode, 1);
          nodeAddLink(ntree, fromnode, fromsock, tonode, tosock);
          /* Link mix input to first node. */
          fromnode = nodes_copy[id_start + 2];
          tonode = nodes_copy[id_start];
          fromsock = ntree_shader_node_output_get(fromnode, 0);
          tosock = ntree_shader_node_input_get(tonode, 1);
          nodeAddLink(ntree, fromnode, fromsock, tonode, tosock);
          /* Link weight input to both multiply nodes. */
          fromnode = nodes_copy[id_start + 3];
          fromsock = ntree_shader_node_output_get(fromnode, 0);
          tonode = nodes_copy[id_start];
          tosock = ntree_shader_node_input_get(tonode, 0);
          nodeAddLink(ntree, fromnode, fromsock, tonode, tosock);
          tonode = nodes_copy[id_start + 1];
          tosock = ntree_shader_node_input_get(tonode, 0);
          nodeAddLink(ntree, fromnode, fromsock, tonode, tosock);
          break;
        }
        default:
          BLI_assert(0);
          break;
      }
    }
  }
  /* Recreate links between copied nodes. */
  LISTBASE_FOREACH (bNode *, node, &ntree->nodes) {
    if (node->tmp_flag >= 0) {
      /* Naming can be confusing here. We use original nodelink name for from/to prefix.
       * The final link is in reversed order. */
      int socket_index;
      LISTBASE_FOREACH_INDEX (bNodeSocket *, sock, &node->inputs, socket_index) {
        bNodeSocket *tosock;
        bNode *tonode;

        switch (node->type) {
          case SH_NODE_SHADERTORGB:
          case SH_NODE_OUTPUT_WORLD:
          case SH_NODE_OUTPUT_MATERIAL:
          case SH_NODE_ADD_SHADER: {
            tonode = nodes_copy[node->tmp_flag];
            tosock = ntree_shader_node_output_get(tonode, 0);
            break;
          }
          case SH_NODE_MIX_SHADER: {
            if (socket_index == 0) {
              /* Mix Factor. */
              tonode = nodes_copy[node->tmp_flag + 2];
              tosock = ntree_shader_node_input_get(tonode, 1);
            }
            else if (socket_index == 1) {
              /* Shader 1. */
              tonode = nodes_copy[node->tmp_flag + 1];
              tosock = ntree_shader_node_output_get(tonode, 0);
            }
            else {
              /* Shader 2. */
              tonode = nodes_copy[node->tmp_flag];
              tosock = ntree_shader_node_output_get(tonode, 0);
            }
            break;
          }
          default:
            BLI_assert(0);
            break;
        }

        if (sock->link) {
          bNodeSocket *fromsock;
          bNode *fromnode = sock->link->fromnode;

          switch (fromnode->type) {
            case SH_NODE_ADD_SHADER: {
              fromnode = nodes_copy[fromnode->tmp_flag];
              fromsock = ntree_shader_node_input_get(fromnode, 1);
              if (fromsock->link) {
                ntree_weight_tree_merge_weight(ntree, fromnode, fromsock, &tonode, &tosock);
              }
              break;
            }
            case SH_NODE_MIX_SHADER: {
              fromnode = nodes_copy[fromnode->tmp_flag + 3];
              fromsock = ntree_shader_node_input_get(fromnode, 1);
              if (fromsock->link) {
                ntree_weight_tree_merge_weight(ntree, fromnode, fromsock, &tonode, &tosock);
              }
              break;
            }
            case SH_NODE_BACKGROUND:
            case SH_NODE_BSDF_ANISOTROPIC:
            case SH_NODE_BSDF_DIFFUSE:
            case SH_NODE_BSDF_GLASS:
            case SH_NODE_BSDF_GLOSSY:
            case SH_NODE_BSDF_HAIR_PRINCIPLED:
            case SH_NODE_BSDF_HAIR:
            case SH_NODE_BSDF_PRINCIPLED:
            case SH_NODE_BSDF_REFRACTION:
            case SH_NODE_BSDF_TOON:
            case SH_NODE_BSDF_TRANSLUCENT:
            case SH_NODE_BSDF_TRANSPARENT:
            case SH_NODE_BSDF_VELVET:
            case SH_NODE_EEVEE_SPECULAR:
            case SH_NODE_EMISSION:
            case SH_NODE_HOLDOUT:
            case SH_NODE_SUBSURFACE_SCATTERING:
            case SH_NODE_VOLUME_ABSORPTION:
            case SH_NODE_VOLUME_PRINCIPLED:
            case SH_NODE_VOLUME_SCATTER:
              fromsock = ntree_shader_node_find_input(fromnode, "Weight");
              if (fromsock->link) {
                ntree_weight_tree_merge_weight(ntree, fromnode, fromsock, &tonode, &tosock);
              }
              break;
            default:
              fromsock = sock->link->fromsock;
              break;
          }

          /* Manually add the link to the socket to avoid calling:
           * `BKE_ntree_update_main_tree(G.main, ntree, nullptr)`. */
          fromsock->link = nodeAddLink(ntree, fromnode, fromsock, tonode, tosock);
          BLI_assert(fromsock->link);
        }
      }
    }
  }
  /* Restore displacement & thickness link. */
  if (displace_link) {
    nodeAddLink(
        ntree, displace_link->fromnode, displace_link->fromsock, output_node, displace_output);
  }
  if (thickness_link) {
    nodeAddLink(
        ntree, thickness_link->fromnode, thickness_link->fromsock, output_node, thickness_output);
  }
  BKE_ntree_update_main_tree(G.main, ntree, nullptr);
}

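/* Filter for closure (shader) nodes, used when copying a closure branch. */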
static bool closure_node_filter(const bNode *node)
{
  switch (node->type) {
    case SH_NODE_ADD_SHADER:
    case SH_NODE_MIX_SHADER:
    case SH_NODE_BACKGROUND:
    case SH_NODE_BSDF_ANISOTROPIC:
    case SH_NODE_BSDF_DIFFUSE:
    case SH_NODE_BSDF_GLASS:
    case SH_NODE_BSDF_GLOSSY:
    case SH_NODE_BSDF_HAIR_PRINCIPLED:
    case SH_NODE_BSDF_HAIR:
    case SH_NODE_BSDF_PRINCIPLED:
    case SH_NODE_BSDF_REFRACTION:
    case SH_NODE_BSDF_TOON:
    case SH_NODE_BSDF_TRANSLUCENT:
    case SH_NODE_BSDF_TRANSPARENT:
    case SH_NODE_BSDF_VELVET:
    case SH_NODE_EEVEE_SPECULAR:
    case SH_NODE_EMISSION:
    case SH_NODE_HOLDOUT:
    case SH_NODE_SUBSURFACE_SCATTERING:
    case SH_NODE_VOLUME_ABSORPTION:
    case SH_NODE_VOLUME_PRINCIPLED:
    case SH_NODE_VOLUME_SCATTER:
      return true;
    default:
      return false;
  }
}

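/* Gather all untagged Shader-to-RGB nodes reachable from the output into the user data vector. */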
static bool shader_to_rgba_node_gather(bNode *UNUSED(fromnode), bNode *tonode, void *userdata)
|
|
|
|
|
{
|
|
|
|
|
Vector<bNode *> &shader_to_rgba_nodes = *(Vector<bNode *> *)userdata;
|
|
|
|
|
if (tonode->tmp_flag == -1 && tonode->type == SH_NODE_SHADERTORGB) {
|
|
|
|
|
tonode->tmp_flag = 0;
|
|
|
|
|
shader_to_rgba_nodes.append(tonode);
|
|
|
|
|
}
|
2017-07-15 16:09:44 +02:00
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2022-04-14 18:47:58 +02:00
|
|
|
/* Each Shader to RGB node needs its associated closure branch duplicated and a weight tree
 * generated for it. */
static void ntree_shader_shader_to_rgba_branch(bNodeTree *ntree, bNode *output_node)
{
  LISTBASE_FOREACH (bNode *, node, &ntree->nodes) {
    node->tmp_flag = -1;
  }
  /* First gather the shader_to_rgba nodes linked to the output. This is separate to avoid
   * conflicting usage of the `node->tmp_flag`. */
  Vector<bNode *> shader_to_rgba_nodes;
  nodeChainIterBackwards(ntree, output_node, shader_to_rgba_node_gather, &shader_to_rgba_nodes, 0);

  for (bNode *shader_to_rgba : shader_to_rgba_nodes) {
    bNodeSocket *closure_input = ntree_shader_node_input_get(shader_to_rgba, 0);
    if (closure_input->link == nullptr) {
      continue;
    }
    bNode *start_node = closure_input->link->fromnode;
    bNode *start_node_copy = ntree_shader_copy_branch(
        ntree, start_node, closure_node_filter, nullptr, 0);
    /* Replace node copy link. This assumes that every node possibly connected to the closure
     * input has only one output. */
    bNodeSocket *closure_output = ntree_shader_node_output_get(start_node_copy, 0);
    nodeRemLink(ntree, closure_input->link);
    nodeAddLink(ntree, start_node_copy, closure_output, shader_to_rgba, closure_input);
    BKE_ntree_update_main_tree(G.main, ntree, nullptr);

    ntree_shader_weight_tree_invert(ntree, shader_to_rgba);
  }
}

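/* Node chain iterator callback: tag both ends of each traversed link as used so the branch is
 * kept by ntree_shader_pruned_unused(). */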
static bool ntree_branch_node_tag(bNode *fromnode, bNode *tonode, void *UNUSED(userdata))
{
  fromnode->tmp_flag = 1;
  tonode->tmp_flag = 1;
  return true;
}

/* Avoid adding more node execution when multiple outputs are present. */
/* NOTE(@fclem): This is also a workaround for the old EEVEE SSS implementation where only the
 * first executed SSS node gets a SSS profile. */
static void ntree_shader_pruned_unused(bNodeTree *ntree, bNode *output_node)
{
  bool changed = false;

  LISTBASE_FOREACH (bNode *, node, &ntree->nodes) {
    node->tmp_flag = 0;
  }

  /* Avoid deleting the output node if it is the only node in the tree. */
  output_node->tmp_flag = 1;

  nodeChainIterBackwards(ntree, output_node, ntree_branch_node_tag, nullptr, 0);

  LISTBASE_FOREACH (bNode *, node, &ntree->nodes) {
    if (node->type == SH_NODE_OUTPUT_AOV) {
      nodeChainIterBackwards(ntree, node, ntree_branch_node_tag, nullptr, 0);
    }
  }

  LISTBASE_FOREACH_MUTABLE (bNode *, node, &ntree->nodes) {
    if (node->tmp_flag == 0) {
      ntreeFreeLocalNode(ntree, node);
      changed = true;
    }
  }

  if (changed) {
    BKE_ntree_update_main_tree(G.main, ntree, nullptr);
  }
}

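/* Entry point for GPU material code generation: flatten node groups, prune unused branches,
 * handle Shader to RGB conversion and the closure weight tree, then execute the remaining nodes
 * (plus any AOV outputs) into the GPUMaterial. */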
void ntreeGPUMaterialNodes(bNodeTree *localtree, GPUMaterial *mat)
{
  bNodeTreeExec *exec;

  ntree_shader_groups_remove_muted_links(localtree);
  ntree_shader_groups_expand_inputs(localtree);
  ntree_shader_groups_flatten(localtree);

  bNode *output = ntreeShaderOutputNode(localtree, SHD_OUTPUT_EEVEE);

  /* Tree is valid if it contains no undefined implicit socket type cast. */
  bool valid_tree = ntree_shader_implicit_closure_cast(localtree);

  if (valid_tree && output != nullptr) {
    ntree_shader_pruned_unused(localtree, output);
    ntree_shader_shader_to_rgba_branch(localtree, output);
    ntree_shader_weight_tree_invert(localtree, output);
  }

  exec = ntreeShaderBeginExecTree(localtree);
  ntreeExecGPUNodes(exec, mat, output);
  LISTBASE_FOREACH (bNode *, node, &localtree->nodes) {
    if (node->type == SH_NODE_OUTPUT_AOV) {
      ntreeExecGPUNodes(exec, mat, node);
    }
  }
  ntreeShaderEndExecTree(exec);
}

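/* Common setup for shader tree execution data: enable a single output node, build the exec data,
 * allocate the per-thread stack array and mark every node for execution. */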
bNodeTreeExec *ntreeShaderBeginExecTree_internal(bNodeExecContext *context,
                                                 bNodeTree *ntree,
                                                 bNodeInstanceKey parent_key)
{
  /* Ensures only a single output node is enabled. */
  ntreeSetOutput(ntree);

  /* Common base initialization. */
  bNodeTreeExec *exec = ntree_exec_begin(context, ntree, parent_key);

  /* Allocate the thread stack listbase array. */
  exec->threadstack = static_cast<ListBase *>(
      MEM_callocN(BLENDER_MAX_THREADS * sizeof(ListBase), "thread stack array"));

  LISTBASE_FOREACH (bNode *, node, &exec->nodetree->nodes) {
    node->need_exec = 1;
  }

  return exec;
}

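/* Public wrapper: reuse cached exec data when present, otherwise build it for the whole tree. */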
bNodeTreeExec *ntreeShaderBeginExecTree(bNodeTree *ntree)
{
  bNodeExecContext context;
  bNodeTreeExec *exec;

  /* XXX hack: prevent exec data from being generated twice.
   * This should be handled by the renderer! */
  if (ntree->execdata) {
    return ntree->execdata;
  }

  context.previews = ntree->previews;

  exec = ntreeShaderBeginExecTree_internal(&context, ntree, NODE_INSTANCE_KEY_BASE);

  /* XXX: this should not be necessary, but is still used for compositor/shader/texture nodes,
   * which only store the `ntree` pointer. Should be fixed at some point! */
  ntree->execdata = exec;

  return exec;
}

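/* Free the per-thread stacks allocated in ntreeShaderBeginExecTree_internal() and release the
 * exec data. */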
void ntreeShaderEndExecTree_internal(bNodeTreeExec *exec)
{
  if (exec->threadstack) {
    for (int a = 0; a < BLENDER_MAX_THREADS; a++) {
      LISTBASE_FOREACH (bNodeThreadStack *, nts, &exec->threadstack[a]) {
        if (nts->stack) {
          MEM_freeN(nts->stack);
        }
      }
      BLI_freelistN(&exec->threadstack[a]);
    }

    MEM_freeN(exec->threadstack);
    exec->threadstack = nullptr;
  }

  ntree_exec_end(exec);
}

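/* Public counterpart of ntreeShaderBeginExecTree(): free the exec data and clear the tree's
 * cached back-pointer. */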
void ntreeShaderEndExecTree(bNodeTreeExec *exec)
{
  if (exec) {
    /* `exec` may get freed, so store the node-tree pointer first. */
    bNodeTree *ntree = exec->nodetree;
    ntreeShaderEndExecTree_internal(exec);

    /* XXX clear nodetree backpointer to exec data, same problem as noted in ntreeBeginExecTree */
    ntree->execdata = nullptr;
  }
}