blender-archive/source/blender/blenkernel/intern/texture.c

/*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The Original Code is Copyright (C) 2001-2002 by NaN Holding BV.
* All rights reserved.
*/
/** \file
* \ingroup bke
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include "MEM_guardedalloc.h"
#include "BLI_math.h"
#include "BLI_kdopbvh.h"
#include "BLI_utildefines.h"
#include "BLI_math_color.h"
#include "DNA_key_types.h"
#include "DNA_object_types.h"
#include "DNA_material_types.h"
#include "DNA_brush_types.h"
#include "DNA_node_types.h"
#include "DNA_color_types.h"
#include "DNA_particle_types.h"
#include "DNA_linestyle_types.h"
#include "IMB_imbuf.h"
#include "BKE_main.h"
#include "BKE_colorband.h"
#include "BKE_library.h"
#include "BKE_image.h"
#include "BKE_material.h"
#include "BKE_texture.h"
#include "BKE_key.h"
#include "BKE_icons.h"
#include "BKE_node.h"
#include "BKE_animsys.h"
#include "BKE_colortools.h"
#include "BKE_scene.h"
#include "RE_shader_ext.h"
/* ****************** Mapping ******************* */
TexMapping *BKE_texture_mapping_add(int type)
{
TexMapping *texmap = MEM_callocN(sizeof(TexMapping), "TexMapping");
BKE_texture_mapping_default(texmap, type);
return texmap;
}
void BKE_texture_mapping_default(TexMapping *texmap, int type)
{
memset(texmap, 0, sizeof(TexMapping));
texmap->size[0] = texmap->size[1] = texmap->size[2] = 1.0f;
texmap->max[0] = texmap->max[1] = texmap->max[2] = 1.0f;
unit_m4(texmap->mat);
texmap->projx = PROJ_X;
texmap->projy = PROJ_Y;
texmap->projz = PROJ_Z;
texmap->mapping = MTEX_FLAT;
texmap->type = type;
}
void BKE_texture_mapping_init(TexMapping *texmap)
{
float smat[4][4], rmat[4][4], tmat[4][4], proj[4][4], size[3];
if (texmap->projx == PROJ_X && texmap->projy == PROJ_Y && texmap->projz == PROJ_Z &&
is_zero_v3(texmap->loc) && is_zero_v3(texmap->rot) && is_one_v3(texmap->size))
{
unit_m4(texmap->mat);
texmap->flag |= TEXMAP_UNIT_MATRIX;
}
else {
/* axis projection */
zero_m4(proj);
proj[3][3] = 1.0f;
if (texmap->projx != PROJ_N)
proj[texmap->projx - 1][0] = 1.0f;
if (texmap->projy != PROJ_N)
proj[texmap->projy - 1][1] = 1.0f;
if (texmap->projz != PROJ_N)
proj[texmap->projz - 1][2] = 1.0f;
/* scale */
copy_v3_v3(size, texmap->size);
if (ELEM(texmap->type, TEXMAP_TYPE_TEXTURE, TEXMAP_TYPE_NORMAL)) {
/* keep matrix invertible */
if (fabsf(size[0]) < 1e-5f)
size[0] = signf(size[0]) * 1e-5f;
if (fabsf(size[1]) < 1e-5f)
size[1] = signf(size[1]) * 1e-5f;
if (fabsf(size[2]) < 1e-5f)
size[2] = signf(size[2]) * 1e-5f;
}
size_to_mat4(smat, texmap->size);
/* rotation */
eul_to_mat4(rmat, texmap->rot);
/* translation */
unit_m4(tmat);
copy_v3_v3(tmat[3], texmap->loc);
if (texmap->type == TEXMAP_TYPE_TEXTURE) {
/* to transform a texture, the inverse transform needs
* to be applied to the texture coordinate */
mul_m4_series(texmap->mat, tmat, rmat, smat);
invert_m4(texmap->mat);
}
else if (texmap->type == TEXMAP_TYPE_POINT) {
/* forward transform */
mul_m4_series(texmap->mat, tmat, rmat, smat);
}
else if (texmap->type == TEXMAP_TYPE_VECTOR) {
/* no translation for vectors */
mul_m4_m4m4(texmap->mat, rmat, smat);
}
else if (texmap->type == TEXMAP_TYPE_NORMAL) {
/* no translation for normals, and inverse transpose */
mul_m4_m4m4(texmap->mat, rmat, smat);
invert_m4(texmap->mat);
transpose_m4(texmap->mat);
}
/* projection last */
mul_m4_m4m4(texmap->mat, texmap->mat, proj);
texmap->flag &= ~TEXMAP_UNIT_MATRIX;
}
}
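/* Usage sketch (editorial illustration, not part of the original source): build a
 * point mapping, tweak it and rebuild its matrix before applying it.
 *
 *   TexMapping map;
 *
 *   BKE_texture_mapping_default(&map, TEXMAP_TYPE_POINT);
 *   map.loc[1] = 2.0f;               // translate along Y
 *   map.size[0] = 0.5f;              // halve the X scale
 *   BKE_texture_mapping_init(&map);  // recompute map.mat from proj/loc/rot/size
 *   // map.mat can then be applied to a coordinate, e.g. mul_m4_v3(map.mat, co).
 */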
ColorMapping *BKE_texture_colormapping_add(void)
{
ColorMapping *colormap = MEM_callocN(sizeof(ColorMapping), "ColorMapping");
BKE_texture_colormapping_default(colormap);
return colormap;
}
void BKE_texture_colormapping_default(ColorMapping *colormap)
{
memset(colormap, 0, sizeof(ColorMapping));
BKE_colorband_init(&colormap->coba, true);
colormap->bright = 1.0;
colormap->contrast = 1.0;
colormap->saturation = 1.0;
colormap->blend_color[0] = 0.8f;
colormap->blend_color[1] = 0.8f;
colormap->blend_color[2] = 0.8f;
colormap->blend_type = MA_RAMP_BLEND;
colormap->blend_factor = 0.0f;
}
/* ******************* TEX ************************ */
/** Free (or release) any data used by this texture (does not free the texture itself). */
void BKE_texture_free(Tex *tex)
{
BKE_animdata_free((ID *)tex, false);
/* The node tree is not a library data-block but an extension owned by the texture, so free it directly. */
if (tex->nodetree) {
ntreeFreeNestedTree(tex->nodetree);
MEM_freeN(tex->nodetree);
tex->nodetree = NULL;
}
MEM_SAFE_FREE(tex->coba);
BKE_icon_id_delete((ID *)tex);
BKE_previewimg_free(&tex->preview);
}
/* ------------------------------------------------------------------------- */
void BKE_texture_default(Tex *tex)
{
/* BLI_assert(MEMCMP_STRUCT_AFTER_IS_ZERO(tex, id)); */ /* Not here, can be called with some pointers set. :/ */
tex->type = TEX_IMAGE;
tex->ima = NULL;
tex->stype = 0;
tex->flag = TEX_CHECKER_ODD;
tex->imaflag = TEX_INTERPOL | TEX_MIPMAP | TEX_USEALPHA;
tex->extend = TEX_REPEAT;
tex->cropxmin = tex->cropymin = 0.0;
tex->cropxmax = tex->cropymax = 1.0;
tex->texfilter = TXF_EWA;
tex->afmax = 8;
tex->xrepeat = tex->yrepeat = 1;
tex->sfra = 1;
tex->frames = 0;
tex->offset = 0;
tex->noisesize = 0.25;
tex->noisedepth = 2;
tex->turbul = 5.0;
tex->nabla = 0.025; // also in do_versions
tex->bright = 1.0;
tex->contrast = 1.0;
tex->saturation = 1.0;
tex->filtersize = 1.0;
tex->rfac = 1.0;
tex->gfac = 1.0;
tex->bfac = 1.0;
/* newnoise: init. */
tex->noisebasis = 0;
tex->noisebasis2 = 0;
/* musgrave */
tex->mg_H = 1.0;
tex->mg_lacunarity = 2.0;
tex->mg_octaves = 2.0;
tex->mg_offset = 1.0;
tex->mg_gain = 1.0;
tex->ns_outscale = 1.0;
/* distnoise */
tex->dist_amount = 1.0;
/* voronoi */
tex->vn_w1 = 1.0;
tex->vn_w2 = tex->vn_w3 = tex->vn_w4 = 0.0;
tex->vn_mexp = 2.5;
tex->vn_distm = 0;
tex->vn_coltype = 0;
tex->iuser.ok = 1;
tex->iuser.frames = 100;
tex->iuser.sfra = 1;
tex->preview = NULL;
}
void BKE_texture_type_set(Tex *tex, int type)
{
tex->type = type;
}
/* ------------------------------------------------------------------------- */
Tex *BKE_texture_add(Main *bmain, const char *name)
{
Tex *tex;
tex = BKE_libblock_alloc(bmain, ID_TE, name, 0);
BKE_texture_default(tex);
return tex;
}
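/* Usage sketch (editorial illustration, not part of the original source): create a
 * new texture datablock in a Main database and change its type.
 *
 *   Tex *tex = BKE_texture_add(bmain, "MyTexture");  // new ID_TE block with default settings
 *   BKE_texture_type_set(tex, TEX_CLOUDS);           // default from BKE_texture_default() is TEX_IMAGE
 *
 * `bmain` is assumed to be the current Main; the block is later freed through the
 * generic ID freeing code rather than by calling BKE_texture_free() directly.
 */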
/* ------------------------------------------------------------------------- */
void BKE_texture_mtex_default(MTex *mtex)
{
mtex->texco = TEXCO_UV;
mtex->mapto = MAP_COL;
mtex->object = NULL;
mtex->projx = PROJ_X;
mtex->projy = PROJ_Y;
mtex->projz = PROJ_Z;
mtex->mapping = MTEX_FLAT;
mtex->ofs[0] = 0.0;
mtex->ofs[1] = 0.0;
mtex->ofs[2] = 0.0;
mtex->size[0] = 1.0;
mtex->size[1] = 1.0;
mtex->size[2] = 1.0;
mtex->tex = NULL;
mtex->colormodel = 0;
mtex->r = 1.0;
mtex->g = 0.0;
mtex->b = 1.0;
mtex->k = 1.0;
mtex->def_var = 1.0;
mtex->blendtype = MTEX_BLEND;
mtex->colfac = 1.0;
mtex->norfac = 1.0;
mtex->varfac = 1.0;
mtex->dispfac = 0.2;
mtex->colspecfac = 1.0f;
mtex->mirrfac = 1.0f;
mtex->alphafac = 1.0f;
mtex->difffac = 1.0f;
mtex->specfac = 1.0f;
mtex->emitfac = 1.0f;
mtex->hardfac = 1.0f;
mtex->raymirrfac = 1.0f;
mtex->translfac = 1.0f;
mtex->ambfac = 1.0f;
mtex->colemitfac = 1.0f;
mtex->colreflfac = 1.0f;
mtex->coltransfac = 1.0f;
mtex->densfac = 1.0f;
mtex->scatterfac = 1.0f;
mtex->reflfac = 1.0f;
mtex->shadowfac = 1.0f;
mtex->zenupfac = 1.0f;
mtex->zendownfac = 1.0f;
mtex->blendfac = 1.0f;
mtex->timefac = 1.0f;
mtex->lengthfac = 1.0f;
mtex->clumpfac = 1.0f;
mtex->kinkfac = 1.0f;
mtex->kinkampfac = 1.0f;
mtex->roughfac = 1.0f;
mtex->twistfac = 1.0f;
mtex->padensfac = 1.0f;
mtex->lifefac = 1.0f;
mtex->sizefac = 1.0f;
mtex->ivelfac = 1.0f;
mtex->dampfac = 1.0f;
mtex->gravityfac = 1.0f;
mtex->fieldfac = 1.0f;
mtex->normapspace = MTEX_NSPACE_TANGENT;
mtex->brush_map_mode = MTEX_MAP_MODE_TILED;
mtex->random_angle = 2.0f * (float)M_PI;
mtex->brush_angle_mode = 0;
}
/* ------------------------------------------------------------------------- */
MTex *BKE_texture_mtex_add(void)
{
MTex *mtex;
mtex = MEM_callocN(sizeof(MTex), "BKE_texture_mtex_add");
BKE_texture_mtex_default(mtex);
return mtex;
}
/* slot -1 for first free ID */
MTex *BKE_texture_mtex_add_id(ID *id, int slot)
{
MTex **mtex_ar;
short act;
give_active_mtex(id, &mtex_ar, &act);
if (mtex_ar == NULL) {
return NULL;
}
if (slot == -1) {
/* find first free */
int i;
for (i = 0; i < MAX_MTEX; i++) {
if (!mtex_ar[i]) {
slot = i;
break;
}
}
if (slot == -1) {
return NULL;
}
}
else {
/* make sure slot is valid */
if (slot < 0 || slot >= MAX_MTEX) {
return NULL;
}
}
if (mtex_ar[slot]) {
id_us_min((ID *)mtex_ar[slot]->tex);
MEM_freeN(mtex_ar[slot]);
mtex_ar[slot] = NULL;
}
mtex_ar[slot] = BKE_texture_mtex_add();
return mtex_ar[slot];
}
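/* Usage sketch (editorial illustration, not part of the original source): put a
 * texture into the first free slot of an MTex-carrying ID (`part` and `tex` are
 * assumed to exist already).
 *
 *   MTex *mtex = BKE_texture_mtex_add_id(&part->id, -1);  // -1 = first free slot
 *   if (mtex) {
 *       mtex->tex = tex;
 *       id_us_plus(&tex->id);  // the slot now references the texture
 *   }
 */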
/* ------------------------------------------------------------------------- */
/**
* Only copy internal data of Texture ID from source to already allocated/initialized destination.
* You probably never want to use this directly; use BKE_id_copy or BKE_id_copy_ex for typical needs.
*
* WARNING! This function will not handle ID user count!
*
* \param flag: Copying options (see BKE_library.h's LIB_ID_COPY_... flags for more).
*/
void BKE_texture_copy_data(Main *bmain, Tex *tex_dst, const Tex *tex_src, const int flag)
{
if (!BKE_texture_is_image_user(tex_src)) {
tex_dst->ima = NULL;
}
if (tex_dst->coba) {
tex_dst->coba = MEM_dupallocN(tex_dst->coba);
}
if (tex_src->nodetree) {
if (tex_src->nodetree->execdata) {
ntreeTexEndExecTree(tex_src->nodetree->execdata);
}
/* Note: nodetree is *not* in bmain, however this specific case is handled at lower level
* (see BKE_libblock_copy_ex()). */
BKE_id_copy_ex(bmain, (ID *)tex_src->nodetree, (ID **)&tex_dst->nodetree, flag);
}
if ((flag & LIB_ID_COPY_NO_PREVIEW) == 0) {
BKE_previewimg_id_copy(&tex_dst->id, &tex_src->id);
}
else {
tex_dst->preview = NULL;
}
}
Tex *BKE_texture_copy(Main *bmain, const Tex *tex)
{
Tex *tex_copy;
BKE_id_copy(bmain, &tex->id, (ID **)&tex_copy);
return tex_copy;
}
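/* Usage sketch (editorial illustration, not part of the original source):
 *
 *   Tex *tex_copy = BKE_texture_copy(bmain, tex);  // full duplicate via BKE_id_copy()
 *
 * The duplicate gets its own node tree, colorband and preview, as handled by
 * BKE_texture_copy_data() above; `bmain` and `tex` are assumed to exist.
 */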
/* texture copy without adding to the main database */
Tex *BKE_texture_localize(Tex *tex)
{
/* TODO(bastien): Replace with something like:
*
* Tex *tex_copy;
* BKE_id_copy_ex(bmain, &tex->id, (ID **)&tex_copy,
* LIB_ID_COPY_NO_MAIN | LIB_ID_COPY_NO_PREVIEW | LIB_ID_COPY_NO_USER_REFCOUNT,
* false);
* return tex_copy;
*
* NOTE: Only possible once nested node trees are fully converted to that too. */
Tex *texn;
texn = BKE_libblock_copy_for_localize(&tex->id);
/* image texture: BKE_texture_free also doesn't decrease the image user count */
if (texn->coba) texn->coba = MEM_dupallocN(texn->coba);
texn->preview = NULL;
if (tex->nodetree) {
texn->nodetree = ntreeLocalize(tex->nodetree);
}
texn->id.tag |= LIB_TAG_LOCALIZED;
return texn;
}
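/* Usage sketch (editorial illustration, not part of the original source): a
 * temporary local copy for preview/render evaluation that never enters Main and
 * leaves user counts untouched.
 *
 *   Tex *tex_local = BKE_texture_localize(tex);
 *   ... evaluate/sample tex_local ...
 *   BKE_texture_free(tex_local);  // release its data; image users are not decreased
 *   MEM_freeN(tex_local);         // assumption: the caller also frees the struct itself
 */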
/* ------------------------------------------------------------------------- */
void BKE_texture_make_local(Main *bmain, Tex *tex, const bool lib_local)
{
BKE_id_make_local_generic(bmain, &tex->id, true, lib_local);
}
Tex *give_current_linestyle_texture(FreestyleLineStyle *linestyle)
{
MTex *mtex = NULL;
Tex *tex = NULL;
if (linestyle) {
mtex = linestyle->mtex[(int)(linestyle->texact)];
if (mtex) tex = mtex->tex;
}
return tex;
}
void set_current_linestyle_texture(FreestyleLineStyle *linestyle, Tex *newtex)
{
int act = linestyle->texact;
if (linestyle->mtex[act] && linestyle->mtex[act]->tex)
id_us_min(&linestyle->mtex[act]->tex->id);
if (newtex) {
if (!linestyle->mtex[act]) {
linestyle->mtex[act] = BKE_texture_mtex_add();
linestyle->mtex[act]->texco = TEXCO_STROKE;
}
linestyle->mtex[act]->tex = newtex;
id_us_plus(&newtex->id);
}
else if (linestyle->mtex[act]) {
MEM_freeN(linestyle->mtex[act]);
linestyle->mtex[act] = NULL;
}
}
bool give_active_mtex(ID *id, MTex ***mtex_ar, short *act)
{
switch (GS(id->name)) {
case ID_LS:
*mtex_ar = ((FreestyleLineStyle *)id)->mtex;
if (act) *act = (((FreestyleLineStyle *)id)->texact);
break;
case ID_PA:
*mtex_ar = ((ParticleSettings *)id)->mtex;
if (act) *act = (((ParticleSettings *)id)->texact);
break;
default:
*mtex_ar = NULL;
if (act) *act = 0;
return false;
}
return true;
}
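/* Usage sketch (editorial illustration, not part of the original source): query the
 * active texture slot of any MTex-carrying ID (line styles and particle settings here).
 *
 *   MTex **mtex_ar;
 *   short act;
 *
 *   if (give_active_mtex(id, &mtex_ar, &act)) {
 *       MTex *active = mtex_ar[act];  // may still be NULL if the slot is empty
 *       ...
 *   }
 */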
void set_active_mtex(ID *id, short act)
{
if (act < 0) act = 0;
else if (act >= MAX_MTEX) act = MAX_MTEX - 1;
switch (GS(id->name)) {
case ID_LS:
((FreestyleLineStyle *)id)->texact = act;
break;
case ID_PA:
((ParticleSettings *)id)->texact = act;
break;
default:
break;
}
}
Tex *give_current_brush_texture(Brush *br)
{
return br->mtex.tex;
}
void set_current_brush_texture(Brush *br, Tex *newtex)
{
if (br->mtex.tex)
id_us_min(&br->mtex.tex->id);
if (newtex) {
br->mtex.tex = newtex;
id_us_plus(&newtex->id);
}
}
Tex *give_current_particle_texture(ParticleSettings *part)
{
MTex *mtex = NULL;
Tex *tex = NULL;
if (!part) return NULL;
mtex = part->mtex[(int)(part->texact)];
if (mtex) tex = mtex->tex;
return tex;
}
void set_current_particle_texture(ParticleSettings *part, Tex *newtex)
{
int act = part->texact;
if (part->mtex[act] && part->mtex[act]->tex)
id_us_min(&part->mtex[act]->tex->id);
if (newtex) {
if (!part->mtex[act]) {
part->mtex[act] = BKE_texture_mtex_add();
part->mtex[act]->texco = TEXCO_ORCO;
part->mtex[act]->blendtype = MTEX_MUL;
}
part->mtex[act]->tex = newtex;
id_us_plus(&newtex->id);
}
else if (part->mtex[act]) {
MEM_freeN(part->mtex[act]);
part->mtex[act] = NULL;
}
}
/* ------------------------------------------------------------------------- */
void BKE_texture_pointdensity_init_data(PointDensity *pd)
{
pd->flag = 0;
pd->radius = 0.3f;
pd->falloff_type = TEX_PD_FALLOFF_STD;
pd->falloff_softness = 2.0;
pd->source = TEX_PD_PSYS;
pd->point_tree = NULL;
pd->point_data = NULL;
pd->noise_size = 0.5f;
pd->noise_depth = 1;
pd->noise_fac = 1.0f;
pd->noise_influence = TEX_PD_NOISE_STATIC;
pd->coba = BKE_colorband_add(true);
pd->speed_scale = 1.0f;
pd->totpoints = 0;
pd->object = NULL;
pd->psys = 0;
pd->psys_cache_space = TEX_PD_WORLDSPACE;
pd->falloff_curve = curvemapping_add(1, 0, 0, 1, 1);
pd->falloff_curve->preset = CURVE_PRESET_LINE;
pd->falloff_curve->cm->flag &= ~CUMA_EXTEND_EXTRAPOLATE;
curvemap_reset(pd->falloff_curve->cm, &pd->falloff_curve->clipr, pd->falloff_curve->preset, CURVEMAP_SLOPE_POSITIVE);
curvemapping_changed(pd->falloff_curve, false);
}
PointDensity *BKE_texture_pointdensity_add(void)
{
PointDensity *pd = MEM_callocN(sizeof(PointDensity), "pointdensity");
BKE_texture_pointdensity_init_data(pd);
return pd;
}
PointDensity *BKE_texture_pointdensity_copy(const PointDensity *pd, const int UNUSED(flag))
{
PointDensity *pdn;
pdn = MEM_dupallocN(pd);
pdn->point_tree = NULL;
pdn->point_data = NULL;
if (pdn->coba) {
pdn->coba = MEM_dupallocN(pdn->coba);
}
pdn->falloff_curve = curvemapping_copy(pdn->falloff_curve); /* can be NULL */
return pdn;
}
void BKE_texture_pointdensity_free_data(PointDensity *pd)
{
if (pd->point_tree) {
BLI_bvhtree_free(pd->point_tree);
pd->point_tree = NULL;
}
if (pd->point_data) {
MEM_freeN(pd->point_data);
pd->point_data = NULL;
}
if (pd->coba) {
MEM_freeN(pd->coba);
pd->coba = NULL;
}
curvemapping_free(pd->falloff_curve); /* can be NULL */
}
void BKE_texture_pointdensity_free(PointDensity *pd)
{
BKE_texture_pointdensity_free_data(pd);
MEM_freeN(pd);
}
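/* Usage note (illustrative sketch): BKE_texture_pointdensity_free_data() only
 * clears the run-time caches (BVH tree, point data, color band, falloff curve)
 * so the PointDensity can be rebuilt or reused, while
 * BKE_texture_pointdensity_free() above also frees the struct itself, e.g.
 * when a texture that owns one (tex->pd, assumed here) is being freed:
 *
 *   if (tex->pd) {
 *       BKE_texture_pointdensity_free(tex->pd);
 *       tex->pd = NULL;
 *   }
 */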
/* ------------------------------------------------------------------------- */
/**
 * \returns true if this texture can use its #Texture.ima (even if it is NULL).
 */
bool BKE_texture_is_image_user(const struct Tex *tex)
{
switch (tex->type) {
case TEX_IMAGE:
{
return true;
}
}
return false;
}
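/* Example (illustrative sketch; refresh_image_preview() is a made-up helper):
 * callers can use this to decide whether a texture's Image/ImageUser settings
 * are meaningful, e.g. before showing or refreshing image-related UI:
 *
 *   if (BKE_texture_is_image_user(tex)) {
 *       // tex->ima may still be NULL, but the image slot is relevant here
 *       refresh_image_preview(tex->ima);
 *   }
 */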
/* ------------------------------------------------------------------------- */
bool BKE_texture_dependsOnTime(const struct Tex *texture)
{
if (texture->ima && BKE_image_is_animated(texture->ima)) {
return true;
}
else if (texture->adt) {
/* assume anything in adt means the texture is animated */
return true;
}
else if (texture->type == TEX_NOISE) {
/* noise always varies with time */
return true;
}
return false;
}
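/* Example (illustrative sketch; tag_texture_for_frame_update() is a made-up
 * helper): update/dependency code can use this to decide whether a texture
 * needs re-evaluation on every frame change:
 *
 *   if (BKE_texture_dependsOnTime(tex)) {
 *       tag_texture_for_frame_update(tex);
 *   }
 */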
/* ------------------------------------------------------------------------- */
void BKE_texture_get_value_ex(
const Scene *scene, Tex *texture,
float *tex_co, TexResult *texres,
struct ImagePool *pool,
bool use_color_management)
{
int result_type;
bool do_color_manage = false;
if (scene && use_color_management) {
do_color_manage = BKE_scene_check_color_management_enabled(scene);
}
/* no node textures for now */
result_type = multitex_ext_safe(texture, tex_co, texres, pool, do_color_manage, false);
/* If the texture gave an RGB value, assume it did not also provide a valid
 * intensity, so derive one from a plain average of the channels (this runs in
 * the context of modifiers, so no perceptual color conversion is used).
 * If the texture did not give an RGB value, copy the intensity across to the
 * RGB channels instead.
 */
if (result_type & TEX_RGB) {
texres->tin = (1.0f / 3.0f) * (texres->tr + texres->tg + texres->tb);
}
else {
copy_v3_fl(&texres->tr, texres->tin);
}
}
void BKE_texture_get_value(
const Scene *scene, Tex *texture,
float *tex_co, TexResult *texres, bool use_color_management)
{
BKE_texture_get_value_ex(scene, texture, tex_co, texres, NULL, use_color_management);
}
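/* Example (illustrative sketch; 'scene', 'tex' and the sample coordinate are
 * assumed to come from the caller): sampling a texture at a point, the way a
 * modifier typically would:
 *
 *   TexResult texres = {0};
 *   float co[3] = {0.25f, 0.5f, 0.0f};
 *
 *   BKE_texture_get_value(scene, tex, co, &texres, true);
 *   // texres.tin now holds the intensity; texres.tr/tg/tb hold a color,
 *   // either from the texture itself or copied from the intensity above.
 */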
static void texture_nodes_fetch_images_for_pool(Tex *texture, bNodeTree *ntree, struct ImagePool *pool)
{
for (bNode *node = ntree->nodes.first; node; node = node->next) {
if (node->type == SH_NODE_TEX_IMAGE && node->id != NULL) {
Image *image = (Image *)node->id;
BKE_image_pool_acquire_ibuf(image, &texture->iuser, pool);
}
else if (node->type == NODE_GROUP && node->id != NULL) {
/* TODO(sergey): Do we need to control recursion here? */
bNodeTree *nested_tree = (bNodeTree *)node->id;
texture_nodes_fetch_images_for_pool(texture, nested_tree, pool);
}
}
}
/* Make sure all images used by the texture are loaded into the pool. */
void BKE_texture_fetch_images_for_pool(Tex *texture, struct ImagePool *pool)
{
if (texture->nodetree != NULL) {
texture_nodes_fetch_images_for_pool(texture, texture->nodetree, pool);
}
else {
if (texture->type == TEX_IMAGE) {
if (texture->ima != NULL) {
BKE_image_pool_acquire_ibuf(texture->ima, &texture->iuser, pool);
}
}
}
}
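/* Example (illustrative sketch; the loop, 'sample_co' and 'densities' are made
 * up, and BKE_image_pool_new()/BKE_image_pool_free() are assumed to come from
 * BKE_image.h): batch evaluation with an image pool, so image buffers are
 * acquired once up-front instead of per sample:
 *
 *   struct ImagePool *pool = BKE_image_pool_new();
 *   BKE_texture_fetch_images_for_pool(tex, pool);
 *
 *   for (int i = 0; i < num_samples; i++) {
 *       TexResult texres = {0};
 *       BKE_texture_get_value_ex(scene, tex, sample_co[i], &texres, pool, true);
 *       densities[i] = texres.tin;
 *   }
 *
 *   BKE_image_pool_free(pool);
 */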