diff --git a/mesh_tissue/README.md b/mesh_tissue/README.md
index dfdc6fecc..90f53ec4b 100644
--- a/mesh_tissue/README.md
+++ b/mesh_tissue/README.md
@@ -1,21 +1,15 @@
 # Tissue
 ![cover](http://www.co-de-it.com/wordpress/wp-content/uploads/2015/07/tissue_graphics.jpg)
 
 Tissue - Blender's add-on for computational design by Co-de-iT
-http://www.co-de-it.com/wordpress/code/blender-tissue
+https://www.co-de-it.com/code/blender-tissue
 
 Tissue is already shipped with both Blender. However I recommend to update the default version downloading manually the most recent one, for more updated features and more stability.
 
-### Blender 2.93
+### Blender 4.0.2
 
-Tissue v0.3.52 for Blender 2.93 (latest stable release): https://github.com/alessandro-zomparelli/tissue/releases/tag/v0-3-52
+Download the latest release: https://github.com/alessandro-zomparelli/tissue/releases
 
-Development branch (usually the most updated version): https://github.com/alessandro-zomparelli/tissue/tree/b290-dev
-
-### Blender 2.79 (unsupported)
-
-Tissue v0.3.4 for Blender 2.79b (latest stable release): https://github.com/alessandro-zomparelli/tissue/releases/tag/v0-3-4
-
-Development branch (most updated version): https://github.com/alessandro-zomparelli/tissue/tree/dev1
+Current development branch (usually the most updated version): https://github.com/alessandro-zomparelli/tissue/tree/b401-dev
 
 ### Installation:
 
@@ -27,21 +21,18 @@ Development branch (most updated version): https://github.com/alessandro-zompare
 
 ### Documentation
 
-Tissue documentation for Blender 2.80: https://github.com/alessandro-zomparelli/tissue/wiki
-
+Tissue documentation for Blender's latest version: https://docs.blender.org/manual/en/latest/addons/mesh/tissue.html
 
 ### Issues
 
-Please help me keeping Tissue stable and updated, report any issues or feedback here: https://github.com/alessandro-zomparelli/tissue/issues
+Please help me keep Tissue stable and updated, report any issues or feedback here: https://github.com/alessandro-zomparelli/tissue/issues
 
 ### Contribute
 
-Tissue is free and open-source. I really think that this is the power of Blender and I wanted to give my small contribution to it.
+Tissue is free and open-source. I think that this is the power of Blender and I wanted to give my small contribution to it.
 
-If you like my work and you want to help me, please consider to support me on **Patreon**, where I share some tips about Blender, Tissue and scripting: https://www.patreon.com/alessandrozomparelli
+If you like my work and you want to help me, please consider supporting me on **Patreon**, where I share some tips about Blender, Tissue and scripting: https://www.patreon.com/alessandrozomparelli
 
 [![Patreon](http://alessandrozomparelli.com/wp-content/uploads/2020/04/patreon-transparent-vector-small.png)](https://www.patreon.com/alessandrozomparelli)
 
-A special thanks to all my patrons, in particular to my **Tissue Supporters**: *TomaLaboratory*, *Scott Shorter*, *Garrett Post*, *Kairomon*, *Art Evans*, *Justin Davis*, *John Wise*, *Avi Bryant*, *Ahmed Saber*, *SlimeSound Production*, *Steffen Meier*.
- Many thanks, Alessandro diff --git a/mesh_tissue/__init__.py b/mesh_tissue/__init__.py index 5f2b70b50..09d6a4668 100644 --- a/mesh_tissue/__init__.py +++ b/mesh_tissue/__init__.py @@ -1,6 +1,21 @@ -# SPDX-FileCopyrightText: 2017-2023 Blender Foundation -# # SPDX-License-Identifier: GPL-2.0-or-later +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### # --------------------------------- TISSUE ----------------------------------- # # ------------------------------- version 0.3 -------------------------------- # @@ -12,15 +27,15 @@ # (2017) # # # # http://www.co-de-it.com/ # -# http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Mesh/Tissue # +# https://docs.blender.org/manual/en/latest/addons/mesh/tissue.html # # # # ############################################################################ # bl_info = { "name": "Tissue", - "author": "Alessandro Zomparelli (Co-de-iT)", - "version": (0, 3, 54), - "blender": (2, 93, 0), + "author": "Alessandro Zomparelli", + "version": (0, 3, 70), + "blender": (4, 0, 2), "location": "", "description": "Tools for Computational Design", "warning": "", @@ -35,6 +50,7 @@ if "bpy" in locals(): importlib.reload(tessellate_numpy) importlib.reload(tissue_properties) importlib.reload(weight_tools) + importlib.reload(weight_reaction_diffusion) importlib.reload(dual_mesh) importlib.reload(lattice) importlib.reload(uv_to_mesh) @@ -43,11 +59,14 @@ if "bpy" in locals(): importlib.reload(material_tools) importlib.reload(curves_tools) importlib.reload(polyhedra) + importlib.reload(texture_reaction_diffusion) + importlib.reload(contour_curves) else: from . import tessellate_numpy from . import tissue_properties from . import weight_tools + from . import weight_reaction_diffusion from . import dual_mesh from . import lattice from . import uv_to_mesh @@ -56,6 +75,8 @@ else: from . import material_tools from . import curves_tools from . import polyhedra + from . import texture_reaction_diffusion + from . 
import contour_curves import bpy from bpy.props import PointerProperty, CollectionProperty, BoolProperty @@ -86,6 +107,7 @@ classes = ( tessellate_numpy.TISSUE_PT_tessellate_morphing, tessellate_numpy.TISSUE_PT_tessellate_iterations, tessellate_numpy.tissue_render_animation, + tessellate_numpy.tissue_remove, weight_tools.face_area_to_vertex_groups, weight_tools.vertex_colors_to_vertex_groups, @@ -93,31 +115,36 @@ classes = ( weight_tools.vertex_group_to_uv, weight_tools.TISSUE_PT_weight, weight_tools.TISSUE_PT_color, - weight_tools.weight_contour_curves, - weight_tools.tissue_weight_contour_curves_pattern, weight_tools.weight_contour_mask, weight_tools.weight_contour_displace, weight_tools.harmonic_weight, weight_tools.edges_deformation, weight_tools.edges_bending, weight_tools.weight_laplacian, - weight_tools.reaction_diffusion, - weight_tools.start_reaction_diffusion, - weight_tools.TISSUE_PT_reaction_diffusion, - weight_tools.TISSUE_PT_reaction_diffusion_weight, - weight_tools.reset_reaction_diffusion_weight, + weight_reaction_diffusion.start_reaction_diffusion, + weight_reaction_diffusion.TISSUE_PT_reaction_diffusion, + weight_reaction_diffusion.TISSUE_PT_reaction_diffusion_performance, + weight_reaction_diffusion.TISSUE_PT_reaction_diffusion_vector_field, + weight_reaction_diffusion.TISSUE_PT_reaction_diffusion_weight, + weight_reaction_diffusion.TISSUE_PT_reaction_diffusion_cache, + weight_reaction_diffusion.reset_reaction_diffusion_weight, weight_tools.formula_prop, - weight_tools.reaction_diffusion_prop, + weight_reaction_diffusion.reaction_diffusion_prop, weight_tools.weight_formula, weight_tools.update_weight_formula, weight_tools.curvature_to_vertex_groups, weight_tools.weight_formula_wiki, weight_tools.tissue_weight_distance, weight_tools.random_weight, - weight_tools.bake_reaction_diffusion, - weight_tools.reaction_diffusion_free_data, + weight_reaction_diffusion.bake_reaction_diffusion, + weight_reaction_diffusion.reaction_diffusion_free_data, weight_tools.tissue_weight_streamlines, + contour_curves.tissue_weight_contour_curves_pattern, + contour_curves.tissue_update_contour_curves, + contour_curves.tissue_contour_curves_prop, + contour_curves.TISSUE_PT_contour_curves, + dual_mesh.dual_mesh, dual_mesh.dual_mesh_tessellated, @@ -128,12 +155,21 @@ classes = ( curves_tools.tissue_to_curve_prop, curves_tools.tissue_convert_to_curve, - curves_tools.tissue_convert_to_curve_update, + curves_tools.tissue_update_convert_to_curve, curves_tools.TISSUE_PT_convert_to_curve, uv_to_mesh.uv_to_mesh, - polyhedra.polyhedra_wireframe + polyhedra.polyhedral_wireframe, + polyhedra.tissue_update_polyhedra, + polyhedra.tissue_polyhedra_prop, + polyhedra.TISSUE_PT_polyhedra_object, + + texture_reaction_diffusion.tex_reaction_diffusion_prop, + texture_reaction_diffusion.start_tex_reaction_diffusion, + texture_reaction_diffusion.reset_tex_reaction_diffusion, + texture_reaction_diffusion.TISSUE_PT_tex_reaction_diffusion, + texture_reaction_diffusion.TISSUE_PT_tex_reaction_diffusion_images ) def register(): @@ -147,18 +183,27 @@ def register(): bpy.types.Object.tissue_tessellate = PointerProperty( type=tissue_properties.tissue_tessellate_prop ) + bpy.types.Object.tissue_polyhedra = PointerProperty( + type=polyhedra.tissue_polyhedra_prop + ) bpy.types.Object.tissue_to_curve = PointerProperty( type=curves_tools.tissue_to_curve_prop ) + bpy.types.Object.tissue_contour_curves = PointerProperty( + type=contour_curves.tissue_contour_curves_prop + ) bpy.types.Object.formula_settings = CollectionProperty( 
type=weight_tools.formula_prop ) bpy.types.Object.reaction_diffusion_settings = PointerProperty( - type=weight_tools.reaction_diffusion_prop + type=weight_reaction_diffusion.reaction_diffusion_prop ) + bpy.types.Object.tex_reaction_diffusion_settings = PointerProperty( + type=texture_reaction_diffusion.tex_reaction_diffusion_prop + ) # weight_tools - bpy.app.handlers.frame_change_post.append(weight_tools.reaction_diffusion_def) - #bpy.app.handlers.frame_change_post.append(tessellate_numpy.anim_tessellate) + bpy.app.handlers.frame_change_post.append(weight_reaction_diffusion.reaction_diffusion_def) + bpy.app.handlers.frame_change_post.append(texture_reaction_diffusion.tex_reaction_diffusion_def) def unregister(): from bpy.utils import unregister_class diff --git a/mesh_tissue/config.py b/mesh_tissue/config.py index 016b10c8b..f7182171c 100644 --- a/mesh_tissue/config.py +++ b/mesh_tissue/config.py @@ -1,5 +1,3 @@ -# SPDX-FileCopyrightText: 2022-2023 Blender Foundation -# # SPDX-License-Identifier: GPL-2.0-or-later import bpy @@ -15,7 +13,7 @@ class tissuePreferences(bpy.types.AddonPreferences): print_stats : IntProperty( name="Print Stats", - description="Print in the console all details about the computing time", + description="Print in the console all details about the computing time.", default=1, min=0, max=4 @@ -37,9 +35,13 @@ class tissuePreferences(bpy.types.AddonPreferences): numba_spec = importlib.util.find_spec('numba') found = numba_spec is not None if found: - layout.label(text='Numba module installed correctly!', icon='INFO') - layout.prop(self, "use_numba_tess") - else: + try: + import numba + layout.label(text='Numba module installed correctly!', icon='INFO') + layout.prop(self, "use_numba_tess") + except: + found = False + if not found: layout.label(text='Numba module not installed!', icon='ERROR') layout.label(text='Installing Numba will make Tissue faster', icon='INFO') row = layout.row() @@ -56,6 +58,8 @@ class tissue_install_numba(bpy.types.Operator): try: from .utils_pip import Pip #Pip.upgrade_pip() + Pip.uninstall('llvmlite') + Pip.uninstall('numba') Pip.install('llvmlite') Pip.install('numba') from numba import jit, njit, guvectorize, float64, int32, prange diff --git a/mesh_tissue/contour_curves.py b/mesh_tissue/contour_curves.py new file mode 100644 index 000000000..4698b89ec --- /dev/null +++ b/mesh_tissue/contour_curves.py @@ -0,0 +1,1189 @@ +# SPDX-License-Identifier: GPL-2.0-or-later + +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +#-------------------------- COLORS / GROUPS EXCHANGER -------------------------# +# # +# Vertex Color to Vertex Group allow you to convert colors channles to weight # +# maps. # +# The main purpose is to use vertex colors to store information when importing # +# files from other softwares. 
The script works with the active vertex color # +# slot. # +# For use the command "Vertex Clors to Vertex Groups" use the search bar # +# (space bar). # +# # +# (c) Alessandro Zomparelli # +# (2017) # +# # +# http://www.co-de-it.com/ # +# # +################################################################################ + +import bpy, bmesh, os +import numpy as np +import math, timeit, time +from math import pi +from mathutils import Vector +from numpy import * + +from bpy.types import ( + Operator, + Panel, + PropertyGroup, + ) + +from bpy.props import ( + BoolProperty, + EnumProperty, + FloatProperty, + IntProperty, + StringProperty, + FloatVectorProperty, + IntVectorProperty, + PointerProperty +) + +from .utils import * + +def anim_contour_curves(self, context): + ob = context.object + props = ob.tissue_contour_curves + if not (ob.tissue.bool_lock or ob.tissue.bool_hold): + #try: + props.object.name + bpy.ops.object.tissue_update_contour_curves() + #except: pass + +class tissue_contour_curves_prop(PropertyGroup): + object : PointerProperty( + type=bpy.types.Object, + name="Object", + description="Source object", + update = anim_contour_curves + ) + + use_modifiers : BoolProperty( + name="Use Modifiers", default=True, + description="Apply all the modifiers", + update = anim_contour_curves + ) + + variable_bevel : BoolProperty( + name="Variable Bevel", default=False, + description="Variable Bevel", + update = anim_contour_curves + ) + + min_value : FloatProperty( + name="Offset Value", default=0., #soft_min=0, soft_max=1, + description="Offset contouring values", + update = anim_contour_curves + ) + + range_value : FloatProperty( + name="Range Values", default=100, #soft_min=0, soft_max=1, + description="Maximum range of contouring values", + update = anim_contour_curves + ) + + n_curves : IntProperty( + name="Curves", default=1000, soft_min=1, soft_max=200, + description="Number of Contour Curves", + update = anim_contour_curves + ) + + in_displace : FloatProperty( + name="Displace A", default=0, soft_min=-10, soft_max=10, + description="Pattern displace strength", + update = anim_contour_curves + ) + + out_displace : FloatProperty( + name="Displace B", default=2, soft_min=-10, soft_max=10, + description="Pattern displace strength", + update = anim_contour_curves + ) + + in_steps : IntProperty( + name="Steps A", default=1, min=0, soft_max=10, + description="Number of layers to move inwards", + update = anim_contour_curves + ) + + out_steps : IntProperty( + name="Steps B", default=1, min=0, soft_max=10, + description="Number of layers to move outwards", + update = anim_contour_curves + ) + + displace_x : BoolProperty( + name="Use X", default=True, + description="Displace along X axis", + update = anim_contour_curves + ) + + displace_y : BoolProperty( + name="Use Y", default=True, + description="Displace along Y axis", + update = anim_contour_curves + ) + + displace_z : BoolProperty( + name="Use Z", default=True, + description="Displace along Z axis", + update = anim_contour_curves + ) + + merge : BoolProperty( + name="Merge Vertices", default=True, + description="Merge points", + update = anim_contour_curves + ) + + merge_thres : FloatProperty( + name="Merge Threshold", default=0.01, min=0, soft_max=1, + description="Minimum Curve Radius", + update = anim_contour_curves + ) + + bevel_depth : FloatProperty( + name="Bevel Depth", default=0, min=0, soft_max=1, + description="", + update = anim_contour_curves + ) + + min_bevel_depth : FloatProperty( + name="Min Bevel Depth", default=0.05, 
min=0, soft_max=1, + description="", + update = anim_contour_curves + ) + + max_bevel_depth : FloatProperty( + name="Max Bevel Depth", default=0.20, min=0, soft_max=1, + description="", + update = anim_contour_curves + ) + + remove_open_curves : BoolProperty( + name="Remove Open Curves", default=False, + description="Remove Open Curves", + update = anim_contour_curves + ) + + vertex_group_pattern : StringProperty( + name="Displace", default='', + description="Vertex Group used for pattern displace", + update = anim_contour_curves + ) + + vertex_group_bevel : StringProperty( + name="Bevel", default='', + description="Variable Bevel depth", + update = anim_contour_curves + ) + + object_name : StringProperty( + name="Active Object", default='', + description="", + update = anim_contour_curves + ) + + vertex_group_contour : StringProperty( + name="Contour", default="", + description="Vertex Group used for contouring", + update = anim_contour_curves + ) + + clean_distance : FloatProperty( + name="Clean Distance", default=0.005, min=0, soft_max=10, + description="Remove short segments", + update = anim_contour_curves + ) + + spiralized: BoolProperty( + name='Spiralized', default=False, + description='Create a Spiral Contour. Works better with dense meshes.', + update = anim_contour_curves + ) + + spiral_axis: FloatVectorProperty( + name="Spiral Axis", default=(0,0,1), + description="Axis of the Spiral (in local coordinates)", + update = anim_contour_curves + ) + + spiral_rotation : FloatProperty( + name="Spiral Rotation", default=0, min=0, max=2*pi, + description="", + update = anim_contour_curves + ) + + contour_mode : EnumProperty( + items=( + ('VECTOR', "Vector", "Orient the Contour to a given vector starting from the origin of the object"), + ('OBJECT', "Object", "Orient the Contour to a target object's Z"), + ('WEIGHT', "Weight", "Contour based on a Vertex Group"), + ('ATTRIBUTE', "Attribute", "Contour based on an Attribute (Vertex > Float)"), + ('GEODESIC', "Geodesic Distance", "Contour based on the geodesic distance from the chosen vertices"), + ('TOPOLOGY', "Topology Distance", "Contour based on the topology distance from the chosen vertices") + ), + default='VECTOR', + name="Mode used for the Contour Curves", + update = anim_contour_curves + ) + + contour_vector : FloatVectorProperty( + name='Vector', description='Constant Vector', default=(0.0, 0.0, 1.0), + update = anim_contour_curves + ) + + contour_vector_object : PointerProperty( + type=bpy.types.Object, + name="", + description="Target Object", + update = anim_contour_curves + ) + + contour_offset : FloatProperty( + name="Offset", default=0.05, min=0.000001, soft_min=0.01, soft_max=10, + description="Contour offset along the Vector", + update = anim_contour_curves + ) + + seeds_mode : EnumProperty( + items=( + ('BOUND', "Boundary Edges", "Compute the distance starting from the boundary edges"), + ('SHARP', "Sharp Edges", "Compute the distance starting from the sharp edges"), + ('WEIGHT', "Weight", "Compute the distance starting from the selected vertex group") + ), + default='BOUND', + name="Seeds used for computing the distance", + update = anim_contour_curves + ) + + vertex_group_seed : StringProperty( + name="Seeds", default="", + description="Vertex Group used for computing the distance", + update = anim_contour_curves + ) + + spline_type : EnumProperty( + items=( + ('POLY', "Poly", "Generate Poly curves"), + ('NURBS', "NURBS", "Generate NURBS curves") + ), + default='POLY', + name="Spline type", + update = anim_contour_curves 
+ ) + + contour_attribute : StringProperty( + name="Contour Attribute", default='', + description="Vertex > Float attribute used for contouring", + update = anim_contour_curves + ) + + +class tissue_weight_contour_curves_pattern(Operator): + bl_idname = "object.tissue_weight_contour_curves_pattern" + bl_label = "Contour Curves" + bl_description = ("") + bl_options = {'REGISTER', 'UNDO'} + + object : StringProperty( + name="Object", + description="Source object", + default = "" + ) + + use_modifiers : BoolProperty( + name="Use Modifiers", default=True, + description="Apply all the modifiers" + ) + + variable_bevel : BoolProperty( + name="Variable Bevel", default=False, + description="Variable Bevel" + ) + + min_value : FloatProperty( + name="Offset Value", default=0., + description="Offset contouring values" + ) + + range_value : FloatProperty( + name="Range Values", default=100, + description="Maximum range of contouring values" + ) + + n_curves : IntProperty( + name="Curves", default=1000, soft_min=1, soft_max=200, + description="Number of Contour Curves" + ) + + min_rad = 1 + max_rad = 1 + + in_displace : FloatProperty( + name="Displace A", default=0, soft_min=-10, soft_max=10, + description="Pattern displace strength" + ) + + out_displace : FloatProperty( + name="Displace B", default=2, soft_min=-10, soft_max=10, + description="Pattern displace strength" + ) + + in_steps : IntProperty( + name="Steps A", default=1, min=0, soft_max=10, + description="Number of layers to move inwards" + ) + + out_steps : IntProperty( + name="Steps B", default=1, min=0, soft_max=10, + description="Number of layers to move outwards" + ) + + displace_x : BoolProperty( + name="Use X", default=True, + description="Displace along X axis" + ) + + displace_y : BoolProperty( + name="Use Y", default=True, + description="Displace along Y axis" + ) + + displace_z : BoolProperty( + name="Use Z", default=True, + description="Displace along Z axis" + ) + + merge : BoolProperty( + name="Merge Vertices", default=True, + description="Merge points" + ) + + merge_thres : FloatProperty( + name="Merge Threshold", default=0.01, min=0, soft_max=1, + description="Minimum Curve Radius" + ) + + bevel_depth : FloatProperty( + name="Bevel Depth", default=0, min=0, soft_max=1, + description="" + ) + + min_bevel_depth : FloatProperty( + name="Min Bevel Depth", default=0.05, min=0, soft_max=1, + description="" + ) + + max_bevel_depth : FloatProperty( + name="Max Bevel Depth", default=0.20, min=0, soft_max=1, + description="" + ) + + remove_open_curves : BoolProperty( + name="Remove Open Curves", default=False, + description="Remove Open Curves" + ) + + vertex_group_pattern : StringProperty( + name="Displace", default='', + description="Vertex Group used for pattern displace" + ) + + vertex_group_bevel : StringProperty( + name="Bevel", default='', + description="Variable Bevel depth" + ) + + object_name : StringProperty( + name="Active Object", default='', + description="" + ) + + contour_attribute : StringProperty( + name="Contour Attribute", default='', + description="Vertex > Float attribute used for contouring" + ) + + try: vg_name = bpy.context.object.vertex_groups.active.name + except: vg_name = '' + + vertex_group_contour : StringProperty( + name="Contour", default=vg_name, + description="Vertex Group used for contouring" + ) + + clean_distance : FloatProperty( + name="Clean Distance", default=0.005, min=0, soft_max=10, + description="Remove short segments" + ) + + spiralized: BoolProperty( + name='Spiralized', default=False, + 
description='Create a Spiral Contour. Works better with dense meshes.' + ) + + spiral_axis: FloatVectorProperty( + name="Spiral Axis", default=(0,0,1), + description="Axis of the Spiral (in local coordinates)" + ) + + spiral_rotation : FloatProperty( + name="Spiral Rotation", default=0, min=0, max=2*pi, + description="" + ) + + bool_hold : BoolProperty( + name="Hold", + description="Wait...", + default=False + ) + + contour_mode : EnumProperty( + items=( + ('VECTOR', "Vector", "Orient the Contour to a given vector starting from the origin of the object"), + ('OBJECT', "Object", "Orient the Contour to a target object's Z"), + ('WEIGHT', "Weight", "Contour based on a Vertex Group"), + ('ATTRIBUTE', "Attribute", "Contour based on an Attribute (Vertex > Float)"), + ('GEODESIC', "Geodesic Distance", "Contour based on the geodesic distance from the chosen vertices"), + ('TOPOLOGY', "Topology Distance", "Contour based on the topology distance from the chosen vertices") + ), + default='VECTOR', + name="Mode used for the Contour Curves" + ) + + contour_vector : FloatVectorProperty( + name='Vector', description='Constant Vector', default=(0.0, 0.0, 1.0) + ) + + contour_vector_object : StringProperty( + name="Object", + description="Target object", + default = "" + ) + + contour_offset : FloatProperty( + name="Offset", default=0.05, min=0.000001, soft_min=0.01, soft_max=10, + description="Contour offset along the Vector" + ) + + seeds_mode : EnumProperty( + items=( + ('BOUND', "Boundary Edges", "Compute the distance starting from the boundary edges"), + ('SHARP', "Sharp Edges", "Compute the distance starting from the sharp edges"), + ('WEIGHT', "Weight", "Compute the distance starting from the selected vertex group") + ), + default='BOUND', + name="Seeds used for computing the distance" + ) + + vertex_group_seed : StringProperty( + name="Seeds", default=vg_name, + description="Vertex Group used for computing the distance" + ) + + spline_type : EnumProperty( + items=( + ('POLY', "Poly", "Generate Poly curves"), + ('NURBS', "NURBS", "Generate NURBS curves") + ), + default='POLY', + name="Spline type" + ) + + def invoke(self, context, event): + self.object = context.object.name + return context.window_manager.invoke_props_dialog(self, width=250) + + def draw(self, context): + ob = context.object + ob0 = bpy.data.objects[self.object] + + if self.contour_mode == 'WEIGHT': + try: + if self.vertex_group_contour not in [vg.name for vg in ob.vertex_groups]: + self.vertex_group_contour = ob.vertex_groups.active.name + except: + self.contour_mode == 'VECTOR' + + if not self.bool_hold: + self.object = ob.name + self.bool_hold = True + + layout = self.layout + col = layout.column(align=True) + col.prop(self, "use_modifiers") + col.label(text="Contour Curves:") + + row = col.row() + row.prop(self, "spline_type", icon='NONE', expand=True, + slider=True, toggle=False, icon_only=False, event=False, + full_event=False, emboss=True, index=-1) + col.separator() + col.prop(self, "contour_mode", text="Mode") + + if self.contour_mode == 'VECTOR': + row = col.row() + row.prop(self,'contour_vector') + elif self.contour_mode == 'WEIGHT': + col.prop_search(self, 'vertex_group_contour', ob, "vertex_groups", text='Group') + elif self.contour_mode == 'ATTRIBUTE': + col.prop_search(self, 'contour_attribute', ob0.data, "attributes", text='Attribute') + is_attribute = True + if self.contour_attribute in ob0.data.attributes: + attr = ob0.data.attributes[self.contour_attribute] + is_attribute = attr.data_type == 'FLOAT' and 
attr.domain == 'POINT' + else: + is_attribute = False + if not is_attribute: + col.label(text="Please select a (Vertex > Float) Attribute for contouring.", icon='ERROR') + elif self.contour_mode in ('TOPOLOGY','GEODESIC'): + col.prop(self, "seeds_mode", text="Seeds") + if self.seeds_mode == 'WEIGHT': + col.prop_search(self, 'vertex_group_seed', ob, "vertex_groups", text='Group') + elif self.contour_mode == 'OBJECT': + col.prop_search(self, "contour_vector_object", context.scene, "objects", text='Object') + col.separator() + + if self.contour_mode == 'OBJECT': + col.prop(self,'contour_offset') + col.prop(self,'n_curves', text='Max Curves') + elif self.contour_mode in ('VECTOR', 'GEODESIC', 'ATTRIBUTE'): + col.prop(self,'contour_offset') + row = col.row(align=True) + row.prop(self,'min_value') + row.prop(self,'range_value') + col.prop(self,'n_curves', text='Max Curves') + elif self.contour_mode in ('TOPOLOGY', 'WEIGHT'): + row = col.row(align=True) + row.prop(self,'min_value') + row.prop(self,'range_value') + col.prop(self,'n_curves') + + col.separator() + col.label(text='Curves Bevel:') + col.prop(self,'variable_bevel') + row = col.row(align=True) + row.prop(self,'min_bevel_depth') + row.prop(self,'max_bevel_depth') + row2 = col.row(align=True) + row2.prop_search(self, 'vertex_group_bevel', ob, "vertex_groups", text='') + if not self.variable_bevel: + row.enabled = row2.enabled = False + col.separator() + + col.label(text="Displace Pattern:") + col.prop_search(self, 'vertex_group_pattern', ob, "vertex_groups", text='') + if self.vertex_group_pattern != '': + col.separator() + row = col.row(align=True) + row.prop(self,'in_steps') + row.prop(self,'out_steps') + row = col.row(align=True) + row.prop(self,'in_displace') + row.prop(self,'out_displace') + col.separator() + row = col.row(align=True) + row.label(text="Axis") + row.prop(self,'displace_x', text="X", toggle=1) + row.prop(self,'displace_y', text="Y", toggle=1) + row.prop(self,'displace_z', text="Z", toggle=1) + col.separator() + + col.label(text='Clean Curves:') + col.prop(self,'clean_distance') + col.prop(self,'remove_open_curves') + + def execute(self, context): + ob0 = bpy.context.object + + self.object_name = "Contour Curves" + # Check if existing object with same name + names = [o.name for o in bpy.data.objects] + if self.object_name in names: + count_name = 1 + while True: + test_name = self.object_name + '.{:03d}'.format(count_name) + if not (test_name in names): + self.object_name = test_name + break + count_name += 1 + + if bpy.ops.object.select_all.poll(): + bpy.ops.object.select_all(action='DESELECT') + bpy.ops.object.mode_set(mode='OBJECT') + + bool_update = False + if context.object == ob0: + auto_layer_collection() + curve = bpy.data.curves.new(self.object_name,'CURVE') + new_ob = bpy.data.objects.new(self.object_name,curve) + bpy.context.collection.objects.link(new_ob) + bpy.context.view_layer.objects.active = new_ob + if bpy.ops.object.select_all.poll(): + bpy.ops.object.select_all(action='DESELECT') + bpy.ops.object.mode_set(mode='OBJECT') + new_ob.select_set(True) + else: + new_ob = context.object + bool_update = True + + # Store parameters + props = new_ob.tissue_contour_curves + new_ob.tissue.bool_hold = True + if self.object in bpy.data.objects.keys(): + props.object = bpy.data.objects[self.object] + props.use_modifiers = self.use_modifiers + props.variable_bevel = self.variable_bevel + props.min_value = self.min_value + props.range_value = self.range_value + props.n_curves = self.n_curves + props.in_displace = 
self.in_displace + props.out_displace = self.out_displace + props.in_steps = self.in_steps + props.out_steps = self.out_steps + props.displace_x = self.displace_x + props.displace_y = self.displace_y + props.displace_z = self.displace_z + props.merge = self.merge + props.merge_thres = self.merge_thres + props.bevel_depth = self.bevel_depth + props.min_bevel_depth = self.min_bevel_depth + props.max_bevel_depth = self.max_bevel_depth + props.remove_open_curves = self.remove_open_curves + props.vertex_group_pattern = self.vertex_group_pattern + props.vertex_group_bevel = self.vertex_group_bevel + props.object_name = self.object_name + props.vertex_group_contour = self.vertex_group_contour + props.clean_distance = self.clean_distance + props.spiralized = self.spiralized + props.spiral_axis = self.spiral_axis + props.spiral_rotation = self.spiral_rotation + props.contour_mode = self.contour_mode + if self.contour_vector_object in bpy.data.objects.keys(): + props.contour_vector_object = bpy.data.objects[self.contour_vector_object] + props.contour_vector = self.contour_vector + props.contour_offset = self.contour_offset + props.seeds_mode = self.seeds_mode + props.vertex_group_seed = self.vertex_group_seed + props.spline_type = self.spline_type + props.contour_attribute = self.contour_attribute + new_ob.tissue.bool_hold = False + + new_ob.tissue.tissue_type = 'CONTOUR_CURVES' + try: bpy.ops.object.tissue_update_contour_curves() + except RuntimeError as e: + print("no update") + bpy.data.objects.remove(new_ob) + remove_temp_objects() + self.report({'ERROR'}, str(e)) + return {'CANCELLED'} + if not bool_update: + self.object_name = new_ob.name + #self.working_on = self.object_name + new_ob.location = ob0.location + new_ob.matrix_world = ob0.matrix_world + + # Assign collection of the base object + old_coll = new_ob.users_collection + if old_coll != ob0.users_collection: + for c in old_coll: + c.objects.unlink(new_ob) + for c in ob0.users_collection: + c.objects.link(new_ob) + context.view_layer.objects.active = new_ob + + return {'FINISHED'} + +class tissue_update_contour_curves(Operator): + bl_idname = "object.tissue_update_contour_curves" + bl_label = "Update Contour Curves" + bl_description = ("Update a previously generated Contour Curves object") + bl_options = {'REGISTER', 'UNDO'} + + def execute(self, context): + ob = context.object + props = ob.tissue_contour_curves + _ob0 = props.object + n_curves = props.n_curves + tt0 = time.time() + tt1 = time.time() + tissue_time(None,'Tissue: Contour Curves of "{}"...'.format(ob.name), levels=0) + + ob0 = convert_object_to_mesh(_ob0, apply_modifiers=props.use_modifiers) + ob0.name = "_tissue_tmp_ob0" + me0 = ob0.data + + # generate new bmesh + bm = bmesh.new() + bm.from_mesh(me0) + n_verts = len(bm.verts) + vertices, normals = get_vertices_and_normals_numpy(me0) + + if props.contour_mode == 'OBJECT': + try: + vec_ob = props.contour_vector_object + vec_ob_name = vec_ob.name + except: + bm.free() + bpy.data.objects.remove(ob0) + self.report({'ERROR'}, "Please select an target Object") + return {'CANCELLED'} + + tt1 = tissue_time(tt1, "Load objects", levels=1) + + # store weight values + if props.contour_mode in ('VECTOR','OBJECT'): + ob0_matrix = np.matrix(ob0.matrix_world.to_3x3().transposed()) + global_verts = np.matmul(vertices,ob0_matrix) + global_verts += np.array(ob0.matrix_world.translation) + if props.contour_mode == 'OBJECT' and props.contour_vector_object: + vec_ob = props.contour_vector_object + global_verts -= 
np.array(vec_ob.matrix_world.translation) + vec_ob_matrix = np.matrix(vec_ob.matrix_world.to_3x3().inverted().transposed()) + global_verts = np.matmul(global_verts,vec_ob_matrix) + weight = global_verts[:,2].A1 + elif props.contour_mode == 'VECTOR': + vec = np.array(props.contour_vector) + vec_len = np.linalg.norm(vec) + if vec_len == 0: + vec = np.array((0,0,1)) + vec_len = 1 + else: + vec /= vec_len + vec_len = 1 + global_verts = global_verts.A + projected_verts = global_verts * vec + projected_verts = np.sum(projected_verts,axis=1)[:,np.newaxis] + weight = projected_verts.reshape((-1)) + elif props.contour_mode == 'WEIGHT': + try: + weight = get_weight_numpy(ob0.vertex_groups[props.vertex_group_contour], len(me0.vertices)) + except: + bm.free() + bpy.data.objects.remove(ob0) + self.report({'ERROR'}, "Please select a Vertex Group for contouring") + return {'CANCELLED'} + elif props.contour_mode == 'ATTRIBUTE': + if props.contour_attribute in me0.attributes: + weight = [0]*n_verts + me0.attributes[props.contour_attribute].data.foreach_get('value',weight) + weight = np.array(weight) + else: + bm.free() + bpy.data.objects.remove(ob0) + self.report({'ERROR'}, "Please select a (Vertex > Float) Attribute for contouring") + return {'CANCELLED'} + elif props.contour_mode in ('GEODESIC','TOPOLOGY'): + cancel = False + weight = [None]*n_verts + seed_verts = [] + bm.verts.ensure_lookup_table() + if props.seeds_mode == 'BOUND': + for v in bm.verts: + if v.is_boundary: + seed_verts.append(v) + weight[v.index] = 0 + if props.seeds_mode == 'SHARP': + for e, bme in zip(me0.edges, bm.edges): + if e.use_edge_sharp: + seed_verts.append(bme.verts[0]) + seed_verts.append(bme.verts[1]) + seed_verts = list(set(seed_verts)) + if len(seed_verts) == 0: cancel = True + for i in [v.index for v in seed_verts]: + weight[i] = 0 + if props.seeds_mode == 'WEIGHT': + try: + seeds = get_weight_numpy(ob0.vertex_groups[props.vertex_group_seed], len(me0.vertices)) + except: + bm.free() + bpy.data.objects.remove(ob0) + self.report({'ERROR'}, "Please select a Vertex Group as seed") + return {'CANCELLED'} + for i,v in enumerate(bm.verts): + if seeds[i]>0.999999: + seed_verts.append(v) + weight[i] = 0 + if cancel or len(seed_verts)==0: + bm.free() + bpy.data.objects.remove(ob0) + self.report({'ERROR'}, "No seed vertices found") + return {'CANCELLED'} + + weight = fill_neighbors_attribute(seed_verts, weight, props.contour_mode) + weight = np.array(weight) + print(weight[weight==None]) + weight[weight==None] = 0 + print(weight[weight==None]) + + try: + pattern_weight = get_weight_numpy(ob0.vertex_groups[props.vertex_group_pattern], len(me0.vertices)) + except: + #self.report({'WARNING'}, "There is no Vertex Group assigned to the pattern displace") + pattern_weight = np.zeros(len(me0.vertices)) + + weight_bevel = False + if props.variable_bevel: + try: + bevel_weight = get_weight_numpy(ob0.vertex_groups[props.vertex_group_bevel], len(me0.vertices)) + weight_bevel = True + except: + bevel_weight = np.ones(len(me0.vertices)) + else: + bevel_weight = np.ones(len(me0.vertices)) + + total_verts = np.zeros((0,3)) + total_radii = np.zeros((0,1)) + total_edges_index = np.zeros((0)).astype('int') + total_segments = []# np.array([]) + radius = [] + + tt1 = tissue_time(tt1, "Compute values", levels=1) + + # start iterate contours levels + filtered_edges = get_edges_id_numpy(me0) + + min_value = props.min_value + max_value = props.min_value + props.range_value + + if props.contour_mode in ('VECTOR','OBJECT','GEODESIC','ATTRIBUTE'): + delta_iso 
= props.contour_offset + n_curves = min(int((np.max(weight)-props.min_value)/delta_iso)+1, props.n_curves) + else: + if n_curves == 1: + delta_iso = props.range_value/2 + else: + delta_iso = props.range_value/(n_curves-1) + if props.contour_mode == 'TOPOLOGY': + weight = weight/np.max(weight) + + if False: + edges_verts = get_attribute_numpy(me0.edges,"vertices",mult=2).astype('int') + edges_vec = vertices[edges_verts[:,0]]-vertices[edges_verts[:,1]] + #edges_vec = global_verts[edges_verts[:,0]]-global_verts[edges_verts[:,1]] + edges_length = np.linalg.norm(edges_vec,axis=1) + edges_vec /= edges_length[:,np.newaxis] + edges_dw = np.abs(weight[edges_verts[:,0]]-weight[edges_verts[:,1]]) + edges_bevel = delta_iso*edges_length/edges_dw/2*0 + 1 + + ''' + # numpy method + faces_n_verts = get_attribute_numpy(me0.polygons, attribute='loop_total').astype('int') + faces_verts = get_attribute_numpy(me0.polygons, attribute='vertices', size=np.sum(faces_n_verts)).astype('int') + faces_weight = weight[faces_verts] + faces_weight = np.split(faces_weight, np.cumsum(faces_n_verts)[:-1]) + ''' + faces_weight = [np.array([weight[v] for v in p.vertices]) for p in me0.polygons] + try: + fw_min = np.min(faces_weight, axis=1) + fw_max = np.max(faces_weight, axis=1) + except: + # necessary for irregular meshes + fw_min = np.array([min(fw) for fw in faces_weight]) + fw_max = np.array([max(fw) for fw in faces_weight]) + bm_faces = np.array(bm.faces) + + tt1 = tissue_time(tt1, "Compute face values", levels=1) + for c in range(n_curves): + if delta_iso: + iso_val = c*delta_iso + min_value + else: + iso_val = min_value + range_value/2 + if iso_val > max_value: break + + # remove passed faces + bool_mask = iso_val <= fw_max + bm_faces = bm_faces[bool_mask] + fw_min = fw_min[bool_mask] + fw_max = fw_max[bool_mask] + + # mask faces + bool_mask = fw_min <= iso_val + faces_mask = bm_faces[bool_mask] + + count = len(total_verts) + + if not weight_bevel and props.variable_bevel: + bevel_weight = np.full(n_verts, c/n_curves) + new_filtered_edges, edges_index, verts, bevel = contour_edges_pattern(props, c, len(total_verts), iso_val, vertices, normals, filtered_edges, weight, pattern_weight, bevel_weight) + #bevel = edges_bevel[edges_index][:,np.newaxis] + + if len(edges_index) > 0: + if props.variable_bevel and props.max_bevel_depth != props.min_bevel_depth and False: + #min_radius = min(props.min_bevel_depth, props.max_bevel_depth) + #max_radius = max(props.min_bevel_depth, props.max_bevel_depth) + min_radius = props.min_bevel_depth + max_radius = props.max_bevel_depth + min_radius = min_radius / max(0.0001,max_radius) + radii = min_radius + bevel*(1 - min_radius) + else: + radii = bevel + else: + continue + + if verts[0,0] == None: continue + else: filtered_edges = new_filtered_edges + edges_id = {} + for i, id in enumerate(edges_index): edges_id[id] = i + count + + if len(verts) == 0: continue + + # finding segments + segments = [] + for f in faces_mask: + seg = [] + for e in f.edges: + try: + #seg.append(new_ids[np.where(edges_index == e.index)[0][0]]) + seg.append(edges_id[e.index]) + if len(seg) == 2: + segments.append(seg) + seg = [] + except: pass + + total_segments = total_segments + segments + total_verts = np.concatenate((total_verts, verts)) + total_radii = np.concatenate((total_radii, radii)) + total_edges_index = np.concatenate((total_edges_index, edges_index)) + tt1 = tissue_time(tt1, "Compute curves", levels=1) + + if len(total_segments) > 0: + ordered_points, ordered_points_edge_id = 
find_curves_attribute(total_segments, len(total_verts), total_edges_index) + + total_tangents = np.zeros((len(total_verts),3)) + for curve in ordered_points: + np_curve = np.array(curve).astype('int') + curve_pts = np.array(total_verts[np_curve], dtype=np.float64) + tangents = np.roll(curve_pts,1) - np.roll(curve_pts,-1) + tangents /= np.linalg.norm(tangents,axis=1)[:,np.newaxis] + total_tangents[curve] = tangents + + step_time = timeit.default_timer() + ob.data.splines.clear() + if props.variable_bevel:# and not weight_bevel: + total_radii = np.interp(total_radii, (total_radii.min(), total_radii.max()), (props.min_bevel_depth, props.max_bevel_depth)) + ob.data = curve_from_pydata(total_verts, total_radii, ordered_points, ob0.name + '_ContourCurves', props.remove_open_curves, merge_distance=props.clean_distance, only_data=True, curve=ob.data, spline_type=props.spline_type) + #context.view_layer.objects.active = crv + if props.variable_bevel: + if not weight_bevel: + ob.data.bevel_depth = 1 + else: + ob.data.bevel_depth = max(props.max_bevel_depth, props.min_bevel_depth) + tt1 = tissue_time(tt1, "Store curves data", levels=1) + else: + ob.data.splines.clear() + pass + bm.free() + for o in bpy.data.objects: + if '_tissue_tmp_' in o.name: + bpy.data.objects.remove(o) + + tt0 = tissue_time(tt0, "Contour Curves", levels=0) + return {'FINISHED'} + + +class TISSUE_PT_contour_curves(Panel): + bl_space_type = 'PROPERTIES' + bl_region_type = 'WINDOW' + bl_context = "data" + bl_label = "Tissue Contour Curves" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + try: + #bool_curve = context.object.tissue_to_curve.object != None + ob = context.object + return ob.type == 'CURVE' and ob.tissue.tissue_type == 'CONTOUR_CURVES' + except: + return False + + def draw(self, context): + ob = context.object + props = ob.tissue_contour_curves + ob0 = bpy.data.objects[props.object.name] + + layout = self.layout + #layout.use_property_split = True + #layout.use_property_decorate = False + col = layout.column(align=True) + row = col.row(align=True) + #col.operator("object.tissue_update_convert_to_curve", icon='FILE_REFRESH', text='Refresh') + row.operator("object.tissue_update_tessellate_deps", icon='FILE_REFRESH', text='Refresh') #### + lock_icon = 'LOCKED' if ob.tissue.bool_lock else 'UNLOCKED' + #lock_icon = 'PINNED' if props.bool_lock else 'UNPINNED' + deps_icon = 'LINKED' if ob.tissue.bool_dependencies else 'UNLINKED' + row.prop(ob.tissue, "bool_dependencies", text="", icon=deps_icon) + row.prop(ob.tissue, "bool_lock", text="", icon=lock_icon) + col2 = row.column(align=True) + col2.prop(ob.tissue, "bool_run", text="",icon='TIME') + col2.enabled = not ob.tissue.bool_lock + col2 = row.column(align=True) + col2.operator("mesh.tissue_remove", text="", icon='X') + + col.separator() + row = col.row(align=True) + row.prop_search(props, "object", context.scene, "objects", text="") + row.prop(props, "use_modifiers", icon='MODIFIER', text='') + col.separator() + col.label(text="Contour Curves:") + row = col.row() + row.prop(props, "spline_type", icon='NONE', expand=True, + slider=True, toggle=False, icon_only=False, event=False, + full_event=False, emboss=True, index=-1) + col.separator() + col.prop(props, "contour_mode", text="Mode") + + if props.contour_mode == 'VECTOR': + row = col.row() + row.prop(props,'contour_vector') + elif props.contour_mode == 'WEIGHT': + col.prop_search(props, 'vertex_group_contour', ob0, "vertex_groups", text='Group') + elif props.contour_mode == 'ATTRIBUTE': + 
col.prop_search(props, 'contour_attribute', ob0.data, "attributes", text='Attribute') + is_attribute = True + if props.contour_attribute in ob0.data.attributes: + attr = ob0.data.attributes[props.contour_attribute] + is_attribute = attr.data_type == 'FLOAT' and attr.domain == 'POINT' + else: + is_attribute = False + if not is_attribute: + col.label(text="Please select a (Vertex > Float) Attribute for contouring.", icon='ERROR') + elif props.contour_mode in ('TOPOLOGY','GEODESIC'): + col.prop(props, "seeds_mode", text="Seeds") + if props.seeds_mode == 'WEIGHT': + col.prop_search(props, 'vertex_group_seed', ob0, "vertex_groups", text='Group') + elif props.contour_mode == 'OBJECT': + col.prop_search(props, "contour_vector_object", context.scene, "objects", text='Object') + col.separator() + + if props.contour_mode == 'OBJECT': + col.prop(props,'contour_offset') + col.prop(props,'n_curves', text='Max Curves') + elif props.contour_mode in ('VECTOR','GEODESIC','ATTRIBUTE'): + col.prop(props,'contour_offset') + row = col.row(align=True) + row.prop(props,'min_value') + row.prop(props,'range_value') + col.prop(props,'n_curves', text='Max Curves') + elif props.contour_mode in ('TOPOLOGY', 'WEIGHT'): + row = col.row(align=True) + row.prop(props,'min_value') + row.prop(props,'range_value') + col.prop(props,'n_curves') + + col.separator() + col.label(text='Curves Bevel:') + col.prop(props,'variable_bevel') + row = col.row(align=True) + row.prop(props,'min_bevel_depth') + row.prop(props,'max_bevel_depth') + row2 = col.row(align=True) + row2.prop_search(props, 'vertex_group_bevel', ob0, "vertex_groups", text='') + if not props.variable_bevel: + row.enabled = row2.enabled = False + col.separator() + + col.label(text="Displace Pattern:") + col.prop_search(props, 'vertex_group_pattern', ob0, "vertex_groups", text='') + if props.vertex_group_pattern != '': + col.separator() + row = col.row(align=True) + row.prop(props,'in_steps') + row.prop(props,'out_steps') + row = col.row(align=True) + row.prop(props,'in_displace') + row.prop(props,'out_displace') + col.separator() + row = col.row(align=True) + row.label(text="Axis") + row.prop(props,'displace_x', text="X", toggle=1) + row.prop(props,'displace_y', text="Y", toggle=1) + row.prop(props,'displace_z', text="Z", toggle=1) + col.separator() + row=col.row(align=True) + + col.label(text='Clean Curves:') + col.prop(props,'clean_distance') + col.prop(props,'remove_open_curves') + +def contour_edges_pattern(operator, c, verts_count, iso_val, vertices, normals, filtered_edges, weight, pattern_weight, bevel_weight): + # vertices indexes + id0 = filtered_edges[:,0] + id1 = filtered_edges[:,1] + # vertices weight + w0 = weight[id0] + w1 = weight[id1] + # weight condition + bool_w0 = w0 <= iso_val + bool_w1 = w1 <= iso_val + + # mask all edges that have one weight value below the iso value + mask_new_verts = np.logical_xor(bool_w0, bool_w1) + if not mask_new_verts.any(): + return np.array([[None]]), {}, np.array([[None]]), np.array([[None]]) + + id0 = id0[mask_new_verts] + id1 = id1[mask_new_verts] + # filter arrays + v0 = vertices[id0] + v1 = vertices[id1] + n0 = normals[id0] + n1 = normals[id1] + w0 = w0[mask_new_verts] + w1 = w1[mask_new_verts] + pattern0 = pattern_weight[id0] + pattern1 = pattern_weight[id1] + try: + bevel0 = bevel_weight[id0] + bevel1 = bevel_weight[id1] + except: pass + + param = (iso_val - w0)/(w1-w0) + if c%(operator.in_steps + operator.out_steps) < operator.in_steps: + mult = operator.in_displace + else: + mult = operator.out_displace + 
pattern_value = pattern0 + (pattern1-pattern0)*param + try: + bevel_value = bevel0 + (bevel1-bevel0)*param + bevel_value = np.expand_dims(bevel_value,axis=1) + except: bevel_value = None + disp = pattern_value * mult + + param = np.expand_dims(param,axis=1) + disp = np.expand_dims(disp,axis=1) + verts = v0 + (v1-v0)*param + norm = n0 + (n1-n0)*param + axis = np.array((operator.displace_x, operator.displace_y, operator.displace_z)) + norm[:] *= axis + verts = verts + norm*disp + + # indexes of edges with new vertices + edges_index = filtered_edges[mask_new_verts][:,2] + + # remove all edges completely below the iso value + #mask_edges = np.logical_not(np.logical_and(bool_w0, bool_w1)) + #filtered_edges = filtered_edges[mask_edges] + return filtered_edges.astype("int"), edges_index, verts, bevel_value diff --git a/mesh_tissue/curves_tools.py b/mesh_tissue/curves_tools.py index 3aa39cf3d..22bc93b39 100644 --- a/mesh_tissue/curves_tools.py +++ b/mesh_tissue/curves_tools.py @@ -1,7 +1,23 @@ -# SPDX-FileCopyrightText: 2022-2023 Blender Foundation -# # SPDX-License-Identifier: GPL-2.0-or-later +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# +# ##### END GPL LICENSE BLOCK ##### + # # # (c) Alessandro Zomparelli # # (2017) # @@ -37,7 +53,8 @@ from .utils import ( convert_object_to_mesh, get_weight_numpy, loops_from_bmesh, - get_mesh_before_subs + get_mesh_before_subs, + tissue_time ) import time @@ -48,7 +65,7 @@ def anim_curve_active(self, context): try: props.object.name if not ob.tissue.bool_lock: - bpy.ops.object.tissue_convert_to_curve_update() + bpy.ops.object.tissue_update_convert_to_curve() except: pass @@ -67,7 +84,7 @@ class tissue_to_curve_prop(PropertyGroup): ) bool_lock : BoolProperty( name="Lock", - description="Prevent automatic update on settings changes or if other objects have it in the hierarchy", + description="Prevent automatic update on settings changes or if other objects have it in the hierarchy.", default=False, update = anim_curve_active ) @@ -79,7 +96,7 @@ class tissue_to_curve_prop(PropertyGroup): ) bool_run : BoolProperty( name="Animatable Curve", - description="Automatically recompute the conversion when the frame is changed", + description="Automatically recompute the conversion when the frame is changed.", default = False ) use_modifiers : BoolProperty( @@ -480,12 +497,12 @@ class tissue_convert_to_curve(Operator): new_ob.tissue.bool_lock = False - bpy.ops.object.tissue_convert_to_curve_update() + bpy.ops.object.tissue_update_convert_to_curve() return {'FINISHED'} -class tissue_convert_to_curve_update(Operator): - bl_idname = "object.tissue_convert_to_curve_update" +class tissue_update_convert_to_curve(Operator): + bl_idname = "object.tissue_update_convert_to_curve" bl_label = "Tissue Update Curve" bl_description = "Update Curve object" bl_options = {'REGISTER', 'UNDO'} @@ -500,9 +517,10 @@ class tissue_convert_to_curve_update(Operator): return False def execute(self, context): + ob = context.object + tissue_time(None,'Tissue: Convert to Curve of "{}"...'.format(ob.name), levels=0) start_time = time.time() - ob = context.object props = ob.tissue_to_curve ob0 = props.object if props.mode == 'PARTICLES': @@ -669,8 +687,7 @@ class tissue_convert_to_curve_update(Operator): ob.data.splines.update() if not props.bool_smooth: bpy.ops.object.shade_flat() - end_time = time.time() - print('Tissue: object "{}" converted to Curve in {:.4f} sec'.format(ob.name, end_time-start_time)) + tissue_time(start_time,'Convert to Curve',levels=0) return {'FINISHED'} @@ -700,7 +717,7 @@ class TISSUE_PT_convert_to_curve(Panel): #layout.use_property_decorate = False col = layout.column(align=True) row = col.row(align=True) - #col.operator("object.tissue_convert_to_curve_update", icon='FILE_REFRESH', text='Refresh') + #col.operator("object.tissue_update_convert_to_curve", icon='FILE_REFRESH', text='Refresh') row.operator("object.tissue_update_tessellate_deps", icon='FILE_REFRESH', text='Refresh') #### lock_icon = 'LOCKED' if ob.tissue.bool_lock else 'UNLOCKED' #lock_icon = 'PINNED' if props.bool_lock else 'UNPINNED' @@ -710,6 +727,8 @@ class TISSUE_PT_convert_to_curve(Panel): col2 = row.column(align=True) col2.prop(ob.tissue, "bool_run", text="",icon='TIME') col2.enabled = not ob.tissue.bool_lock + col2 = row.column(align=True) + col2.operator("mesh.tissue_remove", text="", icon='X') col.separator() row = col.row(align=True) diff --git a/mesh_tissue/dual_mesh.py b/mesh_tissue/dual_mesh.py index 304806160..c8ca71355 100644 --- a/mesh_tissue/dual_mesh.py +++ b/mesh_tissue/dual_mesh.py @@ -1,7 +1,23 @@ -# SPDX-FileCopyrightText: 2017-2023 Blender Foundation -# # SPDX-License-Identifier: GPL-2.0-or-later +# ##### BEGIN GPL 
LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + # --------------------------------- DUAL MESH -------------------------------- # # -------------------------------- version 0.3 ------------------------------- # # # @@ -230,10 +246,14 @@ class dual_mesh(Operator): ) bpy.ops.mesh.select_all(action='DESELECT') bpy.ops.object.mode_set(mode='OBJECT') - subsurf_modifier = context.object.modifiers.new("dual_mesh_subsurf", 'SUBSURF') - context.object.modifiers.move(len(context.object.modifiers)-1, 0) + bpy.ops.object.modifier_add(type='SUBSURF') + ob.modifiers[-1].name = "dual_mesh_subsurf" + while True: + bpy.ops.object.modifier_move_up(modifier="dual_mesh_subsurf") + if ob.modifiers[0].name == "dual_mesh_subsurf": + break - bpy.ops.object.modifier_apply(modifier=subsurf_modifier.name) + bpy.ops.object.modifier_apply(modifier='dual_mesh_subsurf') bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_all(action='DESELECT') @@ -246,7 +266,7 @@ class dual_mesh(Operator): bpy.ops.mesh.select_more(use_face_step=False) bpy.ops.mesh.select_similar( - type='EDGE', compare='EQUAL', threshold=0.01) + type='VERT_EDGES', compare='EQUAL', threshold=0.01) bpy.ops.mesh.select_all(action='INVERT') bpy.ops.mesh.dissolve_verts() diff --git a/mesh_tissue/lattice.py b/mesh_tissue/lattice.py index b25ac72ee..6753cc2a1 100644 --- a/mesh_tissue/lattice.py +++ b/mesh_tissue/lattice.py @@ -1,7 +1,20 @@ -# SPDX-FileCopyrightText: 2017-2023 Blender Foundation +# ##### BEGIN GPL LICENSE BLOCK ##### # -# SPDX-License-Identifier: GPL-2.0-or-later - +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# +# ##### END GPL LICENSE BLOCK ##### # --------------------------- LATTICE ALONG SURFACE -------------------------- # # -------------------------------- version 0.3 ------------------------------- # # # @@ -327,6 +340,7 @@ class lattice_along_surface(Operator): grid_mesh = temp_grid_obj.data for v in grid_mesh.vertices: v.co = grid_obj.matrix_world @ v.co + #grid_mesh.calc_normals() if len(grid_mesh.polygons) > 64 * 64: bpy.data.objects.remove(temp_grid_obj) @@ -372,13 +386,16 @@ class lattice_along_surface(Operator): lattice.scale.z = 1 context.view_layer.objects.active = obj - lattice_modifier = context.object.modifiers.new("", 'LATTICE') - lattice_modifier.object = lattice + bpy.ops.object.modifier_add(type='LATTICE') + obj.modifiers[-1].object = lattice # set as parent if self.set_parent: - override = {'active_object': obj, 'selected_objects' : [lattice,obj]} - bpy.ops.object.parent_set(override, type='OBJECT', keep_transform=False) + override = context.copy() + override['active_object'] = obj + override['selected_objects'] = [lattice,obj] + with context.temp_override(**override): + bpy.ops.object.parent_set(type='OBJECT', keep_transform=False) # reading grid structure verts_grid, edges_grid, faces_grid = grid_from_mesh( @@ -434,7 +451,7 @@ class lattice_along_surface(Operator): bpy.ops.object.delete(use_global=False) context.view_layer.objects.active = obj obj.select_set(True) - bpy.ops.object.modifier_remove(modifier=lattice_modifier.name) + bpy.ops.object.modifier_remove(modifier=obj.modifiers[-1].name) if nu > 64 or nv > 64: self.report({'ERROR'}, "Maximum resolution allowed for Lattice is 64") return {'CANCELLED'} diff --git a/mesh_tissue/material_tools.py b/mesh_tissue/material_tools.py index 5bb49a0e6..ccd0db50e 100644 --- a/mesh_tissue/material_tools.py +++ b/mesh_tissue/material_tools.py @@ -1,6 +1,20 @@ -# SPDX-FileCopyrightText: 2022 Blender Foundation +# ##### BEGIN GPL LICENSE BLOCK ##### # -# SPDX-License-Identifier: GPL-2.0-or-later +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# +# ##### END GPL LICENSE BLOCK ##### # # # (c) Alessandro Zomparelli # @@ -210,8 +224,8 @@ class weight_to_materials(Operator): faces_weight.append(w) faces_weight = np.array(faces_weight) faces_weight = faces_weight * count - faces_weight.astype('int') - ob.data.polygons.foreach_set('material_index',list(faces_weight)) + faces_weight = list(faces_weight.astype('int')) + ob.data.polygons.foreach_set('material_index', faces_weight) ob.data.update() bpy.ops.object.mode_set(mode='OBJECT') return {'FINISHED'} diff --git a/mesh_tissue/numba_functions.py b/mesh_tissue/numba_functions.py index 874f73fe9..aba8cdf59 100644 --- a/mesh_tissue/numba_functions.py +++ b/mesh_tissue/numba_functions.py @@ -1,6 +1,20 @@ -# SPDX-FileCopyrightText: 2019-2022 Blender Foundation +# ##### BEGIN GPL LICENSE BLOCK ##### # -# SPDX-License-Identifier: GPL-2.0-or-later +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### import numpy as np import time @@ -32,11 +46,87 @@ except: if bool_numba: #from numba import jit, njit, guvectorize, float64, int32, prange + + @njit(parallel=True) + #@cuda.jit('void(float32[:,:], float32[:,:])') + def tex_laplacian(lap, arr): + arr2 = arr*2 + diag = sqrt(2)/2 + nx = arr.shape[0] + ny = arr.shape[1] + for i in prange(nx): + for j in prange(ny): + i0 = (i-1)%nx + j0 = (j-1)%ny + i1 = (i+1)%nx + j1 = (j+1)%ny + #lap[i+1,j+1] = arr[i, j+1] + arr[i+2, j+1] + arr[i+1, j] + arr[i+1, j+2] - 4*arr[i+1,j+1] + + lap[i,j] = ((arr[i0, j] + arr[i1, j] - arr2[i,j]) + \ + (arr[i, j0] + arr[i, j1] - arr2[i,j]) + \ + (arr[i0, j0] + arr[i1, j1] - arr2[i,j])*diag + \ + (arr[i1, j0] + arr[i0, j1] - arr2[i,j])*diag)*0.75 + + @njit(parallel=True) + def tex_laplacian_ani(lap, arr, VF): + arr2 = arr*2 + nx = arr.shape[0] + ny = arr.shape[1] + i0 = np.arange(nx)-1 + i0[0] = 1 + i1 = np.arange(nx)+1 + i1[nx-1] = nx-2 + j0 = np.arange(ny)-1 + j0[0] = 1 + j1 = np.arange(ny)+1 + j1[ny-1] = ny-2 + for i in prange(nx): + for j in prange(ny): + lap[i,j] = (arr[i0[i], j] + arr[i1[i], j] - arr2[i,j])*VF[0,i,j] + \ + (arr[i, j0[j]] + arr[i, j1[j]] - arr2[i,j])*VF[1,i,j] + \ + (arr[i0[i], j0[j]] + arr[i1[i], j1[j]] - arr2[i,j])*VF[2,i,j] + \ + (arr[i1[i], j0[j]] + arr[i0[i], j1[j]] - arr2[i,j])*VF[3,i,j] + #lap[0,:] = lap[1,:] + #lap[:,0] = lap[:,1] + #lap[-1,:] = lap[-2,:] + #lap[:,-1] = lap[:,-2] + + #@cuda.jit(parallel=True) + @njit(parallel=True) + def run_tex_rd(A, B, lap_A, lap_B, diff_A, diff_B, f, k, dt, steps, brush): + for t in range(steps): + tex_laplacian(lap_A, A) + tex_laplacian(lap_B, B) + nx = A.shape[0] + ny = A.shape[1] + for i in prange(nx): + for j in prange(ny): + B[i,j] += brush[i,j] + ab2 = A[i,j]*B[i,j]**2 + A[i,j] += (lap_A[i,j]*diff_A - ab2 + f*(1-A[i,j]))*dt + B[i,j] += (lap_B[i,j]*diff_B + ab2 - (k+f)*B[i,j])*dt + + @njit(parallel=True) + def run_tex_rd_ani(A, B, lap_A, lap_B, diff_A, diff_B, f, 
k, dt, steps, vf1, vf2, brush): + for t in range(steps): + tex_laplacian_ani(lap_A, A, vf2) + #laplacian(lap_A, A) + tex_laplacian_ani(lap_B, B, vf1) + nx = A.shape[0] + ny = A.shape[1] + for i in prange(nx): + for j in prange(ny): + B[i,j] += brush[i,j] + ab2 = A[i ,j]*B[i,j]**2 + A[i,j] += (lap_A[i,j]*diff_A[i,j] - ab2 + f[i,j]*(1-A[i,j]))*dt + B[i,j] += (lap_B[i,j]*diff_B[i,j] + ab2 - (k[i,j]+f[i,j])*B[i,j])*dt + + @njit(parallel=True) def numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, brush, diff_a, diff_b, f, k, dt, time_steps): - arr = np.arange(n_edges)*2 - id0 = edge_verts[arr] - id1 = edge_verts[arr+1] + arr = np.arange(n_edges) + id0 = edge_verts[arr*2] + id1 = edge_verts[arr*2+1] for i in range(time_steps): lap_a, lap_b = rd_init_laplacian(n_verts) numba_rd_laplacian(id0, id1, a, b, lap_a, lap_b) @@ -62,17 +152,14 @@ if bool_numba: return values @njit(parallel=True) - def numba_reaction_diffusion_anisotropic(n_verts, n_edges, edge_verts, a, b, brush, diff_a, diff_b, f, k, dt, time_steps, grad): - arr = np.arange(n_edges)*2 - id0 = edge_verts[arr] - id1 = edge_verts[arr+1] - #grad = weight_grad[id0] - weight_grad[id1] - #grad = np.abs(grad) - #grad /= abs(np.max(grad)) - #grad = grad*0.98 + 0.02 + def numba_reaction_diffusion_anisotropic(n_verts, n_edges, edge_verts, a, b, brush, diff_a, diff_b, f, k, dt, time_steps, field_mult): + arr = np.arange(n_edges) + id0 = edge_verts[arr*2] + id1 = edge_verts[arr*2+1] + mult = field_mult[arr] for i in range(time_steps): lap_a, lap_b = rd_init_laplacian(n_verts) - numba_rd_laplacian_anisotropic(id0, id1, a, b, lap_a, lap_b, grad) + numba_rd_laplacian_anisotropic(id0, id1, a, b, lap_a, lap_b, mult) numba_rd_core(a, b, lap_a, lap_b, diff_a, diff_b, f, k, dt) numba_set_ab(a,b,brush) return a,b @@ -112,28 +199,27 @@ if bool_numba: if a[i] < 0: a[i] = 0 elif a[i] > 1: a[i] = 1 - - #@guvectorize(['(float64[:] ,float64[:] ,float64[:] , float64[:], float64[:], float64[:])'],'(m),(m),(n),(n),(n),(n)',target='parallel') @njit(parallel=True) def numba_rd_laplacian(id0, id1, a, b, lap_a, lap_b): for i in prange(len(id0)): v0 = id0[i] v1 = id1[i] - lap_a[v0] += a[v1] - a[v0] - lap_a[v1] += a[v0] - a[v1] - lap_b[v0] += b[v1] - b[v0] - lap_b[v1] += b[v0] - b[v1] + lap_a[v0] += (a[v1] - a[v0]) + lap_a[v1] += (a[v0] - a[v1]) + lap_b[v0] += (b[v1] - b[v0]) + lap_b[v1] += (b[v0] - b[v1]) #return lap_a, lap_b @njit(parallel=True) - def numba_rd_laplacian_anisotropic(id0, id1, a, b, lap_a, lap_b, grad): + def numba_rd_laplacian_anisotropic(id0, id1, a, b, lap_a, lap_b, mult): for i in prange(len(id0)): v0 = id0[i] v1 = id1[i] - lap_a[v0] += (a[v1] - a[v0]) - lap_a[v1] += (a[v0] - a[v1]) - lap_b[v0] -= (b[v1] - b[v0])*grad[i] - lap_b[v1] += (b[v0] - b[v1])*grad[i] + multiplier = mult[i] + lap_a[v0] += (a[v1] - a[v0])# * multiplier + lap_a[v1] += (a[v0] - a[v1])# * multiplier + lap_b[v0] += (b[v1] - b[v0]) * multiplier + lap_b[v1] += (b[v0] - b[v1]) * multiplier #return lap_a, lap_b @njit(parallel=True) diff --git a/mesh_tissue/polyhedra.py b/mesh_tissue/polyhedra.py index e5c81e200..a97fbc68e 100644 --- a/mesh_tissue/polyhedra.py +++ b/mesh_tissue/polyhedra.py @@ -1,6 +1,20 @@ -# SPDX-FileCopyrightText: 2022 Blender Foundation +# ##### BEGIN GPL LICENSE BLOCK ##### # -# SPDX-License-Identifier: GPL-2.0-or-later +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any 
later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### # ---------------------------- ADAPTIVE DUPLIFACES --------------------------- # # ------------------------------- version 0.84 ------------------------------- # @@ -37,9 +51,159 @@ import random, time, copy import bmesh from .utils import * -class polyhedra_wireframe(Operator): - bl_idname = "object.polyhedra_wireframe" - bl_label = "Tissue Polyhedra Wireframe" +def anim_polyhedra_active(self, context): + ob = context.object + props = ob.tissue_polyhedra + if ob.tissue.tissue_type=='POLYHEDRA' and not ob.tissue.bool_lock: + props.object.name + bpy.ops.object.tissue_update_polyhedra() + +class tissue_polyhedra_prop(PropertyGroup): + object : PointerProperty( + type=bpy.types.Object, + name="Object", + description="Source object", + update = anim_polyhedra_active + ) + + mode : EnumProperty( + items=( + ('POLYHEDRA', "Polyhedra", "Polyhedral Complex Decomposition, the result are disconnected polyhedra geometries"), + ('WIREFRAME', "Wireframe", "Polyhedral Wireframe through edges tickening") + ), + default='POLYHEDRA', + name="Polyhedra Mode", + update = anim_polyhedra_active + ) + + bool_modifiers : BoolProperty( + name="Use Modifiers", + description="", + default=True, + update = anim_polyhedra_active + ) + + dissolve : EnumProperty( + items=( + ('NONE', "None", "Keeps original topology"), + ('INNER', "Inner", "Dissolve inner loops"), + ('OUTER', "Outer", "Dissolve outer loops") + ), + default='NONE', + name="Dissolve", + update = anim_polyhedra_active + ) + + thickness : FloatProperty( + name="Thickness", default=1, soft_min=0, soft_max=10, + description="Thickness along the edges", + update = anim_polyhedra_active + ) + + crease : FloatProperty( + name="Crease", default=0, min=0, max=1, + description="Crease Inner Loops", + update = anim_polyhedra_active + ) + + segments : IntProperty( + name="Segments", + default=0, + min=1, + soft_max=20, + description="Segments for every edge", + update = anim_polyhedra_active + ) + + proportional_segments : BoolProperty( + name="Proportional Segments", default=True, + description="The number of segments is proportional to the length of the edges", + update = anim_polyhedra_active + ) + + selective_wireframe : EnumProperty( + name="Selective", + items=( + ('NONE', "None", "Apply wireframe to every cell"), + ('THICKNESS', "Thickness", "Wireframe only on bigger cells compared to the thickness"), + ('AREA', "Area", "Wireframe based on cells dimensions"), + ('WEIGHT', "Weight", "Wireframe based on vertex groups") + ), + default='NONE', + update = anim_polyhedra_active + ) + + thickness_threshold_correction : FloatProperty( + name="Correction", default=1, min=0, soft_max=2, + description="Adjust threshold based on thickness", + update = anim_polyhedra_active + ) + + area_threshold : FloatProperty( + name="Threshold", default=0, min=0, soft_max=10, + description="Use only faces with an area greater than the threshold", + update = anim_polyhedra_active + ) + + thicken_all : BoolProperty( + name="Thicken all", + description="Thicken original faces 
as well", + default=True, + update = anim_polyhedra_active + ) + + vertex_group_thickness : StringProperty( + name="Thickness weight", default='', + description="Vertex Group used for thickness", + update = anim_polyhedra_active + ) + invert_vertex_group_thickness : BoolProperty( + name="Invert", default=False, + description="Invert the vertex group influence", + update = anim_polyhedra_active + ) + vertex_group_thickness_factor : FloatProperty( + name="Factor", + default=0, + min=0, + max=1, + description="Thickness factor to use for zero vertex group influence", + update = anim_polyhedra_active + ) + + vertex_group_selective : StringProperty( + name="Thickness weight", default='', + description="Vertex Group used for selective wireframe", + update = anim_polyhedra_active + ) + invert_vertex_group_selective : BoolProperty( + name="Invert", default=False, + description="Invert the vertex group influence", + update = anim_polyhedra_active + ) + vertex_group_selective_threshold : FloatProperty( + name="Threshold", + default=0.5, + min=0, + max=1, + description="Selective wireframe threshold", + update = anim_polyhedra_active + ) + bool_smooth : BoolProperty( + name="Smooth Shading", + default=False, + description="Output faces with smooth shading rather than flat shaded", + update = anim_polyhedra_active + ) + + error_message : StringProperty( + name="Error Message", + default="" + ) + +class polyhedral_wireframe(Operator): + bl_idname = "object.polyhedral_wireframe" + bl_label = "Tissue Polyhedral Wireframe" bl_description = "Generate wireframes around the faces.\ \nDoesn't works with boundary edges.\ \n(Experimental)" @@ -50,430 +214,422 @@ class polyhedra_wireframe(Operator): description="Wireframe thickness" ) - subdivisions : IntProperty( + crease : FloatProperty( + name="Crease", default=0, min=0, max=1, + description="Crease Inner Loops" + ) + + segments : IntProperty( name="Segments", default=1, min=1, soft_max=10, - description="Max sumber of segments, used for the longest edge" + description="Segments for every edge" ) - #regular_sections : BoolProperty( - # name="Regular Sections", default=False, - # description="Turn inner loops into polygons" - # ) - - dissolve_inners : BoolProperty( - name="Dissolve Inners", default=False, - description="Dissolve inner edges" + proportional_segments : BoolProperty( + name="Proportional Segments", default=True, + description="The number of segments is proportional to the length of the edges" ) - @classmethod - def poll(cls, context): - try: - #bool_tessellated = context.object.tissue_tessellate.generator != None - ob = context.object - return ob.type == 'MESH' and ob.mode == 'OBJECT'# and bool_tessellated - except: - return False + mode : EnumProperty( + items=( + ('POLYHEDRA', "Polyhedra", "Polyhedral Complex Decomposition, the result are disconnected polyhedra geometries"), + ('WIREFRAME', "Wireframe", "Polyhedral Wireframe through edges tickening") + ), + default='POLYHEDRA', + name="Polyhedra Mode" + ) + + dissolve : EnumProperty( + items=( + ('NONE', "None", "Keeps original topology"), + ('INNER', "Inner", "Dissolve inner loops"), + ('OUTER', "Outer", "Dissolve outer loops") + ), + default='NONE', + name="Dissolve" + ) + + selective_wireframe : EnumProperty( + items=( + ('NONE', "None", "Apply wireframe to every cell"), + ('THICKNESS', "Thickness", "Wireframe only on bigger cells compared to the thickness"), + ('AREA', "Area", "Wireframe based on cells dimensions"), + ('WEIGHT', "Weight", "Wireframe based on vertex groups") + ), + 
default='NONE', + name="Selective" + ) + + thickness_threshold_correction : FloatProperty( + name="Correction", default=1, min=0, soft_max=2, + description="Adjust threshold based on thickness" + ) + + area_threshold : FloatProperty( + name="Threshold", default=0, min=0, soft_max=10, + description="Use only faces with an area greater than the threshold" + ) + + thicken_all : BoolProperty( + name="Thicken all", + description="Thicken original faces as well", + default=True + ) + + vertex_group_thickness : StringProperty( + name="Thickness weight", default='', + description="Vertex Group used for thickness" + ) + + invert_vertex_group_thickness : BoolProperty( + name="Invert", default=False, + description="Invert the vertex group influence" + ) + + vertex_group_thickness_factor : FloatProperty( + name="Factor", + default=0, + min=0, + max=1, + description="Thickness factor to use for zero vertex group influence" + ) + + vertex_group_selective : StringProperty( + name="Thickness weight", default='', + description="Vertex Group used for thickness" + ) + + invert_vertex_group_selective : BoolProperty( + name="Invert", default=False, + description="Invert the vertex group influence" + ) + + vertex_group_selective_threshold : FloatProperty( + name="Threshold", + default=0.5, + min=0, + max=1, + description="Selective wireframe threshold" + ) + + bool_smooth : BoolProperty( + name="Smooth Shading", + default=False, + description="Output faces with smooth shading rather than flat shaded" + ) + + bool_hold : BoolProperty( + name="Hold", + description="Wait...", + default=False + ) + + def draw(self, context): + ob = context.object + layout = self.layout + col = layout.column(align=True) + self.bool_hold = True + if self.mode == 'WIREFRAME': + col.separator() + col.prop(self, "thickness") + col.separator() + col.prop(self, "segments") + return def invoke(self, context, event): return context.window_manager.invoke_props_dialog(self) def execute(self, context): + ob0 = context.object - merge_dist = self.thickness*0.001 + self.object_name = "Polyhedral Wireframe" + # Check if existing object with same name + names = [o.name for o in bpy.data.objects] + if self.object_name in names: + count_name = 1 + while True: + test_name = self.object_name + '.{:03d}'.format(count_name) + if not (test_name in names): + self.object_name = test_name + break + count_name += 1 - subs = self.subdivisions + if ob0.type not in ('MESH'): + message = "Source object must be a Mesh!" 
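+            # the polyhedral decomposition works on mesh faces and edges only,
+            # so any other source type triggers an error report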
+ self.report({'ERROR'}, message) - start_time = time.time() + if bpy.ops.object.select_all.poll(): + bpy.ops.object.select_all(action='TOGGLE') + bpy.ops.object.mode_set(mode='OBJECT') + + bool_update = False + auto_layer_collection() + new_ob = convert_object_to_mesh(ob0,False,False) + new_ob.data.name = self.object_name + new_ob.name = self.object_name + + # Store parameters + props = new_ob.tissue_polyhedra + lock_status = new_ob.tissue.bool_lock + new_ob.tissue.bool_lock = True + props.mode = self.mode + props.thickness = self.thickness + props.segments = self.segments + props.dissolve = self.dissolve + props.proportional_segments = self.proportional_segments + props.crease = self.crease + props.object = ob0 + + new_ob.tissue.tissue_type = 'POLYHEDRA' + try: bpy.ops.object.tissue_update_polyhedra() + except RuntimeError as e: + bpy.data.objects.remove(new_ob) + remove_temp_objects() + self.report({'ERROR'}, str(e)) + return {'CANCELLED'} + if not bool_update: + self.object_name = new_ob.name + new_ob.location = ob0.location + new_ob.matrix_world = ob0.matrix_world + + # Assign collection of the base object + old_coll = new_ob.users_collection + if old_coll != ob0.users_collection: + for c in old_coll: + c.objects.unlink(new_ob) + for c in ob0.users_collection: + c.objects.link(new_ob) + context.view_layer.objects.active = new_ob + + # unlock + new_ob.tissue.bool_lock = lock_status + + return {'FINISHED'} + +class tissue_update_polyhedra(Operator): + bl_idname = "object.tissue_update_polyhedra" + bl_label = "Tissue Update Polyhedral Wireframe" + bl_description = "Update a previously generated polyhedral object" + bl_options = {'REGISTER', 'UNDO'} + + def execute(self, context): ob = context.object - me = simple_to_mesh(ob) + tissue_time(None,'Tissue: Polyhedral Wireframe of "{}"...'.format(ob.name), levels=0) + start_time = time.time() + begin_time = time.time() + props = ob.tissue_polyhedra + thickness = props.thickness + + merge_dist = thickness*0.0001 + + subs = props.segments + if props.mode == 'POLYHEDRA': subs = 1 + + + # Source mesh + ob0 = props.object + if props.bool_modifiers: + me = simple_to_mesh(ob0) + else: + me = ob0.data.copy() + bm = bmesh.new() bm.from_mesh(me) - bm.verts.ensure_lookup_table() - bm.edges.ensure_lookup_table() - bm.faces.ensure_lookup_table() + pre_processing(bm) + polyhedral_subdivide_edges(bm, subs, props.proportional_segments) + tissue_time(start_time,'Subdivide edges',levels=1) + start_time = time.time() - # Subdivide edges - proportional_subs = True - if subs > 1 and proportional_subs: - wire_length = [e.calc_length() for e in bm.edges] - all_edges = list(bm.edges) - max_segment = max(wire_length)/subs - split_edges = [[] for i in range(subs+1)] - for e, l in zip(all_edges, wire_length): - split_edges[int(l//max_segment)].append(e) - for i in range(2,subs): - perc = {} - for e in split_edges[i]: - perc[e]=0.1 - bmesh.ops.bisect_edges(bm, edges=split_edges[i], cuts=i, edge_percents=perc) + thickness = np.ones(len(bm.verts))*props.thickness + if(props.vertex_group_thickness in ob.vertex_groups.keys()): + dvert_lay = bm.verts.layers.deform.active + group_index_thickness = ob.vertex_groups[props.vertex_group_thickness].index + thickness_weight = bmesh_get_weight_numpy(group_index_thickness, dvert_lay, bm.verts) + if 'invert_vertex_group_thickness' in props.keys(): + if props['invert_vertex_group_thickness']: + thickness_weight = 1-thickness_weight + fact = 0 + if 'vertex_group_thickness_factor' in props.keys(): + fact = 
props['vertex_group_thickness_factor'] + if fact > 0: + thickness_weight = thickness_weight*(1-fact) + fact + thickness *= thickness_weight + thickness_dict = dict(zip([tuple(v.co) for v in bm.verts],thickness)) - ### Create double faces - double_faces = [] - double_layer_edge = [] - double_layer_piece = [] - for f in bm.faces: - verts0 = [v.co for v in f.verts] - verts1 = [v.co for v in f.verts] - verts1.reverse() - double_faces.append(verts0) - double_faces.append(verts1) + bm1 = get_double_faces_bmesh(bm) + polyhedra = get_decomposed_polyhedra(bm) + if(type(polyhedra) is str): + bm.free() + bm1.free() + self.report({'ERROR'}, polyhedra) + return {'CANCELLED'} - # Create new bmesh object and data layers - bm1 = bmesh.new() - - # Create faces and assign Edge Layers - for verts in double_faces: - new_verts = [] - for v in verts: - vert = bm1.verts.new(v) - new_verts.append(vert) - bm1.faces.new(new_verts) - - bm1.verts.ensure_lookup_table() - bm1.edges.ensure_lookup_table() - bm1.faces.ensure_lookup_table() - - n_faces = len(bm.faces) - n_doubles = len(bm1.faces) - - polyhedra = [] - - for e in bm.edges: - done = [] - - # ERROR: Naked edges - e_faces = len(e.link_faces) - if e_faces < 2: - bm.free() - bm1.free() - message = "Naked edges are not allowed" - self.report({'ERROR'}, message) - return {'CANCELLED'} - - edge_vec = e.verts[1].co - e.verts[0].co - - # run first face - for i1 in range(e_faces-1): - f1 = e.link_faces[i1] - #edge_verts1 = [v.index for v in f1.verts if v in e.verts] - verts1 = [v.index for v in f1.verts] - va1 = verts1.index(e.verts[0].index) - vb1 = verts1.index(e.verts[1].index) - # check if order of the edge matches the order of the face - dir1 = va1 == (vb1+1)%len(verts1) - edge_vec1 = edge_vec if dir1 else -edge_vec - - # run second face - faces2 = [] - normals2 = [] - for i2 in range(i1+1,e_faces): - #for i2 in range(n_faces): - if i1 == i2: continue - f2 = e.link_faces[i2] - f2.normal_update() - #edge_verts2 = [v.index for v in f2.verts if v in e.verts] - verts2 = [v.index for v in f2.verts] - va2 = verts2.index(e.verts[0].index) - vb2 = verts2.index(e.verts[1].index) - # check if order of the edge matches the order of the face - dir2 = va2 == (vb2+1)%len(verts2) - # check for normal consistency - if dir1 != dir2: - # add face - faces2.append(f2.index+1) - normals2.append(f2.normal) - else: - # add flipped face - faces2.append(-(f2.index+1)) - normals2.append(-f2.normal) - - - - # find first polyhedra (positive) - plane_x = f1.normal # normal - plane_y = plane_x.cross(edge_vec1) # tangent face perp edge - id1 = (f1.index+1) - - min_angle0 = 10000 - - # check consistent faces - if id1 not in done: - id2 = None - min_angle = min_angle0 - for i2, n2 in zip(faces2,normals2): - v2 = flatten_vector(-n2, plane_x, plane_y) - angle = vector_rotation(v2) - if angle < min_angle: - id2 = i2 - min_angle = angle - if id2: done.append(id2) - new_poly = True - # add to existing polyhedron - for p in polyhedra: - if id1 in p or id2 in p: - new_poly = False - if id2 not in p: p.append(id2) - if id1 not in p: p.append(id1) - break - # start new polyhedron - if new_poly: polyhedra.append([id1, id2]) - - # find second polyhedra (negative) - plane_x = -f1.normal # normal - plane_y = plane_x.cross(-edge_vec1) # tangent face perp edge - id1 = -(f1.index+1) - - if id1 not in done: - id2 = None - min_angle = min_angle0 - for i2, n2 in zip(faces2, normals2): - v2 = flatten_vector(n2, plane_x, plane_y) - angle = vector_rotation(v2) - if angle < min_angle: - id2 = -i2 - min_angle = angle - 
done.append(id2) - add = True - for p in polyhedra: - if id1 in p or id2 in p: - add = False - if id2 not in p: p.append(id2) - if id1 not in p: p.append(id1) - break - if add: polyhedra.append([id1, id2]) - - for i in range(len(bm1.faces)): - for j in (False,True): - if j: id = i+1 - else: id = -(i+1) - join = [] - keep = [] - for p in polyhedra: - if id in p: join += p - else: keep.append(p) - if len(join) > 0: - keep.append(list(dict.fromkeys(join))) - polyhedra = keep - - for i, p in enumerate(polyhedra): - for j in p: - bm1.faces[j].material_index = i - - end_time = time.time() - print('Tissue: Polyhedra wireframe, found {} polyhedra in {:.4f} sec'.format(len(polyhedra), end_time-start_time)) - - - delete_faces = [] - wireframe_faces = [] - not_wireframe_faces = [] - flat_faces = [] + selective_dict = None + accurate = False + if props.selective_wireframe == 'THICKNESS': + filter_faces = True + accurate = True + area_threshold = (thickness*props.thickness_threshold_correction)**2 + elif props.selective_wireframe == 'AREA': + filter_faces = True + area_threshold = props.area_threshold + elif props.selective_wireframe == 'WEIGHT': + filter_faces = True + if(props.vertex_group_selective in ob.vertex_groups.keys()): + dvert_lay = bm.verts.layers.deform.active + group_index_selective = ob.vertex_groups[props.vertex_group_selective].index + thresh = props.vertex_group_selective_threshold + selective_weight = bmesh_get_weight_numpy(group_index_selective, dvert_lay, bm.verts) + selective_weight = selective_weight >= thresh + invert = False + if 'invert_vertex_group_selective' in props.keys(): + if props['invert_vertex_group_selective']: + invert = True + if invert: + selective_weight = selective_weight <= thresh + else: + selective_weight = selective_weight >= thresh + selective_dict = dict(zip([tuple(v.co) for v in bm.verts],selective_weight)) + else: + filter_faces = False + else: + filter_faces = False bm.free() - #bmesh.ops.bisect_edges(bm1, edges=bm1.edges, cuts=3) - end_time = time.time() - print('Tissue: Polyhedra wireframe, subdivide edges in {:.4f} sec'.format(end_time-start_time)) + tissue_time(start_time,'Found {} polyhedra'.format(len(polyhedra)),levels=1) + start_time = time.time() + bm1.faces.ensure_lookup_table() bm1.faces.index_update() - #merge_verts = [] - for p in polyhedra: + + #unique_verts_dict = dict(zip([tuple(v.co) for v in bm1.verts],bm1.verts)) + bm1, all_faces_dict, polyhedra_faces_id, polyhedra_faces_id_neg = combine_polyhedra_faces(bm1, polyhedra) + + if props.mode == 'POLYHEDRA': + poly_me = me.copy() + bm1.to_mesh(poly_me) + poly_me.update() + old_me = ob.data + ob.data = poly_me + mesh_name = old_me.name + bpy.data.meshes.remove(old_me) + bpy.data.meshes.remove(me) + ob.data.name = mesh_name + end_time = time.time() + print('Tissue: Polyhedral wireframe in {:.4f} sec'.format(end_time-start_time)) + return {'FINISHED'} + + delete_faces = set({}) + wireframe_faces = [] + not_wireframe_faces = [] + #flat_faces = [] + count = 0 + outer_faces = get_outer_faces(bm1) + for faces_id in polyhedra_faces_id: delete_faces_poly = [] wireframe_faces_poly = [] - faces_id = [(f-1)*2 if f > 0 else (-f-1)*2+1 for f in p] - faces_id_neg = [(-f-1)*2 if -f > 0 else (f-1)*2+1 for f in p] - merge_verts = [] - faces = [bm1.faces[f_id] for f_id in faces_id] - for f in faces: + for id in faces_id: + if id in delete_faces: continue delete = False - if f.index in delete_faces: continue - ''' - cen = f.calc_center_median() - for e in f.edges: - mid = (e.verts[0].co + e.verts[1].co)/2 - 
vec1 = e.verts[0].co - e.verts[1].co - vec2 = mid - cen - ang = Vector.angle(vec1,vec2) - length = vec2.length - #length = sin(ang)*length - if length < self.thickness/2: - delete = True - ''' - if False: - sides = len(f.verts) - for i in range(sides): - v = f.verts[i].co - v0 = f.verts[(i-1)%sides].co - v1 = f.verts[(i+1)%sides].co - vec0 = v0 - v - vec1 = v1 - v - ang = (pi - vec0.angle(vec1))/2 - length = min(vec0.length, vec1.length)*sin(ang) - if length < self.thickness/2: - delete = True - break - + cen = None + f = None + if filter_faces: + f = all_faces_dict[id] + if selective_dict: + for v in f.verts: + if selective_dict[tuple(v.co)]: + delete = True + break + elif accurate: + cen = f.calc_center_median() + for e in f.edges: + v0 = e.verts[0] + v1 = e.verts[1] + mid = (v0.co + v1.co)/2 + vec1 = v0.co - v1.co + vec2 = mid - cen + ang = Vector.angle(vec1,vec2) + length = vec2.length + length = sin(ang)*length + thick0 = thickness_dict[tuple(v0.co)] + thick1 = thickness_dict[tuple(v1.co)] + thick = (thick0 + thick1)/4 + if length < thick*props.thickness_threshold_correction: + delete = True + break + else: + delete = f.calc_area() < area_threshold if delete: - delete_faces_poly.append(f.index) + if props.thicken_all: + delete_faces_poly.append(id) else: - wireframe_faces_poly.append(f.index) - merge_verts += [v for v in f.verts] - if len(wireframe_faces_poly) < 2: - delete_faces += faces_id - not_wireframe_faces += faces_id_neg + wireframe_faces_poly.append(id) + if len(wireframe_faces_poly) <= 2: + delete_faces.update(set([id for id in faces_id])) + not_wireframe_faces += [polyhedra_faces_id_neg[id] for id in faces_id] else: wireframe_faces += wireframe_faces_poly - flat_faces += delete_faces_poly + #flat_faces += delete_faces_poly + wireframe_faces_id = [i for i in wireframe_faces if i not in not_wireframe_faces] + wireframe_faces = [all_faces_dict[i] for i in wireframe_faces_id] + #flat_faces = [all_faces_dict[i] for i in flat_faces] + delete_faces = [all_faces_dict[i] for i in delete_faces if all_faces_dict[i] not in outer_faces] - #wireframe_faces = list(dict.fromkeys(wireframe_faces)) - bmesh.ops.remove_doubles(bm1, verts=merge_verts, dist=merge_dist) - bm1.edges.ensure_lookup_table() - bm1.faces.ensure_lookup_table() - bm1.faces.index_update() - - - wireframe_faces = [i for i in wireframe_faces if i not in not_wireframe_faces] - wireframe_faces = list(dict.fromkeys(wireframe_faces)) - - flat_faces = list(dict.fromkeys(flat_faces)) - - end_time = time.time() - print('Tissue: Polyhedra wireframe, merge and delete in {:.4f} sec'.format(end_time-start_time)) - - poly_me = me.copy() - bm1.to_mesh(poly_me) - poly_me.update() - new_ob = bpy.data.objects.new("Polyhedra", poly_me) - context.collection.objects.link(new_ob) + tissue_time(start_time,'Merge and delete',levels=1) + start_time = time.time() ############# FRAME ############# - bm1.faces.index_update() - wireframe_faces = [bm1.faces[i] for i in wireframe_faces] - original_faces = wireframe_faces - #bmesh.ops.remove_doubles(bm1, verts=merge_verts, dist=0.001) - - # detect edge loops - - loops = [] - boundaries_mat = [] - neigh_face_center = [] - face_normals = [] - - # compute boundary frames - new_faces = [] - wire_length = [] - vert_ids = [] - - # append regular faces - - for f in original_faces: - loop = list(f.verts) - loops.append(loop) - boundaries_mat.append([f.material_index for v in loop]) - f.normal_update() - face_normals.append([f.normal for v in loop]) - - push_verts = [] - inner_loops = [] - - for loop_index, loop 
in enumerate(loops): - is_boundary = loop_index < len(neigh_face_center) - materials = boundaries_mat[loop_index] - new_loop = [] - loop_ext = [loop[-1]] + loop + [loop[0]] - - # calc tangents - tangents = [] - for i in range(len(loop)): - # vertices - vert0 = loop_ext[i] - vert = loop_ext[i+1] - vert1 = loop_ext[i+2] - # edge vectors - vec0 = (vert0.co - vert.co).normalized() - vec1 = (vert.co - vert1.co).normalized() - # tangent - _vec1 = -vec1 - _vec0 = -vec0 - ang = (pi - vec0.angle(vec1))/2 - normal = face_normals[loop_index][i] - tan0 = normal.cross(vec0) - tan1 = normal.cross(vec1) - tangent = (tan0 + tan1).normalized()/sin(ang)*self.thickness/2 - tangents.append(tangent) - - # calc correct direction for boundaries - mult = -1 - if is_boundary: - dir_val = 0 - for i in range(len(loop)): - surf_point = neigh_face_center[loop_index][i] - tangent = tangents[i] - vert = loop_ext[i+1] - dir_val += tangent.dot(vert.co - surf_point) - if dir_val > 0: mult = 1 - - # add vertices - for i in range(len(loop)): - vert = loop_ext[i+1] - area = 1 - new_co = vert.co + tangents[i] * mult * area - # add vertex - new_vert = bm1.verts.new(new_co) - new_loop.append(new_vert) - vert_ids.append(vert.index) - new_loop.append(new_loop[0]) - - # add faces - #materials += [materials[0]] - for i in range(len(loop)): - v0 = loop_ext[i+1] - v1 = loop_ext[i+2] - v2 = new_loop[i+1] - v3 = new_loop[i] - face_verts = [v1,v0,v3,v2] - if mult == -1: face_verts = [v0,v1,v2,v3] - new_face = bm1.faces.new(face_verts) - # Material by original edges - piece_id = 0 - new_face.select = True - new_faces.append(new_face) - wire_length.append((v0.co - v1.co).length) - max_segment = max(wire_length)/self.subdivisions - #for f,l in zip(new_faces,wire_length): - # f.material_index = min(int(l/max_segment), self.subdivisions-1) - bm1.verts.ensure_lookup_table() - push_verts += [v.index for v in loop_ext] - - # At this point topology han been build, but not yet thickened - - end_time = time.time() - print('Tissue: Polyhedra wireframe, frames in {:.4f} sec'.format(end_time-start_time)) + new_faces, outer_wireframe_faces = create_frame_faces( + bm1, + wireframe_faces, + wireframe_faces_id, + polyhedra_faces_id_neg, + thickness_dict, + outer_faces + ) + faces_to_delete = wireframe_faces+delete_faces + outer_wireframe_faces += [f for f in outer_faces if not f in faces_to_delete] + bmesh.ops.delete(bm1, geom=faces_to_delete, context='FACES') bm1.verts.ensure_lookup_table() bm1.edges.ensure_lookup_table() bm1.faces.ensure_lookup_table() bm1.verts.index_update() + wireframe_indexes = [f.index for f in new_faces] + outer_indexes = [f.index for f in outer_wireframe_faces] + edges_to_crease = [f.edges[2].index for f in new_faces] + layer_is_wireframe = bm1.faces.layers.int.new('tissue_is_wireframe') + for id in wireframe_indexes: + bm1.faces[id][layer_is_wireframe] = 1 + layer_is_outer = bm1.faces.layers.int.new('tissue_is_outer') + for id in outer_indexes: + bm1.faces[id][layer_is_outer] = 1 + if props.crease > 0 and props.dissolve != 'INNER': + crease_layer = bm1.edges.layers.float.new('crease_edge') + bm1.edges.index_update() + crease_edges = [] + for edge_index in edges_to_crease: + bm1.edges[edge_index][crease_layer] = props.crease + + tissue_time(start_time,'Generate frames',levels=1) + start_time = time.time() + ### Displace vertices ### - - circle_center = [0]*len(bm1.verts) - circle_normal = [0]*len(bm1.verts) - - smooth_corners = [True] * len(bm1.verts) corners = [[] for i in range(len(bm1.verts))] normals = [0]*len(bm1.verts) 
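+        # Corner displacement sketch: for every frame vertex, collect the outgoing
+        # face-edge directions and the vertex normal, then push the vertex along its
+        # normal by thickness / sin(average angle between normal and edge directions),
+        # so the visible frame width stays close to the requested thickness at corners.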
vertices = [0]*len(bm1.verts) # Define vectors direction - for f in new_faces: + for f in bm1.faces: v0 = f.verts[0] v1 = f.verts[1] id = v0.index corners[id].append((v1.co - v0.co).normalized()) + v0.normal_update() normals[id] = v0.normal.copy() vertices[id] = v0 - smooth_corners[id] = False # Displace vertices for i, vecs in enumerate(corners): if len(vecs) > 0: @@ -481,63 +637,437 @@ class polyhedra_wireframe(Operator): nor = normals[i] ang = 0 for vec in vecs: + if nor == Vector((0,0,0)): continue ang += nor.angle(vec) ang /= len(vecs) div = sin(ang) if div == 0: div = 1 - v.co += nor*self.thickness/2/div + v.co += nor*thickness_dict[tuple(v.co)]/div - end_time = time.time() - print('Tissue: Polyhedra wireframe, corners displace in {:.4f} sec'.format(end_time-start_time)) + tissue_time(start_time,'Corners displace',levels=1) + start_time = time.time() - # Removing original flat faces - - flat_faces = [bm1.faces[i] for i in flat_faces] - for f in flat_faces: - f.material_index = self.subdivisions+1 - for v in f.verts: - if smooth_corners[v.index]: - v.co += v.normal*self.thickness/2 - smooth_corners[v.index] = False - delete_faces = delete_faces + [f.index for f in original_faces] - delete_faces = list(dict.fromkeys(delete_faces)) - delete_faces = [bm1.faces[i] for i in delete_faces] - bmesh.ops.delete(bm1, geom=delete_faces, context='FACES') - - bmesh.ops.remove_doubles(bm1, verts=bm1.verts, dist=merge_dist) - bm1.faces.ensure_lookup_table() - bm1.edges.ensure_lookup_table() - bm1.verts.ensure_lookup_table() - - if self.dissolve_inners: + if props.dissolve != 'NONE': + if props.dissolve == 'INNER': dissolve_id = 2 + if props.dissolve == 'OUTER': dissolve_id = 0 bm1.edges.index_update() dissolve_edges = [] for f in bm1.faces: - e = f.edges[2] + e = f.edges[dissolve_id] if e not in dissolve_edges: dissolve_edges.append(e) - bmesh.ops.dissolve_edges(bm1, edges=dissolve_edges, use_verts=True, use_face_split=True) + bmesh.ops.dissolve_edges(bm1, edges=dissolve_edges, use_verts=True, use_face_split=False) - all_lines = [[] for e in me.edges] - all_end_points = [[] for e in me.edges] for v in bm1.verts: v.select_set(False) for f in bm1.faces: f.select_set(False) - _me = me.copy() + dissolve_verts = [v for v in bm1.verts if len(v.link_edges) < 3] + bmesh.ops.dissolve_verts(bm1, verts=dissolve_verts, use_face_split=False, use_boundary_tear=False) + + # clean meshes bm1.to_mesh(me) + if props.bool_smooth: me.shade_smooth() me.update() - new_ob = bpy.data.objects.new("Wireframe", me) - context.collection.objects.link(new_ob) - for o in context.scene.objects: o.select_set(False) - new_ob.select_set(True) - context.view_layer.objects.active = new_ob - me = _me - + old_me = ob.data + ob.data = me + mesh_name = old_me.name + bpy.data.meshes.remove(old_me) + ob.data.name = mesh_name bm1.free() - bpy.data.meshes.remove(_me) - #new_ob.location = ob.location - new_ob.matrix_world = ob.matrix_world - end_time = time.time() - print('Tissue: Polyhedra wireframe in {:.4f} sec'.format(end_time-start_time)) + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.uv.reset() + bpy.ops.object.mode_set(mode='OBJECT') + + tissue_time(start_time,'Clean mesh',levels=1) + start_time = time.time() + + tissue_time(begin_time,'Polyhedral Wireframe',levels=0) return {'FINISHED'} + +def pre_processing(bm): + delete = [e for e in bm.edges if len(e.link_faces) < 2] + while len(delete) > 0: + bmesh.ops.delete(bm, geom=delete, context='EDGES') + bm.faces.ensure_lookup_table() + 
bm.edges.ensure_lookup_table() + bm.verts.ensure_lookup_table() + delete = [e for e in bm.edges if len(e.link_faces) < 2] + return bm + +def get_outer_faces(bm): + bm_copy = bm.copy() + bmesh.ops.recalc_face_normals(bm_copy, faces=bm_copy.faces) + outer = [] + for f1, f2 in zip(bm.faces, bm_copy.faces): + f1.normal_update() + if f1.normal == f2.normal: + outer.append(f1) + return outer + +def create_frame_faces( + bm, + wireframe_faces, + wireframe_faces_id, + polyhedra_faces_id_neg, + thickness_dict, + outer_faces +): + new_faces = [] + for f in wireframe_faces: + f.normal_update() + all_loops = [[loop for loop in f.loops] for f in wireframe_faces] + is_outer = [f in outer_faces for f in wireframe_faces] + outer_wireframe_faces = [] + frames_verts_dict = {} + for loops_index, loops in enumerate(all_loops): + n_loop = len(loops) + frame_id = wireframe_faces_id[loops_index] + single_face_id = min(frame_id,polyhedra_faces_id_neg[frame_id]) + verts_inner = [] + loops_keys = [tuple(loop.vert.co) + tuple((single_face_id,)) for loop in loops] + if loops_keys[0] in frames_verts_dict: + verts_inner = [frames_verts_dict[key] for key in loops_keys] + else: + tangents = [] + nor = wireframe_faces[loops_index].normal + for loop in loops: + tan = loop.calc_tangent() #nor.cross(loop.calc_tangent().cross(nor)).normalized() + thickness = thickness_dict[tuple(loop.vert.co)] + tangents.append(tan/sin(loop.calc_angle()/2)*thickness) + for i in range(n_loop): + loop = loops[i] + new_co = loop.vert.co + tangents[i] + new_vert = bm.verts.new(new_co) + frames_verts_dict[loops_keys[i]] = new_vert + verts_inner.append(new_vert) + # add faces + loops += [loops[0]] + verts_inner += [verts_inner[0]] + for i in range(n_loop): + v0 = loops[i].vert + v1 = loops[i+1].vert + v2 = verts_inner[i+1] + v3 = verts_inner[i] + face_verts = [v0,v1,v2,v3] + new_face = bm.faces.new(face_verts) + new_face.select = True + new_faces.append(new_face) + if is_outer[loops_index]: + outer_wireframe_faces.append(new_face) + new_face.normal_update() + return new_faces, outer_wireframe_faces + +def polyhedral_subdivide_edges(bm, subs, proportional_segments): + if subs > 1: + if proportional_segments: + wire_length = [e.calc_length() for e in bm.edges] + all_edges = list(bm.edges) + max_segment = max(wire_length)/subs+0.00001 # prevent out_of_bounds + split_edges = [[] for i in range(subs)] + for e, l in zip(all_edges, wire_length): + split_edges[int(l//max_segment)].append(e) + for i in range(1,subs): + bmesh.ops.bisect_edges(bm, edges=split_edges[i], cuts=i) + else: + bmesh.ops.bisect_edges(bm, edges=bm.edges, cuts=subs-1) + +def get_double_faces_bmesh(bm): + double_faces = [] + for f in bm.faces: + verts0 = [v.co for v in f.verts] + verts1 = verts0.copy() + verts1.reverse() + double_faces.append(verts0) + double_faces.append(verts1) + bm1 = bmesh.new() + for verts_co in double_faces: + bm1.faces.new([bm1.verts.new(v) for v in verts_co]) + bm1.verts.ensure_lookup_table() + bm1.edges.ensure_lookup_table() + bm1.faces.ensure_lookup_table() + return bm1 + +def get_decomposed_polyhedra(bm): + polyhedra_from_facekey = {} + count = 0 + to_merge = [] + for e in bm.edges: + done = [] + # ERROR: Naked edges + link_faces = e.link_faces + n_radial_faces = len(link_faces) + if n_radial_faces < 2: + return "Naked edges are not allowed" + vert0 = e.verts[0] + vert1 = e.verts[1] + edge_vec = vert1.co - vert0.co + + for id1 in range(n_radial_faces-1): + f1 = link_faces[id1] + facekey1 = f1.index+1 + verts1 = [v.index for v in f1.verts] + v0_index = 
verts1.index(vert0.index) + v1_index = verts1.index(vert1.index) + + ref_loop_dir = v0_index == (v1_index+1)%len(verts1) + edge_vec1 = edge_vec if ref_loop_dir else -edge_vec + tan1 = f1.normal.cross(edge_vec1) + + # faces to compare with + faceskeys2, normals2 = get_second_faces( + link_faces, + vert0.index, + vert1.index, + ref_loop_dir, + f1 + ) + + tangents2 = [nor.cross(-edge_vec1) for nor in normals2] + + # positive side + facekey2_pos = get_closest_face( + faceskeys2, + tangents2, + tan1, + edge_vec1, + True + ) + polyhedra_from_facekey, count, to_merge = store_neighbor_faces( + facekey1, + facekey2_pos, + polyhedra_from_facekey, + count, + to_merge + ) + # negative side + facekey2_neg = get_closest_face( + faceskeys2, + tangents2, + tan1, + edge_vec1, + False + ) + polyhedra_from_facekey, count, to_merge = store_neighbor_faces( + -facekey1, + facekey2_neg, + polyhedra_from_facekey, + count, + to_merge + ) + + polyhedra = [ [] for i in range(count)] + unique_index = get_unique_polyhedra_index(count, to_merge) + for key, val in polyhedra_from_facekey.items(): + polyhedra[unique_index[val]].append(key) + polyhedra = list(set(tuple(i) for i in polyhedra if i)) + polyhedra = remove_double_faces_from_polyhedra(polyhedra) + return polyhedra + +def remove_double_faces_from_polyhedra(polyhedra): + new_polyhedra = [] + for polyhedron in polyhedra: + new_polyhedron = [key for key in polyhedron if not -key in polyhedron] + new_polyhedra.append(new_polyhedron) + return new_polyhedra + +def get_unique_polyhedra_index(count, to_merge): + out = list(range(count)) + keep_going = True + while keep_going: + keep_going = False + for pair in to_merge: + if out[pair[1]] != out[pair[0]]: + out[pair[0]] = out[pair[1]] = min(out[pair[0]], out[pair[1]]) + keep_going = True + return out + +def get_closest_face(faces, tangents, ref_vector, axis, is_positive): + facekey = None + min_angle = 1000000 + for fk, tangent in zip(faces, tangents): + rot_axis = -axis if is_positive else axis + angle = round_angle_with_axis(ref_vector, tangent, rot_axis) + if angle < min_angle: + facekey = fk + min_angle = angle + return facekey if is_positive else -facekey + +def get_second_faces(face_list, edge_v0, edge_v1, reference_loop_dir, self): + nFaces = len(face_list)-1 + facekeys = [None]*nFaces + normals = [None]*nFaces + count = 0 + for face in face_list: + if(face == self): continue + verts = [v.index for v in face.verts] + v0_index = verts.index(edge_v0) + v1_index = verts.index(edge_v1) + loop_dir = v0_index == (v1_index+1)%len(verts) + if reference_loop_dir != loop_dir: + facekeys[count] = face.index+1 + normals[count] = face.normal + else: + facekeys[count] = -(face.index+1) + normals[count] = -face.normal + count+=1 + return facekeys, normals + +def store_neighbor_faces( + key1, + key2, + polyhedra, + polyhedra_count, + to_merge +): + poly1 = polyhedra.get(key1) + poly2 = polyhedra.get(key2) + if poly1 and poly2: + if poly1 != poly2: + to_merge.append((poly1, poly2)) + elif poly1: + polyhedra[key2] = poly1 + elif poly2: + polyhedra[key1] = poly2 + else: + polyhedra[key1] = polyhedra[key2] = polyhedra_count + polyhedra_count += 1 + return polyhedra, polyhedra_count, to_merge + +def add_polyhedron(bm,source_faces): + faces_verts_key = [[tuple(v.co) for v in f.verts] for f in source_faces] + polyhedron_verts_key = [key for face_key in faces_verts_key for key in face_key] + polyhedron_verts = [bm.verts.new(co) for co in polyhedron_verts_key] + polyhedron_verts_dict = dict(zip(polyhedron_verts_key, polyhedron_verts)) + 
new_faces = [None]*len(faces_verts_key) + count = 0 + for verts_keys in faces_verts_key: + new_faces[count] = bm.faces.new([polyhedron_verts_dict.get(key) for key in verts_keys]) + count+=1 + + bm.faces.ensure_lookup_table() + bm.faces.index_update() + return new_faces + +def combine_polyhedra_faces(bm,polyhedra): + new_bm = bmesh.new() + polyhedra_faces_id = [None]*len(polyhedra) + all_faces_dict = {} + #polyhedra_faces_pos = {} + polyhedra_faces_id_neg = {} + vertices_key = [tuple(v.co) for v in bm.verts] + count = 0 + for p in polyhedra: + faces_id = [(f-1)*2 if f > 0 else (-f-1)*2+1 for f in p] + faces_id_neg = [(-f-1)*2 if f < 0 else (f-1)*2+1 for f in p] + new_faces = add_polyhedron(new_bm,[bm.faces[f_id] for f_id in faces_id]) + faces_dict = {} + for i in range(len(new_faces)): + face = new_faces[i] + id = faces_id[i] + id_neg = faces_id_neg[i] + polyhedra_faces_id_neg[id] = id_neg + all_faces_dict[id] = face + polyhedra_faces_id[count] = faces_id + count+=1 + return new_bm, all_faces_dict, polyhedra_faces_id, polyhedra_faces_id_neg + +class TISSUE_PT_polyhedra_object(Panel): + bl_space_type = 'PROPERTIES' + bl_region_type = 'WINDOW' + bl_context = "data" + bl_label = "Tissue Polyhedra" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + try: + ob = context.object + return ob.type == 'MESH' and ob.tissue.tissue_type == 'POLYHEDRA' + except: return False + + def draw(self, context): + ob = context.object + props = ob.tissue_polyhedra + tissue_props = ob.tissue + + bool_polyhedra = tissue_props.tissue_type == 'POLYHEDRA' + layout = self.layout + if not bool_polyhedra: + layout.label(text="The selected object is not a Polyhedral object", + icon='INFO') + else: + if props.error_message != "": + layout.label(text=props.error_message, + icon='ERROR') + col = layout.column(align=True) + row = col.row(align=True) + + #set_tessellate_handler(self,context) + row.operator("object.tissue_update_tessellate_deps", icon='FILE_REFRESH', text='Refresh') #### + lock_icon = 'LOCKED' if tissue_props.bool_lock else 'UNLOCKED' + #lock_icon = 'PINNED' if props.bool_lock else 'UNPINNED' + deps_icon = 'LINKED' if tissue_props.bool_dependencies else 'UNLINKED' + row.prop(tissue_props, "bool_dependencies", text="", icon=deps_icon) + row.prop(tissue_props, "bool_lock", text="", icon=lock_icon) + col2 = row.column(align=True) + col2.prop(tissue_props, "bool_run", text="",icon='TIME') + col2.enabled = not tissue_props.bool_lock + col2 = row.column(align=True) + col2.operator("mesh.tissue_remove", text="", icon='X') + #layout.use_property_split = True + #layout.use_property_decorate = False # No animation. 
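+        # The controls below expose the tissue_polyhedra property group:
+        # mode and source object first, then (in Wireframe mode) thickness and its
+        # vertex-group override, selective wireframe options, edge segments,
+        # loop dissolve and crease.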
+ col = layout.column(align=True) + col.label(text='Polyhedral Mode:') + col.prop(props, 'mode', text='') + col.separator() + col.label(text='Source object:') + row = col.row(align=True) + row.prop_search(props, "object", context.scene, "objects", text='') + col2 = row.column(align=True) + col2.prop(props, "bool_modifiers", text='Use Modifiers',icon='MODIFIER') + if props.mode == 'WIREFRAME': + col.separator() + col.prop(props, 'thickness') + row = col.row(align=True) + ob0 = props.object + row.prop_search(props, 'vertex_group_thickness', + ob0, "vertex_groups", text='') + col2 = row.column(align=True) + row2 = col2.row(align=True) + row2.prop(props, "invert_vertex_group_thickness", text="", + toggle=True, icon='ARROW_LEFTRIGHT') + row2.prop(props, "vertex_group_thickness_factor") + row2.enabled = props.vertex_group_thickness in ob0.vertex_groups.keys() + col.prop(props, 'bool_smooth') + col.separator() + col.label(text='Selective Wireframe:') + col.prop(props, 'selective_wireframe', text='Mode') + col.separator() + if props.selective_wireframe == 'THICKNESS': + col.prop(props, 'thickness_threshold_correction') + elif props.selective_wireframe == 'AREA': + col.prop(props, 'area_threshold') + elif props.selective_wireframe == 'WEIGHT': + row = col.row(align=True) + row.prop_search(props, 'vertex_group_selective', + ob0, "vertex_groups", text='') + col2 = row.column(align=True) + row2 = col2.row(align=True) + row2.prop(props, "invert_vertex_group_selective", text="", + toggle=True, icon='ARROW_LEFTRIGHT') + row2.prop(props, "vertex_group_selective_threshold") + row2.enabled = props.vertex_group_selective in ob0.vertex_groups.keys() + #if props.selective_wireframe != 'NONE': + # col.prop(props, 'thicken_all') + col.separator() + col.label(text='Subdivide edges:') + row = col.row() + row.prop(props, 'segments') + row.prop(props, 'proportional_segments', text='Proportional') + col.separator() + col.label(text='Loops:') + col.prop(props, 'dissolve') + col.separator() + col.prop(props, 'crease') diff --git a/mesh_tissue/tessellate_numpy.py b/mesh_tissue/tessellate_numpy.py index 02173bce4..5c408abe6 100644 --- a/mesh_tissue/tessellate_numpy.py +++ b/mesh_tissue/tessellate_numpy.py @@ -1,6 +1,20 @@ -# SPDX-FileCopyrightText: 2017-2023 Blender Foundation +# ##### BEGIN GPL LICENSE BLOCK ##### # -# SPDX-License-Identifier: GPL-2.0-or-later +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### # ---------------------------- ADAPTIVE DUPLIFACES --------------------------- # # ------------------------------- version 0.84 ------------------------------- # @@ -47,26 +61,19 @@ from . 
import config def allowed_objects(): return ('MESH', 'CURVE', 'SURFACE', 'FONT', 'META') -def remove_temp_objects(): - # clean objects - for o in bpy.data.objects: - if "_tissue_tmp" in o.name: - bpy.data.objects.remove(o) - return - def tessellated(ob): - tess_props = ob.tissue_tessellate - if tess_props.generator not in list(bpy.data.objects): + props = ob.tissue_tessellate + if props.generator not in list(bpy.data.objects): return False - elif tess_props.component_mode == 'OBJECT': - return tess_props.component in list(bpy.data.objects) - elif tess_props.component_mode == 'COLLECTION': - if tess_props.component_coll in list(bpy.data.collections): - for o in list(tess_props.component_coll.objects): + elif props.component_mode == 'OBJECT': + return props.component in list(bpy.data.objects) + elif props.component_mode == 'COLLECTION': + if props.component_coll in list(bpy.data.collections): + for o in list(props.component_coll.objects): if o.type in allowed_objects(): return True else: - for mat in tess_props.generator.material_slots.keys(): + for mat in props.generator.material_slots.keys(): if mat in bpy.data.objects.keys(): if bpy.data.objects[mat].type in allowed_objects(): return True @@ -101,6 +108,10 @@ def tessellate_patch(props): vertex_group_thickness = props['vertex_group_thickness'] invert_vertex_group_thickness = props['invert_vertex_group_thickness'] vertex_group_thickness_factor = props['vertex_group_thickness_factor'] + vertex_group_frame_thickness = props['vertex_group_frame_thickness'] + invert_vertex_group_frame_thickness = props['invert_vertex_group_frame_thickness'] + vertex_group_frame_thickness_factor = props['vertex_group_frame_thickness_factor'] + face_weight_frame = props['face_weight_frame'] vertex_group_distribution = props['vertex_group_distribution'] invert_vertex_group_distribution = props['invert_vertex_group_distribution'] vertex_group_distribution_factor = props['vertex_group_distribution_factor'] @@ -128,6 +139,7 @@ def tessellate_patch(props): vertex_group_scale_normals = props['vertex_group_scale_normals'] invert_vertex_group_scale_normals = props['invert_vertex_group_scale_normals'] boundary_mat_offset = props['boundary_mat_offset'] + preserve_quads = props['preserve_quads'] _props = props.copy() @@ -154,7 +166,7 @@ def tessellate_patch(props): # Target mesh used for normals if normals_mode in ('SHAPEKEYS', 'OBJECT'): if fill_mode == 'PATCH': - ob0_sk = convert_object_to_mesh(target, True, True) + ob0_sk = convert_object_to_mesh(target, True, rotation_mode!='UV') else: use_modifiers = gen_modifiers if normals_mode == 'SHAPEKEYS' and not gen_modifiers: @@ -175,7 +187,7 @@ def tessellate_patch(props): for sk in _ob0.data.shape_keys.key_blocks: sk.value = 0 # Base mesh if fill_mode == 'PATCH': - ob0 = convert_object_to_mesh(_ob0) + ob0 = convert_object_to_mesh(_ob0, True, True, rotation_mode!='UV') if boundary_mat_offset != 0: bm=bmesh.new() @@ -230,7 +242,7 @@ def tessellate_patch(props): first_component = True for com in components: if com: - com = convert_object_to_mesh(com, com_modifiers, False) + com = convert_object_to_mesh(com, com_modifiers, False, False) com, com_area = tessellate_prepare_component(com, props) com_verts = get_vertices_numpy(com.data) bpy.data.objects.remove(com) @@ -319,7 +331,10 @@ def tessellate_patch(props): break else: before.modifiers.remove(m) - before_subsurf = simple_to_mesh(before) + if rotation_mode!='UV': + before_subsurf = simple_to_mesh_mirror(before) + else: + before_subsurf = simple_to_mesh(before) if 
boundary_mat_offset != 0: bm=bmesh.new() @@ -360,6 +375,7 @@ def tessellate_patch(props): if not vertex_group_rotation in ob0.vertex_groups.keys(): rotation_mode = 'DEFAULT' + bool_vertex_group = bool_vertex_group and len(ob0.vertex_groups.keys()) > 0 bool_weight_smooth_normals = vertex_group_smooth_normals in ob0.vertex_groups.keys() bool_weight_thickness = vertex_group_thickness in ob0.vertex_groups.keys() bool_weight_distribution = vertex_group_distribution in ob0.vertex_groups.keys() @@ -422,7 +438,10 @@ def tessellate_patch(props): v01 = all_verts[:,0,-1] v10 = all_verts[:,-1,0] v11 = all_verts[:,-1,-1] - face_weight = (weight_distribution[v00] + weight_distribution[v01] + weight_distribution[v10] + weight_distribution[v11])/4 * len(components) + # Average method + face_weight = np.average(weight_distribution[all_verts.reshape((all_verts.shape[0], -1))], axis=1) * len(components) + # Corners Method + #face_weight = (weight_distribution[v00] + weight_distribution[v01] + weight_distribution[v10] + weight_distribution[v11])/4 * len(components) if fill_mode == 'FAN' and consistent_wedges: for i in range(n_original_faces): face_mask = faces_id == i @@ -578,7 +597,7 @@ def tessellate_patch(props): for mat_id, _ob1 in enumerate(components): if _ob1 == None: continue - # Set original values (for next components) + # Set original values (for next commponents) com_modifiers = _com_modifiers bool_shapekeys = _bool_shapekeys @@ -620,7 +639,7 @@ def tessellate_patch(props): mod_visibility.append(m.show_viewport) m.show_viewport = False com_modifiers = True - ob1 = convert_object_to_mesh(_ob1, com_modifiers, False) + ob1 = convert_object_to_mesh(_ob1, com_modifiers, False, False) ob1, com_area = tessellate_prepare_component(ob1, props) ob1.name = "_tissue_tmp_ob1" @@ -753,6 +772,11 @@ def tessellate_patch(props): if vertex_group_thickness in ob0.vertex_groups.keys(): vg_id = ob0.vertex_groups[vertex_group_thickness].index weight_thickness = store_weight[vg_id,:,:] + if invert_vertex_group_thickness: + weight_thickness = 1-weight_thickness + fact = vertex_group_thickness_factor + if fact > 0: + weight_thickness = weight_thickness*(1-fact) + fact if vertex_group_smooth_normals in ob0.vertex_groups.keys(): vg_id = ob0.vertex_groups[vertex_group_smooth_normals].index weight_smooth_normals = store_weight[vg_id,:,:] @@ -802,6 +826,7 @@ def tessellate_patch(props): n2 = n2[masked_faces][:,None,:] else: if normals_mode == 'CUSTOM': + me0.calc_normals_split() normals_split = [0]*len(me0.loops)*3 vertex_indexes = [0]*len(me0.loops) me0.loops.foreach_get('normal', normals_split) @@ -863,7 +888,7 @@ def tessellate_patch(props): # thickness variation mean_area = [] a2 = None - if scale_mode == 'ADAPTIVE' and normals_mode not in ('SHAPEKEYS','OBJECT'): + if scale_mode == 'ADAPTIVE':# and normals_mode not in ('SHAPEKEYS','OBJECT'): #com_area = bb[0]*bb[1] if mode != 'BOUNDS' or com_area == 0: com_area = 1 if normals_mode == 'FACES': @@ -878,6 +903,9 @@ def tessellate_patch(props): verts_area = verts_area[masked_verts] verts_area = verts_area.mean(axis=(1,2)).reshape((n_patches,1,1)) a2 = verts_area + if normals_mode in ('SHAPEKEYS','OBJECT'): + verts_area = np.ones(n_verts0) + verts_area = verts_area[masked_verts] else: areas = calc_verts_area_bmesh(me0) verts_area = np.sqrt(areas*patch_faces/com_area) @@ -900,7 +928,6 @@ def tessellate_patch(props): np_v = np.clip(sk_uv_quads[:,:,1], 0, sides).astype('int')[:,None,:] np_u1 = np.clip(sk_uv_quads[:,:,2], 0, sides).astype('int')[:,None,:] np_v1 = 
np.clip(sk_uv_quads[:,:,3], 0, sides).astype('int')[:,None,:] - print(np_v1) # face corners for each vertex (n_patches, n_sk, n_verts1, 4) v00 = verts_xyz[:,np_u,np_v].reshape((n_patches,n_sk,n_verts1,3))#.swapaxes(0,1) v10 = verts_xyz[:,np_u1,np_v].reshape((n_patches,n_sk,n_verts1,3))#.swapaxes(0,1) @@ -973,12 +1000,18 @@ def tessellate_patch(props): except: pass tt = tissue_time(tt, "Inject coordinates", levels=2) + # Vertex Group for vg in ob1.vertex_groups: vg_name = vg.name if vg_name in ob0.vertex_groups.keys(): - vg_name = '_{}_'.format(vg_name) - new_patch.vertex_groups.new(name=vg_name) + if bool_vertex_group: + vg_name = '{} (Component)'.format(vg_name) + else: + vg_name = vg_name + #new_patch.vertex_groups.new(name=vg_name) + new_patch.vertex_groups[vg.name].name = vg_name + if bool_vertex_group: new_groups = [] for vg in ob0.vertex_groups: @@ -1253,7 +1286,7 @@ class tissue_tessellate(Operator): min=1, soft_max=5, description="Automatically repeat the Tessellation using the " - + "generated geometry as new base object.\nUseful for " + + "generated geometry as new base object.\nUsefull for " + "for branching systems. Dangerous!" ) bool_combine : BoolProperty( @@ -1350,13 +1383,21 @@ class tissue_tessellate(Operator): name="Frame Thickness", default=0.2, min=0, - soft_max=2, + soft_max=1, description="Frame Thickness" ) + frame_boundary_thickness : FloatProperty( + name="Frame Boundary Thickness", + default=0, + min=0, + soft_max=1, + description="Frame Boundary Thickness (if zero, it uses the Frame Thickness instead)" + ) frame_mode : EnumProperty( items=( ('CONSTANT', 'Constant', 'Even thickness'), - ('RELATIVE', 'Relative', 'Frame offset depends on face areas')), + ('RELATIVE', 'Relative', 'Frame offset depends on face areas'), + ('CENTER', 'Center', 'Toward the center of the face (uses Incenter for Triangles)')), default='CONSTANT', name="Offset" ) @@ -1387,7 +1428,7 @@ class tissue_tessellate(Operator): ) use_origin_offset : BoolProperty( name="Align to Origins", - default=False, + default=True, description="Define offset according to components origin and local Z coordinate" ) @@ -1407,6 +1448,27 @@ class tissue_tessellate(Operator): description="Thickness factor to use for zero vertex group influence" ) + vertex_group_frame_thickness : StringProperty( + name="Frame thickness weight", default='', + description="Vertex Group used for frame thickness" + ) + invert_vertex_group_frame_thickness : BoolProperty( + name="Invert", default=False, + description="Invert the vertex group influence" + ) + vertex_group_frame_thickness_factor : FloatProperty( + name="Factor", + default=0, + min=0, + max=1, + description="Thickness factor to use for zero vertex group influence" + ) + face_weight_frame : BoolProperty( + name="Face Weight", + default=True, + description="Uniform weight for individual faces" + ) + vertex_group_distribution : StringProperty( name="Distribution weight", default='', description="Vertex Group used for gradient distribution" @@ -1520,6 +1582,11 @@ class tissue_tessellate(Operator): name="Automatic Rotation", default=False, description="Automatically rotate the boundary faces" ) + preserve_quads : BoolProperty( + name="Preserve Quads", + default=False, + description="Quad faces are tessellated using QUAD mode" + ) working_on = "" @@ -1568,7 +1635,7 @@ class tissue_tessellate(Operator): if no_components: layout = self.layout layout.label(icon='OUTLINER_COLLECTION', text='Components from Active Collection') - layout.label(icon='INFO', text="The Active Collection does 
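The same two-step remap (optional invert, then lifting the lower bound to the factor) is applied to `weight_thickness` above and to the new frame-thickness weight later in this patch. As a standalone sketch, with a function name of my own:

```python
import numpy as np

def remap_weight(w, invert=False, factor=0.0):
    """Optionally invert a 0..1 weight, then remap it so that zero influence
    still yields `factor` (mirrors `weight*(1-fact) + fact` in the add-on)."""
    w = np.asarray(w, dtype=float)
    if invert:
        w = 1.0 - w
    if factor > 0:
        w = w * (1.0 - factor) + factor
    return w

print(remap_weight([0.0, 0.5, 1.0], factor=0.25))  # -> [0.25  0.625  1.0]
```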
not contain any Mesh,") + layout.label(icon='INFO', text="The Active Collection does not containt any Mesh,") layout.label(text="Curve, Surface, Meta or Text object.") return elif self.component_mode == 'MATERIALS': @@ -1646,9 +1713,21 @@ class tissue_tessellate(Operator): if self.fill_mode == 'FRAME': col.separator() col.label(text="Frame Settings:") + col.prop(self, "preserve_quads", expand=True) + col.separator() row = col.row(align=True) row.prop(self, "frame_mode", expand=True) col.prop(self, "frame_thickness", text='Thickness', icon='NONE') + # Vertex Group Frame Thickness + row = col.row(align=True) + row.prop_search(self, 'vertex_group_frame_thickness', + ob0, "vertex_groups", text='') + col2 = row.column(align=True) + row2 = col2.row(align=True) + row2.prop(self, "invert_vertex_group_frame_thickness", text="", + toggle=True, icon='ARROW_LEFTRIGHT') + row2.prop(self, "vertex_group_frame_thickness_factor") + row2.enabled = self.vertex_group_frame_thickness in ob0.vertex_groups.keys() col.separator() row = col.row(align=True) row.prop(self, "fill_frame", icon='NONE') @@ -1661,6 +1740,10 @@ class tissue_tessellate(Operator): col2 = row.column(align=True) col2.prop(self, "boundary_mat_offset", icon='NONE') col2.enabled = self.frame_boundary and show_frame_mat + if self.frame_boundary: + col.separator() + row = col.row(align=True) + col.prop(self, "frame_boundary_thickness", icon='NONE') if self.rotation_mode == 'UV': uv_error = False @@ -1742,7 +1825,7 @@ class tissue_tessellate(Operator): bool_update = False if context.object == ob0: auto_layer_collection() - new_ob = convert_object_to_mesh(ob0,False,False) + new_ob = convert_object_to_mesh(ob0, False, False, self.rotation_mode!='UV') #/// new_ob.data.name = self.object_name new_ob.name = self.object_name else: @@ -1781,7 +1864,7 @@ class tissue_update_tessellate_deps(Operator): bl_idname = "object.tissue_update_tessellate_deps" bl_label = "Tissue Refresh" bl_description = ("Fast update the tessellated mesh according to base and " - "component changes") + "component changes.") bl_options = {'REGISTER', 'UNDO'} go = False @@ -1809,20 +1892,30 @@ class tissue_update_tessellate_deps(Operator): update_objects = list(reversed(update_dependencies(ob, update_objects))) #update_objects = list(reversed(update_dependencies(ob, [ob]))) for o in update_objects: - override = { - 'object': o, - 'selected_objects' : [o] - } - if o.type == 'MESH': - try: - bpy.ops.object.tissue_update_tessellate(override) - except: - self.report({'ERROR'}, "Can't Tessellate :-(") - else: - try: - bpy.ops.object.tissue_convert_to_curve_update(override) - except: - self.report({'ERROR'}, "Can't compute Curve :-(") + override = {'object': o, 'selected_objects': [o]} + with context.temp_override(**override): + if o.type == 'MESH': + if o.tissue.tissue_type == 'TESSELLATE': + try: + bpy.ops.object.tissue_update_tessellate() + except: + self.report({'ERROR'}, "Can't Tessellate :-(") + if o.tissue.tissue_type == 'POLYHEDRA': + try: + bpy.ops.object.tissue_update_polyhedra() + except: + self.report({'ERROR'}, "Can't compute Polyhedra :-(") + else: + if o.tissue.tissue_type == 'TO_CURVE': + try: + bpy.ops.object.tissue_update_convert_to_curve() + except: + self.report({'ERROR'}, "Can't compute Curve :-(") + if o.tissue.tissue_type == 'CONTOUR_CURVES': + try: + bpy.ops.object.tissue_update_contour_curves() + except: + self.report({'ERROR'}, "Can't compute Contour Curves :-(") context.view_layer.objects.active = active_ob for o in context.view_layer.objects: @@ -1849,13 
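The refresh operator above now wraps each per-object call in `Context.temp_override()` instead of passing an override dictionary as the first positional argument, a form Blender 4.0 no longer accepts. The same pattern in a standalone script (the object name is hypothetical; the operator id comes from the add-on):

```python
import bpy

ob = bpy.data.objects.get("Cube")  # hypothetical object name
if ob is not None:
    # Run the Tissue update as if `ob` were the active/selected object.
    with bpy.context.temp_override(object=ob, selected_objects=[ob]):
        try:
            bpy.ops.object.tissue_update_tessellate()
        except RuntimeError:
            print("Can't Tessellate", ob.name)
```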
+1942,11 @@ class tissue_update_tessellate(Operator): return False def execute(self, context): - - tissue_time(None,'Tissue: Tessellating...', levels=0) + ob = context.object + tissue_time(None,'Tissue: Tessellate of "{}"...'.format(ob.name), levels=0) start_time = time.time() - - ob = context.object - tess_props = props_to_dict(ob) + props = props_to_dict(ob) if not self.go: generator = ob.tissue_tessellate.generator component = ob.tissue_tessellate.component @@ -1897,6 +1988,7 @@ class tissue_update_tessellate(Operator): bridge_edges_crease = ob.tissue_tessellate.bridge_edges_crease bridge_smoothness = ob.tissue_tessellate.bridge_smoothness frame_thickness = ob.tissue_tessellate.frame_thickness + frame_boundary_thickness = ob.tissue_tessellate.frame_boundary_thickness frame_mode = ob.tissue_tessellate.frame_mode frame_boundary = ob.tissue_tessellate.frame_boundary fill_frame = ob.tissue_tessellate.fill_frame @@ -1910,6 +2002,10 @@ class tissue_update_tessellate(Operator): vertex_group_thickness = ob.tissue_tessellate.vertex_group_thickness invert_vertex_group_thickness = ob.tissue_tessellate.invert_vertex_group_thickness vertex_group_thickness_factor = ob.tissue_tessellate.vertex_group_thickness_factor + vertex_group_frame_thickness = ob.tissue_tessellate.vertex_group_frame_thickness + invert_vertex_group_frame_thickness = ob.tissue_tessellate.invert_vertex_group_frame_thickness + vertex_group_frame_thickness_factor = ob.tissue_tessellate.vertex_group_frame_thickness_factor + face_weight_frame = ob.tissue_tessellate.face_weight_frame vertex_group_distribution = ob.tissue_tessellate.vertex_group_distribution invert_vertex_group_distribution = ob.tissue_tessellate.invert_vertex_group_distribution vertex_group_distribution_factor = ob.tissue_tessellate.vertex_group_distribution_factor @@ -1941,7 +2037,7 @@ class tissue_update_tessellate(Operator): # reset messages ob.tissue_tessellate.warning_message_merge = '' - tess_props = props_to_dict(ob) + props = props_to_dict(ob) # Solve Local View issues local_spaces = [] @@ -2000,7 +2096,7 @@ class tissue_update_tessellate(Operator): components.append(ob1) if ob0.type == 'META': - base_ob = convert_object_to_mesh(ob0, False, True) + base_ob = convert_object_to_mesh(ob0, False, True, props['rotation_mode']!='UV') else: base_ob = ob0.copy() base_ob.data = ob0.data @@ -2018,7 +2114,8 @@ class tissue_update_tessellate(Operator): for mod in base_ob.modifiers: if mod.type == 'CLOTH': override = {'scene': scene, 'active_object': base_ob, 'point_cache': mod.point_cache} - bpy.ops.ptcache.bake(override, bake=True) + with context.temp_override(**override): + bpy.ops.ptcache.bake(bake=True) break base_ob.modifiers.update() @@ -2047,11 +2144,11 @@ class tissue_update_tessellate(Operator): ob.data.clear_geometry() # Faster with heavy geometries (from previous tessellations) for iter in range(iterations): - tess_props['generator'] = base_ob + props['generator'] = base_ob if iter > 0 and len(iter_objects) == 0: break if iter > 0 and normals_mode in ('SHAPEKEYS','OBJECT'): - tess_props['normals_mode'] = 'VERTS' + props['normals_mode'] = 'VERTS' same_iteration = [] matched_materials = [] @@ -2069,7 +2166,7 @@ class tissue_update_tessellate(Operator): components.append(None) else: components.append(None) - tess_props['component'] = components + props['component'] = components # patch subdivisions for additional iterations if iter > 0 and fill_mode == 'PATCH': temp_mod = base_ob.modifiers.new('Tissue_Subsurf', type='SUBSURF') @@ -2078,9 +2175,8 @@ class 
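The cloth point-cache bake in this hunk gets the same treatment: the cache is supplied through `temp_override` rather than through the removed dictionary argument. A minimal version of that call outside the operator, under the same assumptions:

```python
import bpy

ob = bpy.context.object
for mod in ob.modifiers:
    if mod.type == 'CLOTH':
        # ptcache.bake reads scene, object and point cache from the context.
        override = {'scene': bpy.context.scene,
                    'active_object': ob,
                    'point_cache': mod.point_cache}
        with bpy.context.temp_override(**override):
            bpy.ops.ptcache.bake(bake=True)
        break
```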
tissue_update_tessellate(Operator): # patch tessellation tissue_time(None,"Tessellate iteration...",levels=1) tt = time.time() - same_iteration = tessellate_patch(tess_props) + same_iteration = tessellate_patch(props) tissue_time(tt, "Tessellate iteration",levels=1) - tt = time.time() # if empty or error, continue @@ -2121,7 +2217,11 @@ class tissue_update_tessellate(Operator): # remove faces from last mesh bm = bmesh.new() if (fill_mode == 'PATCH' or gen_modifiers) and iter == 0: - last_mesh = simple_to_mesh(base_ob)#(ob0) + + if props['rotation_mode']!='UV': + last_mesh = simple_to_mesh_mirror(base_ob)#(ob0) + else: + last_mesh = simple_to_mesh(base_ob)#(ob0) else: last_mesh = iter_objects[-1].data.copy() bm.from_mesh(last_mesh) @@ -2152,7 +2252,7 @@ class tissue_update_tessellate(Operator): bpy.data.objects.remove(iter_objects[-1]) iter_objects = iter_objects[:-1] # set new base object for next iteration - base_ob = convert_object_to_mesh(new_ob,True,True) + base_ob = convert_object_to_mesh(new_ob,True,True, props['rotation_mode']!='UV') if iter < iterations-1: new_ob.data = base_ob.data # store new iteration and set transformations iter_objects.append(new_ob) @@ -2208,8 +2308,8 @@ class tissue_update_tessellate(Operator): for o in iter_objects: try: bpy.data.objects.remove(o) except: pass - try: bpy.data.meshes.remove(data1) - except: pass + #try: bpy.data.meshes.remove(data1) + #except: pass context.view_layer.objects.active = ob ob.select_set(True) message = errors[new_ob] @@ -2221,7 +2321,7 @@ class tissue_update_tessellate(Operator): # update data and preserve name if ob.type != 'MESH': loc, matr = ob.location, ob.matrix_world - ob = convert_object_to_mesh(ob,False,True) + ob = convert_object_to_mesh(ob,False,True,props['rotation_mode']!='UV') ob.location, ob.matrix_world = loc, matr data_name = ob.data.name old_data = ob.data @@ -2238,10 +2338,12 @@ class tissue_update_tessellate(Operator): ob.data.name = data_name bpy.data.meshes.remove(old_data) + ''' # copy vertex group for vg in new_ob.vertex_groups: if not vg.name in ob.vertex_groups.keys(): ob.vertex_groups.new(name=vg.name) + ''' selected_objects = [o for o in context.selected_objects] for o in selected_objects: o.select_set(False) @@ -2254,10 +2356,7 @@ class tissue_update_tessellate(Operator): use_bmesh = not (bool_shapekeys and fill_mode == 'PATCH' and component_mode != 'OBJECT') merge_components(new_ob, ob.tissue_tessellate, use_bmesh) - if bool_smooth: - bpy.ops.object.shade_smooth() - else: - bpy.ops.object.shade_flat() + if bool_smooth: bpy.ops.object.shade_smooth() for mesh in bpy.data.meshes: if not mesh.users: bpy.data.meshes.remove(mesh) @@ -2288,7 +2387,7 @@ class tissue_update_tessellate(Operator): tissue_time(tt, "Closing tessellation", levels=1) - tissue_time(start_time,'Tessellation of "{}"'.format(ob.name),levels=0) + tissue_time(start_time,'Tessellate',levels=0) return {'FINISHED'} def check(self, context): @@ -2320,9 +2419,17 @@ class TISSUE_PT_tessellate(Panel): col.operator("object.dual_mesh_tessellated", text='Dual Mesh', icon='SEQ_CHROMA_SCOPE') col.separator() + op = col.operator("object.polyhedral_wireframe", icon='MESH_CUBE', text='Polyhedral Decomposition') + op.mode = 'POLYHEDRA' + op = col.operator("object.polyhedral_wireframe", icon='MOD_WIREFRAME', text='Polyhedral Wireframe') + op.mode = 'WIREFRAME' + col.separator() + #col.label(text="Curves:") col.operator("object.tissue_convert_to_curve", icon='OUTLINER_OB_CURVE', text="Convert to Curve") - 
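The new panel entries create two buttons for the same `object.polyhedral_wireframe` operator (registered by Tissue's polyhedra module) by assigning to the operator-properties pointer returned by `UILayout.operator()`. A minimal standalone panel showing that pattern; the panel idname, label and placement here are my own:

```python
import bpy

class EXAMPLE_PT_polyhedra_buttons(bpy.types.Panel):
    bl_idname = "EXAMPLE_PT_polyhedra_buttons"
    bl_label = "Polyhedra (example)"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = "Tissue"

    def draw(self, context):
        col = self.layout.column(align=True)
        # operator() returns OperatorProperties: set per-button values on it.
        op = col.operator("object.polyhedral_wireframe", text='Polyhedral Decomposition')
        op.mode = 'POLYHEDRA'
        op = col.operator("object.polyhedral_wireframe", text='Polyhedral Wireframe')
        op.mode = 'WIREFRAME'

bpy.utils.register_class(EXAMPLE_PT_polyhedra_buttons)
```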
#row.operator("object.tissue_convert_to_curve_update", icon='FILE_REFRESH', text='') + col.operator("object.tissue_weight_contour_curves_pattern", icon='FORCE_TURBULENCE', text="Contour Curves") + + #row.operator("object.tissue_update_convert_to_curve", icon='FILE_REFRESH', text='') col.separator() col.operator("object.tissue_update_tessellate_deps", icon='FILE_REFRESH', text='Refresh') ##### @@ -2337,7 +2444,6 @@ class TISSUE_PT_tessellate(Panel): col.separator() col.label(text="Other:") col.operator("object.dual_mesh", icon='SEQ_CHROMA_SCOPE') - col.operator("object.polyhedra_wireframe", icon='MOD_WIREFRAME', text='Polyhedra Wireframe') col.operator("object.lattice_along_surface", icon="OUTLINER_OB_LATTICE") act = context.object @@ -2389,7 +2495,7 @@ class TISSUE_PT_tessellate_object(Panel): col = layout.column(align=True) row = col.row(align=True) - set_tessellate_handler(self,context) + set_tissue_handler(self,context) ###### set_animatable_fix_handler(self,context) row.operator("object.tissue_update_tessellate_deps", icon='FILE_REFRESH', text='Refresh') #### lock_icon = 'LOCKED' if tissue_props.bool_lock else 'UNLOCKED' @@ -2398,8 +2504,10 @@ class TISSUE_PT_tessellate_object(Panel): row.prop(tissue_props, "bool_dependencies", text="", icon=deps_icon) row.prop(tissue_props, "bool_lock", text="", icon=lock_icon) col2 = row.column(align=True) - col2.prop(tissue_props, "bool_run", text="",icon='TIME') + col2.prop(tissue_props, "bool_run", text="", icon='TIME') col2.enabled = not tissue_props.bool_lock + col2 = row.column(align=True) + col2.operator("mesh.tissue_remove", text="", icon='X') #layout.use_property_split = True #layout.use_property_decorate = False # No animation. col = layout.column(align=True) @@ -2408,14 +2516,6 @@ class TISSUE_PT_tessellate_object(Panel): row.prop_search(props, "generator", context.scene, "objects") col2 = row.column(align=True) col2.prop(props, "gen_modifiers", text='Use Modifiers',icon='MODIFIER') - ''' - try: - if not (props.generator.modifiers or props.generator.data.shape_keys): - col2.enabled = False - except: - col2.enabled = False - ''' - #col.separator() layout.use_property_split = False # Fill @@ -2446,7 +2546,7 @@ class TISSUE_PT_tessellate_frame(Panel): try: bool_frame = context.object.tissue_tessellate.fill_mode == 'FRAME' bool_tessellated = context.object.tissue_tessellate.generator != None - return context.object.type == 'MESH' and bool_frame and bool_tessellated + return context.object.type == 'MESH' and bool_frame and bool_tessellated and context.object.tissue.tissue_type == 'TESSELLATE' except: return False @@ -2455,10 +2555,28 @@ class TISSUE_PT_tessellate_frame(Panel): props = ob.tissue_tessellate layout = self.layout col = layout.column(align=True) + col.prop(props, "preserve_quads") + col.separator() row = col.row(align=True) row.prop(props, "frame_mode", expand=True) row = col.row(align=True) row.prop(props, "frame_thickness", icon='NONE', expand=True) + + # Vertex Group Frame Thickness + row = col.row(align=True) + ob0 = props.generator + row.prop_search(props, 'vertex_group_frame_thickness', + ob0, "vertex_groups", text='') + col2 = row.column(align=True) + row2 = col2.row(align=True) + row2.prop(props, "invert_vertex_group_frame_thickness", text="", + toggle=True, icon='ARROW_LEFTRIGHT') + row2.prop(props, "vertex_group_frame_thickness_factor") + row2.enabled = props.vertex_group_frame_thickness in ob0.vertex_groups.keys() + row = col.row(align=True) + row.prop(props, "face_weight_frame") + row.enabled = 
props.vertex_group_frame_thickness in ob0.vertex_groups.keys() + col.separator() row = col.row(align=True) row.prop(props, "fill_frame", icon='NONE') @@ -2471,6 +2589,10 @@ class TISSUE_PT_tessellate_frame(Panel): col2 = row.column(align=True) col2.prop(props, "boundary_mat_offset", icon='NONE') col2.enabled = props.frame_boundary and show_frame_mat + if props.frame_boundary: + col.separator() + row = col.row(align=True) + col.prop(props, "frame_boundary_thickness", icon='NONE') class TISSUE_PT_tessellate_component(Panel): @@ -2507,13 +2629,6 @@ class TISSUE_PT_tessellate_component(Panel): row.prop_search(props, "component", context.scene, "objects") col2 = row.column(align=True) col2.prop(props, "com_modifiers", text='Use Modifiers',icon='MODIFIER') - ''' - try: - if not (props.component.modifiers or props.component.data.shape_keys): - col2.enabled = False - except: - col2.enabled = False - ''' elif props.component_mode == 'COLLECTION': col.separator() @@ -2653,6 +2768,7 @@ class TISSUE_PT_tessellate_rotation(Panel): row.separator() row.separator() row.separator() + ob0 = props['generator'] row.prop_search(props, 'vertex_group_rotation', ob0, "vertex_groups", text='Vertex Group') col2 = row.column(align=True) @@ -2820,6 +2936,8 @@ class TISSUE_PT_tessellate_options(Panel): def draw(self, context): ob = context.object props = ob.tissue_tessellate + ob0 = props.generator + ob1 = props.component layout = self.layout layout.use_property_split = True layout.use_property_decorate = False # No animation. @@ -3026,6 +3144,26 @@ class TISSUE_PT_tessellate_iterations(Panel): slider=False, toggle=False, icon_only=False, event=False, full_event=False, emboss=True, index=-1) +class tissue_remove(Operator): + bl_idname = "mesh.tissue_remove" + bl_label = "Tissue Remove" + bl_description = "Remove Tissue properties" + bl_options = {'REGISTER', 'UNDO'} + + def invoke(self, context, event): + return context.window_manager.invoke_props_dialog(self) + + def draw(self, context): + ob = context.object + layout = self.layout + col = layout.column(align=True) + col.label(text='This is a destructive operation! 
Are you sure?', icon='ERROR') + + def execute(self, context): + ob = context.active_object + ob.tissue.tissue_type = 'NONE' + return {'FINISHED'} + class tissue_rotate_face_right(Operator): bl_idname = "mesh.tissue_rotate_face_right" bl_label = "Tissue Rotate Faces Right" @@ -3190,9 +3328,8 @@ class tissue_rotate_face_left(Operator): return {'FINISHED'} - -def convert_to_frame(ob, props, use_modifiers): - new_ob = convert_object_to_mesh(ob, use_modifiers, True) +def convert_to_frame(ob, props, use_modifiers=True): + new_ob = convert_object_to_mesh(ob, use_modifiers, True,props['rotation_mode']!='UV') # create bmesh bm = bmesh.new() @@ -3202,14 +3339,18 @@ def convert_to_frame(ob, props, use_modifiers): bm.faces.ensure_lookup_table() if props['bool_selection']: original_faces = [f for f in bm.faces if f.select] + elif props['preserve_quads']: + original_faces = [f for f in bm.faces if len(f.verts)!=4] else: original_faces = list(bm.faces) + # detect edge loops loops = [] boundaries_mat = [] neigh_face_center = [] face_normals = [] + # append boundary loops if props['frame_boundary']: #selected_edges = [e for e in bm.edges if e.select] @@ -3223,7 +3364,7 @@ def convert_to_frame(ob, props, use_modifiers): face_center = [face.calc_center_median()] loop_normals = [face.normal] selected_edges = selected_edges[1:] - if props['bool_vertex_group']: + if props['bool_vertex_group'] or True: n_verts = len(new_ob.data.vertices) base_vg = [get_weight(vg,n_verts) for vg in new_ob.vertex_groups] while True: @@ -3265,7 +3406,7 @@ def convert_to_frame(ob, props, use_modifiers): vert_ids = [] # append regular faces - for f in original_faces:#bm.faces: + for f in original_faces: loop = list(f.verts) loops.append(loop) boundaries_mat.append([f.material_index for v in loop]) @@ -3282,6 +3423,21 @@ def convert_to_frame(ob, props, use_modifiers): else: area = 0 verts_area.append(area) + bool_weight_thick = props['vertex_group_frame_thickness'] in new_ob.vertex_groups.keys() + if bool_weight_thick: + vg = new_ob.vertex_groups[props['vertex_group_frame_thickness']] + weight_frame = get_weight_numpy(vg, len(bm.verts)) + if props['invert_vertex_group_frame_thickness']: + weight_frame = 1-weight_frame + fact = props['vertex_group_frame_thickness_factor'] + if fact > 0: + weight_frame = weight_frame*(1-fact) + fact + else: + weight_frame = np.ones((len(bm.verts))) + + centers_neigh = [] + centers_id = [] + verts_count = len(bm.verts)-1 for loop_index, loop in enumerate(loops): is_boundary = loop_index < len(neigh_face_center) materials = boundaries_mat[loop_index] @@ -3305,7 +3461,11 @@ def convert_to_frame(ob, props, use_modifiers): normal = face_normals[loop_index][i] tan0 = normal.cross(vec0) tan1 = normal.cross(vec1) - tangent = (tan0 + tan1).normalized()/sin(ang)*props['frame_thickness'] + if is_boundary and props['frame_boundary_thickness'] != 0: + thickness = props['frame_boundary_thickness'] + else: + thickness = props['frame_thickness'] + tangent = (tan0 + tan1).normalized()/sin(ang)*thickness tangents.append(tangent) # calc correct direction for boundaries @@ -3319,16 +3479,54 @@ def convert_to_frame(ob, props, use_modifiers): dir_val += tangent.dot(vert.co - surf_point) if dir_val > 0: mult = 1 + if props['frame_mode'] == 'CENTER': + # uses incenter for triangular loops and average point for generic polygons + polygon_loop = list(dict.fromkeys(loop_ext)) + if len(polygon_loop) == 3: + loop_center = incenter([v.co for v in polygon_loop]) + else: + loop_center = Vector((0,0,0)) + for v in polygon_loop: + 
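The new `tissue_remove` operator above uses `invoke_props_dialog` so its destructive reset only runs after the user confirms the dialog. The same pattern in isolation (idname, label and message are placeholders, not Tissue's):

```python
import bpy

class OBJECT_OT_confirm_example(bpy.types.Operator):
    bl_idname = "object.confirm_example"
    bl_label = "Confirm Example"
    bl_options = {'REGISTER', 'UNDO'}

    def invoke(self, context, event):
        # Pop up a dialog showing draw() plus an OK button; OK calls execute().
        return context.window_manager.invoke_props_dialog(self)

    def draw(self, context):
        self.layout.label(text='Are you sure?', icon='ERROR')

    def execute(self, context):
        self.report({'INFO'}, "Confirmed")
        return {'FINISHED'}

bpy.utils.register_class(OBJECT_OT_confirm_example)
```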
loop_center += v.co + loop_center /= len(polygon_loop) + # add vertices + central_vertex = None + skip_vertex = False for i in range(len(loop)): vert = loop_ext[i+1] if props['frame_mode'] == 'RELATIVE': area = verts_area[vert.index] else: area = 1 - new_co = vert.co + tangents[i] * mult * area + if props['face_weight_frame']: + weight_factor = [weight_frame[v.index] for v in loop_ext] + weight_factor = sum(weight_factor)/len(weight_factor) + else: + weight_factor = weight_frame[vert.index] + if props['frame_mode'] == 'CENTER': + if is_boundary: + new_co = vert.co + tangents[i] * mult * weight_factor + else: + factor = weight_factor*props['frame_thickness'] + if factor == 1 and props['frame_thickness']: + skip_vertex = True + else: + new_co = vert.co + (loop_center-vert.co)*factor + else: + new_co = vert.co + tangents[i] * mult * area * weight_factor # add vertex - new_vert = bm.verts.new(new_co) + if skip_vertex: + # prevents dublicates in the center of the loop + if central_vertex: + new_vert = central_vertex + else: + central_vertex = bm.verts.new(loop_center) + new_vert = central_vertex + vert_ids.append(vert.index) + skip_vertex = False + else: + new_vert = bm.verts.new(new_co) + vert_ids.append(vert.index) new_loop.append(new_vert) - vert_ids.append(vert.index) new_loop.append(new_loop[0]) # add faces @@ -3340,31 +3538,40 @@ def convert_to_frame(ob, props, use_modifiers): v3 = new_loop[i] face_verts = [v1,v0,v3,v2] if mult == -1: face_verts = [v0,v1,v2,v3] + face_verts = list(dict.fromkeys(face_verts)) new_face = bm.faces.new(face_verts) new_face.material_index = materials[i+1] new_face.select = True new_faces.append(new_face) # fill frame if props['fill_frame'] and not is_boundary: + center_neigh = [] n_verts = len(new_loop)-1 loop_center = Vector((0,0,0)) - for v in new_loop[1:]: loop_center += v.co + for v in new_loop[1:]: + loop_center += v.co + verts_count += 1 + center_neigh.append(verts_count) + centers_neigh.append(center_neigh) loop_center /= n_verts center = bm.verts.new(loop_center) + verts_count += 1 + vert_ids.append(center.index) + centers_id.append(verts_count) for i in range(n_verts): v0 = new_loop[i+1] v1 = new_loop[i] face_verts = [v1,v0,center] + face_verts = list(dict.fromkeys(face_verts)) + if len(face_verts) < 3: continue new_face = bm.faces.new(face_verts) new_face.material_index = materials[i] + props['fill_frame_mat'] new_face.select = True new_faces.append(new_face) - #bpy.ops.object.mode_set(mode='OBJECT') - #for f in bm.faces: f.select_set(f not in new_faces) for f in original_faces: bm.faces.remove(f) bm.to_mesh(new_ob.data) # propagate vertex groups - if props['bool_vertex_group']: + if props['bool_vertex_group'] or bool_weight_thick: base_vg = [] for vg in new_ob.vertex_groups: vertex_group = [] @@ -3378,6 +3585,13 @@ def convert_to_frame(ob, props, use_modifiers): for vg_id, vg in enumerate(new_ob.vertex_groups): for ii, jj in zip(vert_ids, new_vert_ids): vg.add([jj], base_vg[vg_id][ii], 'REPLACE') + # set weight for the central points + if props['fill_frame']: + for cn, ii in zip(centers_neigh, centers_id): + cw = [vg.weight(cni) for cni in cn] + cw = sum(cw)/len(cw) + vg.add([ii], cw, 'REPLACE') + new_ob.data.update() bm.free() return new_ob @@ -3386,7 +3600,7 @@ def reduce_to_quads(ob, props): ''' Convert an input object to a mesh with polygons that have maximum 4 vertices ''' - new_ob = convert_object_to_mesh(ob, props['gen_modifiers'], True) + new_ob = convert_object_to_mesh(ob, props['gen_modifiers'], True, props['rotation_mode']!='UV') me = 
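The new `CENTER` frame mode offsets each loop toward the face centre and, for triangles, toward the incenter returned by the `incenter()` helper (defined elsewhere in Tissue, not in this diff). For reference, a standalone incenter computation with `mathutils`, weighting each vertex by the length of the opposite side:

```python
from mathutils import Vector

def triangle_incenter(a, b, c):
    """Incenter = (la*A + lb*B + lc*C) / (la + lb + lc),
    where la is the length of the side opposite vertex A, and so on."""
    la = (b - c).length
    lb = (a - c).length
    lc = (a - b).length
    return (la * a + lb * b + lc * c) / (la + lb + lc)

# 3-4-5 right triangle: the incenter sits one unit from both legs.
print(triangle_incenter(Vector((0, 0, 0)), Vector((4, 0, 0)), Vector((0, 3, 0))))
# -> <Vector (1.0, 1.0, 0.0)>
```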
new_ob.data # Check if there are polygons with more than 4 sides @@ -3445,7 +3659,7 @@ def reduce_to_quads(ob, props): return new_ob def convert_to_fan(ob, props, add_id_layer=False): - new_ob = convert_object_to_mesh(ob, props['gen_modifiers'], True) + new_ob = convert_object_to_mesh(ob, props['gen_modifiers'], True, props['rotation_mode']!='UV') bm = bmesh.new() bm.from_mesh(new_ob.data) if add_id_layer: @@ -3464,7 +3678,7 @@ def convert_to_fan(ob, props, add_id_layer=False): return new_ob def convert_to_triangles(ob, props): - new_ob = convert_object_to_mesh(ob, props['gen_modifiers'], True) + new_ob = convert_object_to_mesh(ob, props['gen_modifiers'], True, props['rotation_mode']!='UV') bm = bmesh.new() bm.from_mesh(new_ob.data) bmesh.ops.triangulate(bm, faces=bm.faces, quad_method='FIXED', ngon_method='BEAUTY') @@ -3588,6 +3802,8 @@ def merge_components(ob, props, use_bmesh): bpy.ops.object.mode_set(mode='OBJECT') except: pass else: + if(props.bridge_edges_crease>0 or props.open_edges_crease>0): + ob.data.edge_creases_ensure() bm = bmesh.new() bm.from_mesh(ob.data.copy()) if props.merge_open_edges_only: @@ -3602,14 +3818,17 @@ def merge_components(ob, props, use_bmesh): if props.close_mesh != 'NONE': bm.edges.ensure_lookup_table() # set crease - crease_layer = bm.edges.layers.float.new("crease_edge") + crease_layer = bm.edges.layers.float.new('crease_edge') boundary_edges = [e for e in bm.edges if e.is_boundary or e.is_wire] + n_materials = len(ob.material_slots)-1 if props.close_mesh == 'BRIDGE': try: for e in boundary_edges: e[crease_layer] = props.bridge_edges_crease closed = bmesh.ops.bridge_loops(bm, edges=boundary_edges, use_pairs=True) - for f in closed['faces']: f.material_index += props.bridge_material_offset + if n_materials >= 0: + for f in closed['faces']: + f.material_index = min(f.material_index + props.bridge_material_offset, n_materials) except: bm.to_mesh(ob.data) return 'bridge_error' @@ -3617,7 +3836,9 @@ def merge_components(ob, props, use_bmesh): for e in boundary_edges: e[crease_layer] = props.open_edges_crease closed = bmesh.ops.holes_fill(bm, edges=boundary_edges) - for f in closed['faces']: f.material_index += props.cap_material_offset + if n_materials >= 0: + for f in closed['faces']: + f.material_index = min(f.material_index + props.cap_material_offset, n_materials) elif props.close_mesh == 'BRIDGE_CAP': # BRIDGE dvert_lay = bm.verts.layers.deform.active @@ -3630,7 +3851,9 @@ def merge_components(ob, props, use_bmesh): for e in bridge_edges: e[crease_layer] = props.bridge_edges_crease closed = bmesh.ops.bridge_loops(bm, edges=bridge_edges, use_pairs=True) - for f in closed['faces']: f.material_index += props.bridge_material_offset + if n_materials >= 0: + for f in closed['faces']: + f.material_index = min(f.material_index + props.bridge_material_offset, n_materials) boundary_edges = [e for e in bm.edges if e.is_boundary] except: pass # CAP @@ -3643,7 +3866,9 @@ def merge_components(ob, props, use_bmesh): for e in cap_edges: e[crease_layer] = props.open_edges_crease closed = bmesh.ops.holes_fill(bm, edges=cap_edges) - for f in closed['faces']: f.material_index += props.cap_material_offset + if n_materials >= 0: + for f in closed['faces']: + f.material_index = min(f.material_index + props.bridge_material_offset, n_materials) except: pass bm.to_mesh(ob.data) @@ -3676,13 +3901,13 @@ class tissue_render_animation(Operator): self.report({'ERROR'}, message) return {'CANCELLED'} ''' - remove_tessellate_handler() + remove_tissue_handler() scene = context.scene if 
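In the merge code above, edge crease goes through the generic float attribute that Blender 4.0 uses for creases: `edge_creases_ensure()` guarantees the attribute exists, and the bmesh float layer named `'crease_edge'` maps to it. A minimal sketch of the same idea, assuming that attribute name as the hunk does:

```python
import bpy, bmesh

ob = bpy.context.object
ob.data.edge_creases_ensure()          # make sure the crease attribute exists (4.0 API)

bm = bmesh.new()
bm.from_mesh(ob.data)
layer = bm.edges.layers.float.get('crease_edge') or bm.edges.layers.float.new('crease_edge')
for e in bm.edges:
    if e.is_boundary or e.is_wire:
        e[layer] = 1.0                 # full crease on open and wire edges
bm.to_mesh(ob.data)
bm.free()
ob.data.update()
```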
event.type == 'ESC' or scene.frame_current >= scene.frame_end: scene.render.filepath = self.path # set again the handler blender_handlers = bpy.app.handlers.frame_change_post - blender_handlers.append(anim_tessellate) + blender_handlers.append(anim_tissue) blender_handlers.append(reaction_diffusion_scene) context.window_manager.event_timer_remove(self.timer) if event.type == 'ESC': @@ -3705,7 +3930,7 @@ class tissue_render_animation(Operator): scene = context.scene if self.start: - remove_tessellate_handler() + remove_tissue_handler() reaction_diffusion_remove_handler(self, context) scene = context.scene scene.frame_current = scene.frame_start @@ -3715,7 +3940,7 @@ class tissue_render_animation(Operator): self.start = False else: scene.frame_current += scene.frame_step - anim_tessellate(scene) + anim_tissue(scene) reaction_diffusion_scene(scene) scene.render.filepath = "{}{:04d}".format(self.path,scene.frame_current) bpy.ops.render.render(write_still=True) @@ -3729,7 +3954,7 @@ def offset_boundary_materials(bm, boundary_mat_offset=0, boundary_variable_offse bound_verts_value = [0]*len(bm.faces) bound_edges_value = [0]*len(bm.faces) shift_faces = [0]*len(bm.faces) - # store boundaries information + # store boundaries informations for v in bm.verts: if v.is_boundary: for f in v.link_faces: diff --git a/mesh_tissue/texture_reaction_diffusion.py b/mesh_tissue/texture_reaction_diffusion.py new file mode 100644 index 000000000..e77294b5f --- /dev/null +++ b/mesh_tissue/texture_reaction_diffusion.py @@ -0,0 +1,694 @@ +# SPDX-License-Identifier: GPL-2.0-or-later + +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +#-------------------------- COLORS / GROUPS EXCHANGER -------------------------# +# # +# Vertex Color to Vertex Group allow you to convert colors channles to weight # +# maps. # +# The main purpose is to use vertex colors to store information when importing # +# files from other softwares. The script works with the active vertex color # +# slot. # +# For use the command "Vertex Clors to Vertex Groups" use the search bar # +# (space bar). 
# +# # +# (c) Alessandro Zomparelli # +# (2017) # +# # +# http://www.co-de-it.com/ # +# # +################################################################################ + +import bpy, bmesh, os +import numpy as np +import math, timeit, time +from math import pi +from statistics import mean, stdev +from mathutils import Vector +from mathutils.kdtree import KDTree +from numpy import * +try: from .numba_functions import run_tex_rd, run_tex_rd_ani +except: pass +#from .numba_functions import integrate_field +#from .numba_functions import numba_reaction_diffusion +try: import numexpr as ne +except: pass + +# Reaction-Diffusion cache +from pathlib import Path +import random as rnd +import string + +from bpy.types import ( + Operator, + Panel, + PropertyGroup, + ) + +from bpy.props import ( + BoolProperty, + EnumProperty, + FloatProperty, + IntProperty, + StringProperty, + FloatVectorProperty, + IntVectorProperty +) + +from .utils import * + + +def tex_reaction_diffusion_add_handler(self, context): + # remove existing handlers + tex_reaction_diffusion_remove_handler(self, context) + # add new handler + bpy.app.handlers.frame_change_post.append(tex_rd_scene) + + +def tex_reaction_diffusion_remove_handler(self, context): + # remove existing handlers + old_handlers = [] + for h in bpy.app.handlers.frame_change_post: + if "tex_rd" in str(h): + old_handlers.append(h) + for h in old_handlers: bpy.app.handlers.frame_change_post.remove(h) + + +class tex_reaction_diffusion_prop(PropertyGroup): + run : BoolProperty(default=False, update = tex_reaction_diffusion_add_handler, + description='Compute a new iteration on frame changes. Currently is not working during Render Animation') + + res_x : IntProperty( + name="Resolution X", default=512, min=2, soft_max=1000, + description="Resolution of the simulation") + + res_y : IntProperty( + name="Resolution Y", default=512, min=2, soft_max=1000, + description="Resolution of the simulation") + + time_steps : IntProperty( + name="Steps", default=10, min=0, soft_max=50, + description="Number of Steps") + + dt : FloatProperty( + name="dt", default=1, min=0, soft_max=0.2, + description="Time Step") + + diff_a : FloatProperty( + name="Diff A", default=0.14, min=0, soft_max=2, precision=3, + description="Diffusion A") + + diff_b : FloatProperty( + name="Diff B", default=0.07, min=0, soft_max=2, precision=3, + description="Diffusion B") + + f : FloatProperty( + name="f", default=0.055, soft_min=0.01, soft_max=0.06, precision=4, step=0.05, + description="Feed Rate") + + k : FloatProperty( + name="k", default=0.062, soft_min=0.035, soft_max=0.065, precision=4, step=0.05, + description="Kill Rate") + + diff_mult : FloatProperty( + name="Scale", default=1, min=0, soft_max=1, max=10, precision=2, + description="Multiplier for the diffusion of both substances") + + anisotropy : FloatProperty( + name="Anisotropy", default=0.5, min=0, max=1, precision=2, + description="Influence of the Vector Field") + + img_vector_field : StringProperty( + name="Vector Field", default='', + description="Image used for the Vector Field. 
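The new module registers its per-frame callback with the same pattern used elsewhere in Tissue: strip any stale `frame_change_post` handlers whose repr matches, then append a fresh one, so repeated registration never stacks duplicates. A standalone version with names of my own:

```python
import bpy

def my_frame_handler(scene, depsgraph=None):
    print("frame changed to", scene.frame_current)

def remove_my_handler():
    stale = [h for h in bpy.app.handlers.frame_change_post
             if "my_frame_handler" in str(h)]
    for h in stale:
        bpy.app.handlers.frame_change_post.remove(h)

def add_my_handler():
    remove_my_handler()   # avoid duplicates when re-registering
    bpy.app.handlers.frame_change_post.append(my_frame_handler)

add_my_handler()
```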
RGB to XY") + + img_a : StringProperty( + name="A", default='', + description="Image used for the chemical A") + + img_b : StringProperty( + name="B", default='', + description="Image used for the chemical B") + + img_diff_a : StringProperty( + name="Diff A", default='', + description="Image used for A diffusion") + + img_diff_b : StringProperty( + name="Diff B", default='', + description="Image used for B diffusion") + + img_scale : StringProperty( + name="Scale", default='', + description="Image used for Scale value") + + img_f : StringProperty( + name="f", default='', + description="Image used for Feed value (f)") + + img_k : StringProperty( + name="k", default='', + description="Image used for Kill value (k)") + + img_brush : StringProperty( + name="Brush", default='', + description="Image used for adding/removing B") + + invert_img_diff_a : BoolProperty(default=False, + description='Invert the value of the Vertex Group Diff A') + + invert_img_diff_b : BoolProperty(default=False, + description='Invert the value of the Vertex Group Diff B') + + invert_img_scale : BoolProperty(default=False, + description='Invert the value of the Vertex Group Scale') + + invert_img_f : BoolProperty(default=False, + description='Invert the value of the Vertex Group f') + + invert_img_k : BoolProperty(default=False, + description='Invert the value of the Vertex Group k') + + invert_img_vector_field : BoolProperty(default=False, + description='Use the perpendicular direction') + + min_diff_a : FloatProperty( + name="Min Diff A", default=0.1, min=0, soft_max=2, precision=3, + description="Min Diff A") + + max_diff_a : FloatProperty( + name="Max Diff A", default=0.1, min=0, soft_max=2, precision=3, + description="Max Diff A") + + min_diff_b : FloatProperty( + name="Min Diff B", default=0.1, min=0, soft_max=2, precision=3, + description="Min Diff B") + + max_diff_b : FloatProperty( + name="Max Diff B", default=0.1, min=0, soft_max=2, precision=3, + description="Max Diff B") + + min_scale : FloatProperty( + name="Scale", default=0.35, min=0, soft_max=1, max=10, precision=2, + description="Min Scale Value") + + max_scale : FloatProperty( + name="Scale", default=1, min=0, soft_max=1, max=10, precision=2, + description="Max Scale value") + + min_f : FloatProperty( + name="Min f", default=0.02, min=0, soft_min=0.01, soft_max=0.06, max=0.2, precision=4, step=0.05, + description="Min Feed Rate") + + max_f : FloatProperty( + name="Max f", default=0.055, min=0, soft_min=0.01, soft_max=0.06, max=0.2, precision=4, step=0.05, + description="Max Feed Rate") + + min_k : FloatProperty( + name="Min k", default=0.035, min=0, soft_min=0.035, soft_max=0.065, max=0.2, precision=4, step=0.05, + description="Min Kill Rate") + + max_k : FloatProperty( + name="Max k", default=0.062, min=0, soft_min=0.035, soft_max=0.065, max=0.2, precision=4, step=0.05, + description="Max Kill Rate") + + brush_mult : FloatProperty( + name="Mult", default=0.5, min=-1, max=1, precision=3, step=0.05, + description="Multiplier for brush value") + + bool_cache : BoolProperty( + name="Use Cache", default=False, + description="Read modifiers affect the vertex groups") + + cache_frame_start : IntProperty( + name="Start", default=1, + description="Frame on which the simulation starts") + + cache_frame_end : IntProperty( + name="End", default=250, + description="Frame on which the simulation ends") + + cache_dir : StringProperty( + name="Cache directory", default="", subtype='FILE_PATH', + description = 'Directory that contains Reaction-Diffusion cache files' 
+ ) + + normalize : BoolProperty( + name="Normalize values", default=False, + description="Normalize values from 0 to 1") + +def tex_rd_scene(scene, bake=False): + for ob in bpy.context.scene.objects: + if ob.tex_reaction_diffusion_settings.run: + tex_reaction_diffusion_def(ob) + +def tex_reaction_diffusion_def(ob, bake=False): + try: + props = ob.tex_reaction_diffusion_settings + except: + return + scene = bpy.context.scene + print("Texture Reaction Diffusion: " + str(scene.frame_current)) + start_time = timeit.default_timer() + img_a = bpy.data.images[props.img_a] + img_b = bpy.data.images[props.img_b] + diff_a = props.diff_a + diff_b = props.diff_b + diff_a_min = props.min_diff_a + diff_a_max = props.max_diff_a + diff_b_min = props.min_diff_b + diff_b_max = props.max_diff_b + f_min = props.min_f + f_max = props.max_f + k_min = props.min_k + k_max = props.max_k + ani = props.anisotropy + dt = props.dt + time_steps = props.time_steps + res_x = props.res_x #int(img_b.size[0]) + res_y = props.res_y #int(img_b.size[1]) + + min_scale = props.min_scale + max_scale = props.max_scale + + images = bpy.data.images.keys() + rd_images = [img_a, img_b] + img_diff_a = None + img_diff_b = None + img_vector_field = None + img_f = None + img_k = None + img_scale = None + img_brush = None + if props.img_vector_field in images: + img_vector_field = bpy.data.images[props.img_vector_field] + rd_images.append(img_vector_field) + if props.img_diff_a in images: + img_diff_a = bpy.data.images[props.img_diff_a] + rd_images.append(img_diff_a) + if props.img_diff_b in images: + img_diff_b = bpy.data.images[props.img_diff_b] + rd_images.append(img_diff_b) + if props.img_f in images: + img_f = bpy.data.images[props.img_f] + rd_images.append(img_f) + if props.img_k in images: + img_k = bpy.data.images[props.img_k] + rd_images.append(img_k) + if props.img_scale in images: + img_scale = bpy.data.images[props.img_scale] + rd_images.append(img_scale) + if props.img_brush in images: + img_brush = bpy.data.images[props.img_brush] + rd_images.append(img_brush) + for im in rd_images: + im.scale(res_x ,res_y) + im.pixels.update() + nx = res_y + ny = res_x + + a_px = np.float32(np.zeros(nx*ny*4)) + img_a.pixels.foreach_get(a_px) + b_px = np.float32(np.zeros(nx*ny*4)) + img_b.pixels.foreach_get(b_px) + if img_vector_field: + vf_px = np.float32(np.zeros(nx*ny*4)) + img_vector_field.pixels.foreach_get(vf_px) + vf_px = np.array(vf_px).reshape((-1,4)) + vf_x = vf_px[:,1]*2-1 + vf_x = vf_x.reshape((nx,ny)) + vf_y = vf_px[:,0]*2-1 + vf_y = vf_y.reshape((nx,ny)) + + # original field + vf_x_ = sqrt(2)/2*vf_x + vf_y_ = sqrt(2)/2*vf_y + vf_xy1_ = abs(vf_x_ + vf_y_) + vf_xy2_ = abs(vf_x_ - vf_y_) + vf_xy1 = (vf_xy1_*ani + (1-ani))*sqrt(2)/2 + vf_xy2 = (vf_xy2_*ani + (1-ani))*sqrt(2)/2 + vf_x_ = abs(vf_x)*ani + (1-ani) + vf_y_ = abs(vf_y)*ani + (1-ani) + vf1 = np.concatenate((vf_x_[np.newaxis,:,:], vf_y_[np.newaxis,:,:], vf_xy1[np.newaxis,:,:], vf_xy2[np.newaxis,:,:]), axis=0) + + # perpendicular field + vf_x, vf_y = -vf_y, vf_x + vf_x_ = sqrt(2)/2*vf_x + vf_y_ = sqrt(2)/2*vf_y + vf_xy1_ = abs(vf_x_ + vf_y_) + vf_xy2_ = abs(vf_x_ - vf_y_) + vf_xy1 = (vf_xy1_*ani + (1-ani))*sqrt(2)/2 + vf_xy2 = (vf_xy2_*ani + (1-ani))*sqrt(2)/2 + vf_x = abs(vf_x)*ani + (1-ani) + vf_y = abs(vf_y)*ani + (1-ani) + vf2 = np.concatenate((vf_x[np.newaxis,:,:], vf_y[np.newaxis,:,:], vf_xy1[np.newaxis,:,:], vf_xy2[np.newaxis,:,:]), axis=0) + if props.invert_img_vector_field: + vf1, vf2 = vf2, vf1 + else: + vf = np.ones((1,nx,ny)) + vf_diag = 
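Every optional map is read through `np_remap_image_values(...)`, a helper that lives in `utils.py` and is not part of this diff. Judging from the keyword arguments used in the calls below (channel, min, max, invert), a plausible equivalent looks like this; treat it as an illustration of the expected behaviour, not the actual helper:

```python
import numpy as np

def remap_image_values(img, channel=0, min=0.0, max=1.0, invert=False):
    """Read one channel of a Blender image into a (height, width) float array
    and remap it from the 0..1 range to min..max, optionally inverted."""
    nx, ny = img.size[1], img.size[0]          # height, width
    px = np.empty(nx * ny * 4, dtype=np.float32)
    img.pixels.foreach_get(px)
    values = px.reshape((-1, 4))[:, channel].reshape((nx, ny))
    if invert:
        values = 1.0 - values
    return min + values * (max - min)
```

The parameter names shadow the Python built-ins only to match the call sites in the hunk.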
np.ones((1,nx,ny))*sqrt(2)/2 + vf1 = np.concatenate((vf, vf, vf_diag, vf_diag), axis=0) + vf2 = vf1 + + + if img_diff_a: + diff_a = np_remap_image_values(img_diff_a, channel=0, min=diff_a_min, max=diff_a_max, invert=props.invert_img_diff_a) + else: + diff_a = np.ones((nx,ny))*props.diff_a + + if img_diff_b: + diff_b = np_remap_image_values(img_diff_b, channel=0, min=diff_b_min, max=diff_b_max, invert=props.invert_img_diff_b) + else: + diff_b = np.ones((nx,ny))*props.diff_b + + if img_scale: + scale = np_remap_image_values(img_scale, channel=0, min=min_scale, max=max_scale, invert=props.invert_img_scale) + diff_a *= scale + diff_b *= scale + else: + diff_a *= props.diff_mult + diff_b *= props.diff_mult + + if img_f: + f = np_remap_image_values(img_f, channel=0, min=f_min, max=f_max, invert=props.invert_img_f) + else: + f = np.ones((nx,ny))*props.f + + if img_k: + k = np_remap_image_values(img_k, channel=0, min=k_min, max=k_max, invert=props.invert_img_k) + else: + k = np.ones((nx,ny))*props.k + + if img_brush: + brush = np_remap_image_values(img_brush)*props.brush_mult + else: + brush = np.zeros((nx,ny)) + + print("Load images: " + str(timeit.default_timer() - start_time) + " sec") + + start_time = timeit.default_timer() + + a_px = np.array(a_px).reshape((-1,4)) + a = a_px[:,0] + a = a.reshape((nx,ny)) + lap_a = np.zeros((nx,ny)) + + b_px = np.array(b_px).reshape((-1,4)) + b = b_px[:,0] + b = b.reshape((nx,ny)) + lap_b = np.zeros((nx,ny)) + + print("Reshape data time: " + str(timeit.default_timer() - start_time) + " sec") + + start_time = timeit.default_timer() + run_tex_rd_ani(a, b, lap_a, lap_b, diff_a, diff_b, f, k, dt, time_steps, vf1, vf2, brush) + print("Simulation time: " + str(timeit.default_timer() - start_time) + " sec") + + start_time = timeit.default_timer() + np.clip(a,0,1,out=a) + np.clip(b,0,1,out=b) + a = a.flatten() + b = b.flatten() + a_px[:,0] = a + a_px[:,1] = a + a_px[:,2] = a + b_px[:,0] = b + b_px[:,1] = b + b_px[:,2] = b + img_a.pixels.foreach_set(np.float32(a_px.flatten())) + img_b.pixels.foreach_set(np.float32(b_px.flatten())) + img_a.pixels.update() + img_b.pixels.update() + img_a.update() + img_b.update() + print("Stored Images: " + str(timeit.default_timer() - start_time) + " sec") + +class reset_tex_reaction_diffusion(Operator): + bl_idname = "object.reset_tex_reaction_diffusion" + bl_label = "Reset Texture Reaction Diffusion" + bl_description = ("Run a Reaction-Diffusion based on images: A and B") + bl_options = {'REGISTER', 'UNDO'} + + run : BoolProperty( + name="Run Reaction-Diffusion", default=True, description="Compute a new iteration on frame changes") + + time_steps : IntProperty( + name="Steps", default=10, min=0, soft_max=50, + description="Number of Steps") + + dt : FloatProperty( + name="dt", default=1, min=0, soft_max=0.2, + description="Time Step") + + diff_a : FloatProperty( + name="Diff A", default=0.14, min=0, soft_max=2, + description="Diffusion A") + + diff_b : FloatProperty( + name="Diff B", default=0.07, min=0, soft_max=2, + description="Diffusion B") + + f : FloatProperty( + name="f", default=0.055, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4, + description="Feed Rate") + + k : FloatProperty( + name="k", default=0.062, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4, + description="Kill Rate") + + def execute(self, context): + props = context.object.tex_reaction_diffusion_settings + props.dt = self.dt + props.time_steps = self.time_steps + props.f = self.f + props.k = self.k + props.diff_a = self.diff_a + 
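The actual integration happens in `run_tex_rd_ani` from `numba_functions`, which is not shown in this diff, but the values gathered here — `diff_a`, `diff_b`, `f`, `k`, `dt`, `time_steps` — are the usual Gray-Scott reaction-diffusion inputs. For reference, one isotropic Gray-Scott step in plain NumPy (a simplified sketch that leaves out the vector-field anisotropy and the brush term):

```python
import numpy as np

def gray_scott_step(a, b, diff_a, diff_b, f, k, dt):
    """A += (Da*lap(A) - A*B^2 + f*(1 - A))*dt
       B += (Db*lap(B) + A*B^2 - (k + f)*B)*dt"""
    def lap(u):  # 5-point Laplacian with wrap-around borders
        return (np.roll(u, 1, 0) + np.roll(u, -1, 0) +
                np.roll(u, 1, 1) + np.roll(u, -1, 1) - 4.0 * u)
    ab2 = a * b * b
    a += (diff_a * lap(a) - ab2 + f * (1.0 - a)) * dt
    b += (diff_b * lap(b) + ab2 - (k + f) * b) * dt
    return a, b

a = np.ones((128, 128))
b = np.zeros((128, 128))
b[60:68, 60:68] = 1.0          # seed some B in the middle
for _ in range(200):
    a, b = gray_scott_step(a, b, 0.14, 0.07, 0.055, 0.062, 1.0)
```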
props.diff_b = self.diff_b + res_x = props.res_x + res_y = props.res_y + img_a = bpy.data.images[props.img_a] + img_b = bpy.data.images[props.img_b] + img_a.scale(width=res_x, height=res_y) + img_b.scale(width=res_x, height=res_y) + img_a.pixels.foreach_set([1]*res_x*res_y*4) + img_b.pixels.foreach_set([0,0,0,1]*res_x*res_y) + img_a.pixels.update() + img_b.pixels.update() + img_a.update() + img_b.update() + + return {'FINISHED'} + +class start_tex_reaction_diffusion(Operator): + bl_idname = "object.start_tex_reaction_diffusion" + bl_label = "Start Texture Reaction Diffusion" + bl_description = ("Run a Reaction-Diffusion based on images: A and B") + bl_options = {'REGISTER', 'UNDO'} + + #res_x : IntProperty( + # name="Resolution X", default=512, min=2, soft_max=1000, + # description="Resolution of the simulation") + #res_y : IntProperty( + # name="Resolution Y", default=512, min=2, soft_max=1000, + # description="Resolution of the simulation") + + @classmethod + def poll(cls, context): + return True + + #def invoke(self, context, event): + # return context.window_manager.invoke_props_dialog(self) + + def execute(self, context): + tex_reaction_diffusion_add_handler(self, context) + set_animatable_fix_handler(self, context) + + ob = context.object + props = ob.tex_reaction_diffusion_settings + if props.img_a in bpy.data.images.keys(): + img_a = bpy.data.images[props.img_a] + img_a.scale(props.res_x, props.res_y) + else: + img_a = bpy.data.images.new(name="A", width=props.res_x, height=props.res_y) + if props.img_b in bpy.data.images.keys(): + img_b = bpy.data.images[props.img_b] + img_b.scale(props.res_x, props.res_y) + else: + img_b = bpy.data.images.new(name="B", width=props.res_x, height=props.res_y) + props.run = True + #props.res_x = self.res_x + #props.res_y = self.res_y + props.img_a = img_a.name + props.img_b = img_b.name + + #props.run = self.run + #props.dt = self.dt + #props.time_steps = self.time_steps + #props.f = self.f + #props.k = self.k + #props.diff_a = self.diff_a + #props.diff_b = self.diff_b + + return {'FINISHED'} + + +class TISSUE_PT_tex_reaction_diffusion(Panel): + bl_space_type = 'PROPERTIES' + bl_region_type = 'WINDOW' + bl_context = "object" + bl_label = "Tissue Texture Reaction-Diffusion" + bl_options = {'DEFAULT_CLOSED'} + + #@classmethod + #def poll(cls, context): + # return True + + def draw(self, context): + tex_reaction_diffusion_add_handler(self, context) + ob = bpy.context.object + props = ob.tex_reaction_diffusion_settings + img_a = props.img_a + img_b = props.img_b + layout = self.layout + col = layout.column(align=True) + row = col.row(align=True) + if not (img_a and img_b in bpy.data.images): + row.operator("object.start_tex_reaction_diffusion", + icon="EXPERIMENTAL") + col = layout.column(align=True) + row = col.row(align=True) + row.prop(props, 'res_x') + row.prop(props, 'res_y') + col.separator() + col.prop_search(props, 'img_a', bpy.data, "images") + col.prop_search(props, 'img_b', bpy.data, "images") + else: + row.operator("object.reset_tex_reaction_diffusion", + icon="EXPERIMENTAL") + row = col.row(align=True) + row.prop(props, "run", text="Run Reaction-Diffusion") + col = layout.column(align=True) + row = col.row(align=True) + row.prop(props, 'res_x') + row.prop(props, 'res_y') + col.separator() + col.prop_search(props, 'img_a', bpy.data, "images") + col.prop_search(props, 'img_b', bpy.data, "images") + col.separator() + row = col.row(align=True) + row.prop(props, "time_steps") + row.prop(props, "dt") + row.enabled = not props.bool_cache + 
col.separator() + row = col.row(align=True) + col1 = row.column(align=True) + col1.prop(props, "diff_a") + col1.enabled = props.img_diff_a == '' and not props.bool_cache + col1 = row.column(align=True) + col1.prop(props, "diff_b") + col1.enabled = props.img_diff_b == '' and not props.bool_cache + row = col.row(align=True) + row.prop(props, "diff_mult") + row.enabled = props.img_scale == '' and not props.bool_cache + #col.separator() + row = col.row(align=True) + col1 = row.column(align=True) + col1.prop(props, "f") + col1.enabled = props.img_f == '' and not props.bool_cache + col1 = row.column(align=True) + col1.prop(props, "k") + col1.enabled = props.img_k == '' and not props.bool_cache + ''' + col.separator() + col.label(text='Cache:') + #col.prop(props, "bool_cache") + col.prop(props, "cache_dir", text='') + col.separator() + row = col.row(align=True) + row.prop(props, "cache_frame_start") + row.prop(props, "cache_frame_end") + col.separator() + if props.bool_cache: + col.operator("object.reaction_diffusion_free_data") + else: + row = col.row(align=True) + row.operator("object.bake_reaction_diffusion") + file = bpy.context.blend_data.filepath + temp = bpy.context.preferences.filepaths.temporary_directory + if file == temp == props.cache_dir == '': + row.enabled = False + col.label(text="Cannot use cache", icon='ERROR') + col.label(text='please save the Blender or set a Cache directory') + ''' + +class TISSUE_PT_tex_reaction_diffusion_images(Panel): + bl_space_type = 'PROPERTIES' + bl_region_type = 'WINDOW' + bl_context = "object" + bl_parent_id = "TISSUE_PT_tex_reaction_diffusion" + bl_label = "Image Maps" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + props = context.object.tex_reaction_diffusion_settings + if props.img_a and props.img_b in bpy.data.images.keys(): + return True + else: + return False + + def draw(self, context): + ob = context.object + props = ob.tex_reaction_diffusion_settings + layout = self.layout + #layout.use_property_split = True + col = layout.column(align=True) + insert_image_parameter(col, ob, 'brush', text='Brush:') + insert_image_parameter(col, ob, 'diff_a', text='Diff A:') + insert_image_parameter(col, ob, 'diff_b', text='Diff B:') + insert_image_parameter(col, ob, 'scale', text='Scale:') + insert_image_parameter(col, ob, 'f', text='f:') + insert_image_parameter(col, ob, 'k', text='k:') + insert_image_parameter(col, ob, 'vector_field', text='Vector Field:') + col.enabled = not props.bool_cache + +def insert_image_parameter(col, ob, name, text=''): + props = ob.tex_reaction_diffusion_settings + split = col.split(factor=0.25, align=True) + col2 = split.column(align=True) + col2.label(text=text) + col2 = split.column(align=True) + row2 = col2.row(align=True) + row2.prop_search(props, 'img_' + name, bpy.data, "images", text='') + if name not in ('brush'): + if name == 'vector_field': icon = 'DRIVER_ROTATIONAL_DIFFERENCE'#'ORIENTATION_VIEW' + else: icon = 'ARROW_LEFTRIGHT' + row2.prop(props, "invert_img_" + name, text="", toggle=True, icon=icon) + if 'img_' + name in props: + if props['img_' + name] != '': + if name == 'brush': + col2.prop(props, "brush_mult") + elif name == 'vector_field': + col2.prop(props, "anisotropy") + else: + row2 = col2.row(align=True) + row2.prop(props, "min_" + name, text="Min") + row2 = col2.row(align=True) + row2.prop(props, "max_" + name, text="Max") + col.separator() diff --git a/mesh_tissue/tissue_properties.py b/mesh_tissue/tissue_properties.py index 7f3f6b9a3..a0b4d9148 100644 --- 
a/mesh_tissue/tissue_properties.py +++ b/mesh_tissue/tissue_properties.py @@ -1,7 +1,23 @@ -# SPDX-FileCopyrightText: 2022-2023 Blender Foundation -# # SPDX-License-Identifier: GPL-2.0-or-later +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + # ---------------------------- ADAPTIVE DUPLIFACES --------------------------- # # ------------------------------- version 0.84 ------------------------------- # # # @@ -29,7 +45,9 @@ from bpy.props import ( StringProperty, PointerProperty ) +from .utils import tissue_time from . import config +import time def update_dependencies(ob, objects): @@ -50,6 +68,8 @@ def get_deps(ob): return [ob.tissue_tessellate.generator, ob.tissue_tessellate.component] elif type == 'TO_CURVE': return [ob.tissue_to_curve.object] + elif type == 'POLYHEDRA': + return [ob.tissue_polyhedra.object] else: return [] def anim_tessellate_active(self, context): @@ -75,10 +95,9 @@ def anim_tessellate_object(ob): #from bpy.app.handlers import persistent -def anim_tessellate(scene, depsgraph=None): - print('Tissue: animating tessellations...') - - #config.evaluatedDepsgraph = depsgraph +def anim_tissue(scene, depsgraph=None): + tissue_time(None,'Tissue: Animating Tissue objects at frame {}...'.format(scene.frame_current), levels=0) + start_time = time.time() try: active_object = bpy.context.object @@ -109,10 +128,15 @@ def anim_tessellate(scene, depsgraph=None): override['mode'] = 'OBJECT' override['view_layer'] = scene.view_layers[0] break - if ob.tissue.tissue_type == 'TESSELLATE': - bpy.ops.object.tissue_update_tessellate(override) - elif ob.tissue.tissue_type == 'TO_CURVE': - bpy.ops.object.tissue_convert_to_curve_update(override) + with bpy.context.temp_override(**override): + if ob.tissue.tissue_type == 'TESSELLATE': + bpy.ops.object.tissue_update_tessellate() + elif ob.tissue.tissue_type == 'TO_CURVE': + bpy.ops.object.tissue_update_convert_to_curve() + elif ob.tissue.tissue_type == 'POLYHEDRA': + bpy.ops.object.tissue_update_polyhedra() + elif ob.tissue.tissue_type == 'CONTOUR_CURVES': + bpy.ops.object.tissue_update_contour_curves() if old_mode != None: objects = bpy.context.view_layer.objects @@ -121,68 +145,40 @@ def anim_tessellate(scene, depsgraph=None): bpy.ops.object.mode_set(mode=old_mode) config.evaluatedDepsgraph = None - print('end') + tissue_time(start_time,'Animated Tissue objects at frame {}'.format(scene.frame_current), levels=0) return -''' -def OLD_anim_tessellate(scene, depsgraph): - print('Tissue: animating tessellations...') - #global evaluatedDepsgraph - #print(evaluatedDepsgraph) - print(config.evaluatedDepsgraph) - config.evaluatedDepsgraph = depsgraph - print(config.evaluatedDepsgraph) - - try: - active_object = bpy.context.object - old_mode = bpy.context.object.mode - selected_objects = bpy.context.selected_objects 
- except: active_object = old_mode = selected_objects = None - - if old_mode in ('OBJECT', 'PAINT_WEIGHT') or True: - update_objects = [] - for ob in scene.objects: - if ob.tissue.bool_run and not ob.tissue.bool_lock: - if ob not in update_objects: update_objects.append(ob) - update_objects = list(reversed(update_dependencies(ob, update_objects))) - for ob in update_objects: - for window in bpy.context.window_manager.windows: - screen = window.screen - for area in screen.areas: - if area.type == 'VIEW_3D': - override = bpy.context.copy() - override['window'] = window - override['screen'] = screen - override['area'] = area - override['selected_objects'] = [ob] - override['object'] = ob - override['active_object'] = ob - override['selected_editable_objects'] = [ob] - override['mode'] = 'OBJECT' - override['view_layer'] = scene.view_layers[0] - break - bpy.ops.object.tissue_update_tessellate(override) - - config.evaluatedDepsgraph = None - print('end') - print(config.evaluatedDepsgraph) - return -''' -def remove_tessellate_handler(): +def remove_tissue_handler(): tissue_handlers = [] blender_handlers = bpy.app.handlers.frame_change_post for h in blender_handlers: - if "anim_tessellate" in str(h): + if "anim_tissue" in str(h): tissue_handlers.append(h) for h in tissue_handlers: blender_handlers.remove(h) -def set_tessellate_handler(self, context): - - remove_tessellate_handler() +def set_tissue_handler(self, context): + remove_tissue_handler() for o in context.scene.objects: if o.tissue.bool_run: blender_handlers = bpy.app.handlers.frame_change_post - blender_handlers.append(anim_tessellate) + blender_handlers.append(anim_tissue) + break + return + +def remove_polyhedra_handler(): + tissue_handlers = [] + blender_handlers = bpy.app.handlers.frame_change_post + for h in blender_handlers: + if "anim_polyhedra" in str(h): + tissue_handlers.append(h) + for h in tissue_handlers: blender_handlers.remove(h) + +def set_polyhedra_handler(self, context): + remove_polyhedra_handler() + for o in context.scene.objects: + if o.tissue.bool_run: + blender_handlers = bpy.app.handlers.frame_change_post + blender_handlers.append(anim_polyhedra) break return @@ -190,7 +186,7 @@ def set_tessellate_handler(self, context): class tissue_prop(PropertyGroup): bool_lock : BoolProperty( name="Lock", - description="Prevent automatic update on settings changes or if other objects have it in the hierarchy", + description="Prevent automatic update on settings changes or if other objects have it in the hierarchy.", default=False ) bool_dependencies : BoolProperty( @@ -202,17 +198,24 @@ class tissue_prop(PropertyGroup): name="Animatable", description="Automatically recompute the geometry when the frame is changed. 
Tessellations may not work using the default Render Animation", default = False, - update = set_tessellate_handler + update = set_tissue_handler ) tissue_type : EnumProperty( items=( ('NONE', "None", ""), ('TESSELLATE', "Tessellate", ""), - ('TO_CURVE', "To Curve", "") + ('TO_CURVE', "To Curve", ""), + ('POLYHEDRA', "Polyhedra", ""), + ('CONTOUR_CURVES', "Contour Curves", "") ), default='NONE', name="" ) + bool_hold : BoolProperty( + name="Hold", + description="Wait...", + default=False + ) class tissue_tessellate_prop(PropertyGroup): bool_hold : BoolProperty( @@ -561,13 +564,13 @@ class tissue_tessellate_prop(PropertyGroup): boundary_mat_offset : IntProperty( name="Material Offset", default=0, - description="Material Offset for boundaries (with Multi Components or Material ID)", + description="Material Offset for boundaries (with components based on Materials)", update = anim_tessellate_active ) fill_frame_mat : IntProperty( name="Material Offset", default=0, - description="Material Offset for inner faces (with Multi Components or Material ID)", + description="Material Offset for inner faces (with components based on Materials)", update = anim_tessellate_active ) open_edges_crease : FloatProperty( @@ -598,14 +601,23 @@ class tissue_tessellate_prop(PropertyGroup): name="Frame Thickness", default=0.2, min=0, - soft_max=2, + soft_max=1, description="Frame Thickness", update = anim_tessellate_active ) + frame_boundary_thickness : FloatProperty( + name="Frame Boundary Thickness", + default=0, + min=0, + soft_max=1, + description="Frame Boundary Thickness (when zero it uses the Frame Thickness instead)", + update = anim_tessellate_active + ) frame_mode : EnumProperty( items=( ('CONSTANT', 'Constant', 'Even thickness'), - ('RELATIVE', 'Relative', 'Frame offset depends on face areas')), + ('RELATIVE', 'Relative', 'Frame offset depends on face areas'), + ('CENTER', 'Center', 'Toward the center of the face (uses Incenter for Triangles)')), default='CONSTANT', name="Offset", update = anim_tessellate_active @@ -641,7 +653,7 @@ class tissue_tessellate_prop(PropertyGroup): ) use_origin_offset : BoolProperty( name="Align to Origins", - default=False, + default=True, description="Define offset according to components origin and local Z coordinate", update = anim_tessellate_active ) @@ -665,6 +677,31 @@ class tissue_tessellate_prop(PropertyGroup): update = anim_tessellate_active ) + vertex_group_frame_thickness : StringProperty( + name="Frame Thickness weight", default='', + description="Vertex Group used for frame thickness", + update = anim_tessellate_active + ) + invert_vertex_group_frame_thickness : BoolProperty( + name="Invert", default=False, + description="Invert the vertex group influence", + update = anim_tessellate_active + ) + vertex_group_frame_thickness_factor : FloatProperty( + name="Factor", + default=0, + min=0, + max=1, + description="Frame thickness factor to use for zero vertex group influence", + update = anim_tessellate_active + ) + face_weight_frame : BoolProperty( + name="Face Weight", + default=True, + description="Uniform weight for individual faces", + update = anim_tessellate_active + ) + vertex_group_cap_owner : EnumProperty( items=( ('BASE', 'Base', 'Use base vertex group'), @@ -802,6 +839,12 @@ class tissue_tessellate_prop(PropertyGroup): description="Automatically rotate the boundary faces", update = anim_tessellate_active ) + preserve_quads : BoolProperty( + name="Preserve Quads", + default=False, + description="Quad faces are tessellated using QUAD mode", + update = 
anim_tessellate_active + ) def store_parameters(operator, ob): ob.tissue_tessellate.bool_hold = True @@ -852,6 +895,7 @@ def store_parameters(operator, ob): ob.tissue_tessellate.bridge_cuts = operator.bridge_cuts ob.tissue_tessellate.bridge_smoothness = operator.bridge_smoothness ob.tissue_tessellate.frame_thickness = operator.frame_thickness + ob.tissue_tessellate.frame_boundary_thickness = operator.frame_boundary_thickness ob.tissue_tessellate.frame_mode = operator.frame_mode ob.tissue_tessellate.frame_boundary = operator.frame_boundary ob.tissue_tessellate.fill_frame = operator.fill_frame @@ -863,6 +907,10 @@ def store_parameters(operator, ob): ob.tissue_tessellate.vertex_group_thickness = operator.vertex_group_thickness ob.tissue_tessellate.invert_vertex_group_thickness = operator.invert_vertex_group_thickness ob.tissue_tessellate.vertex_group_thickness_factor = operator.vertex_group_thickness_factor + ob.tissue_tessellate.vertex_group_frame_thickness = operator.vertex_group_frame_thickness + ob.tissue_tessellate.invert_vertex_group_frame_thickness = operator.invert_vertex_group_frame_thickness + ob.tissue_tessellate.vertex_group_frame_thickness_factor = operator.vertex_group_frame_thickness_factor + ob.tissue_tessellate.face_weight_frame = operator.face_weight_frame ob.tissue_tessellate.vertex_group_distribution = operator.vertex_group_distribution ob.tissue_tessellate.invert_vertex_group_distribution = operator.invert_vertex_group_distribution ob.tissue_tessellate.vertex_group_distribution_factor = operator.vertex_group_distribution_factor @@ -888,6 +936,7 @@ def store_parameters(operator, ob): ob.tissue_tessellate.invert_vertex_group_scale_normals = operator.invert_vertex_group_scale_normals ob.tissue_tessellate.boundary_variable_offset = operator.boundary_variable_offset ob.tissue_tessellate.auto_rotate_boundary = operator.auto_rotate_boundary + ob.tissue_tessellate.preserve_quads = operator.preserve_quads ob.tissue_tessellate.bool_hold = False return ob @@ -938,11 +987,16 @@ def load_parameters(operator, ob): operator.boundary_mat_offset = ob.tissue_tessellate.boundary_mat_offset operator.fill_frame_mat = ob.tissue_tessellate.fill_frame_mat operator.frame_thickness = ob.tissue_tessellate.frame_thickness + operator.frame_boundary_thickness = ob.tissue_tessellate.frame_boundary_thickness operator.frame_mode = ob.tissue_tessellate.frame_mode operator.use_origin_offset = ob.tissue_tessellate.use_origin_offset operator.vertex_group_thickness = ob.tissue_tessellate.vertex_group_thickness operator.invert_vertex_group_thickness = ob.tissue_tessellate.invert_vertex_group_thickness operator.vertex_group_thickness_factor = ob.tissue_tessellate.vertex_group_thickness_factor + operator.vertex_group_frame_thickness = ob.tissue_tessellate.vertex_group_frame_thickness + operator.invert_vertex_group_frame_thickness = ob.tissue_tessellate.invert_vertex_group_frame_thickness + operator.vertex_group_frame_thickness_factor = ob.tissue_tessellate.vertex_group_frame_thickness_factor + operator.face_weight_frame = ob.tissue_tessellate.face_weight_frame operator.vertex_group_distribution = ob.tissue_tessellate.vertex_group_distribution operator.invert_vertex_group_distribution = ob.tissue_tessellate.invert_vertex_group_distribution operator.vertex_group_distribution_factor = ob.tissue_tessellate.vertex_group_distribution_factor @@ -968,6 +1022,7 @@ def load_parameters(operator, ob): operator.invert_vertex_group_scale_normals = ob.tissue_tessellate.invert_vertex_group_scale_normals 
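+    # Note: every new tessellation setting (frame_boundary_thickness,
+    # vertex_group_frame_thickness, face_weight_frame, preserve_quads, ...)
+    # is mirrored in store_parameters, load_parameters and props_to_dict, so
+    # that the operator redo panel, the object's tissue_tessellate settings
+    # and the tessellation routine stay in sync.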
operator.boundary_variable_offset = ob.tissue_tessellate.boundary_variable_offset operator.auto_rotate_boundary = ob.tissue_tessellate.auto_rotate_boundary + operator.preserve_quads = ob.tissue_tessellate.preserve_quads return ob def props_to_dict(ob): @@ -1003,6 +1058,7 @@ def props_to_dict(ob): tessellate_dict['even_thickness'] = props.even_thickness tessellate_dict['even_thickness_iter'] = props.even_thickness_iter tessellate_dict['frame_thickness'] = props.frame_thickness + tessellate_dict['frame_boundary_thickness'] = props.frame_boundary_thickness tessellate_dict['frame_mode'] = props.frame_mode tessellate_dict['frame_boundary'] = props.frame_boundary tessellate_dict['fill_frame'] = props.fill_frame @@ -1011,6 +1067,10 @@ def props_to_dict(ob): tessellate_dict['vertex_group_thickness'] = props.vertex_group_thickness tessellate_dict['invert_vertex_group_thickness'] = props.invert_vertex_group_thickness tessellate_dict['vertex_group_thickness_factor'] = props.vertex_group_thickness_factor + tessellate_dict['vertex_group_frame_thickness'] = props.vertex_group_frame_thickness + tessellate_dict['invert_vertex_group_frame_thickness'] = props.invert_vertex_group_frame_thickness + tessellate_dict['vertex_group_frame_thickness_factor'] = props.vertex_group_frame_thickness_factor + tessellate_dict['face_weight_frame'] = props.face_weight_frame tessellate_dict['vertex_group_distribution'] = props.vertex_group_distribution tessellate_dict['invert_vertex_group_distribution'] = props.invert_vertex_group_distribution tessellate_dict['vertex_group_distribution_factor'] = props.vertex_group_distribution_factor @@ -1036,6 +1096,10 @@ def props_to_dict(ob): tessellate_dict["invert_vertex_group_scale_normals"] = props.invert_vertex_group_scale_normals tessellate_dict["boundary_variable_offset"] = props.boundary_variable_offset tessellate_dict["auto_rotate_boundary"] = props.auto_rotate_boundary + tessellate_dict["merge"] = props.merge + tessellate_dict["merge_thres"] = props.merge_thres + tessellate_dict["merge_open_edges_only"] = props.merge_open_edges_only + tessellate_dict["preserve_quads"] = props.preserve_quads return tessellate_dict def copy_tessellate_props(source_ob, target_ob): diff --git a/mesh_tissue/utils.py b/mesh_tissue/utils.py index a3427c515..ece9ef892 100644 --- a/mesh_tissue/utils.py +++ b/mesh_tissue/utils.py @@ -1,5 +1,3 @@ -# SPDX-FileCopyrightText: 2019-2023 Blender Foundation -# # SPDX-License-Identifier: GPL-2.0-or-later import bpy, bmesh @@ -183,6 +181,25 @@ def vector_rotation(vec): if ang < 0: ang = 2*pi + ang return ang +def signed_angle_with_axis(va, vb, axis): + return atan2(va.cross(vb).dot(axis.normalized()), va.dot(vb)) + +def round_angle_with_axis(va, vb, axis): + angle = signed_angle_with_axis(va, vb, axis) + return 2*pi + angle if angle < 0 else angle + +def incenter(vecs): + lengths = x = y = z = 0 + mid = len(vecs)//2+1 + for vi, vj, vk in zip(vecs, vecs[1:]+vecs[:1], vecs[mid:]+vecs[:mid]): + length = (vj-vi).length + lengths += length + x += length*vk.x + y += length*vk.y + z += length*vk.z + inc = Vector((x/lengths, y/lengths, z/lengths)) + return inc + # ------------------------------------------------------------------ # SCENE # ------------------------------------------------------------------ @@ -215,7 +232,15 @@ def turn_off_animatable(scene): # OBJECTS # ------------------------------------------------------------------ -def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True): +def remove_temp_objects(): + # clean objects + for o in 
bpy.data.objects: + if "_tissue_tmp" in o.name: + bpy.data.objects.remove(o) + return + +def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True, mirror_correction = True): + #mirror_correction = False try: ob.name except: return None if ob.type != 'MESH': @@ -226,7 +251,10 @@ def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True): #dg = bpy.context.evaluated_depsgraph_get() #ob_eval = ob.evaluated_get(dg) #me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg) - me = simple_to_mesh(ob) + if mirror_correction: + me = simple_to_mesh_mirror(ob) + else: + me = simple_to_mesh(ob) new_ob = bpy.data.objects.new(ob.data.name, me) new_ob.location, new_ob.matrix_world = ob.location, ob.matrix_world if not apply_modifiers: @@ -234,7 +262,10 @@ def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True): else: if apply_modifiers: new_ob = ob.copy() - new_me = simple_to_mesh(ob) + if mirror_correction: + new_me = simple_to_mesh_mirror(ob) + else: + new_me = simple_to_mesh(ob) new_ob.modifiers.clear() new_ob.data = new_me else: @@ -250,6 +281,76 @@ def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True): bpy.context.view_layer.objects.active = new_ob return new_ob +def simple_to_mesh_mirror(ob, depsgraph=None): + ''' + Convert object to mesh applying Modifiers and Shape Keys. + Automatically correct Faces rotation for Tessellations. + ''' + if 'MIRROR' in [m.type for m in ob.modifiers]: + + _ob = ob.copy() + _ob.name = _ob.name + "_mirror" + bpy.context.collection.objects.link(_ob) + # Store modifiers + mods = list(_ob.modifiers) + # Store visibility setting + mods_vis = [m.show_viewport for m in _ob.modifiers] + # Turn modifiers off + for m in _ob.modifiers: + m.show_viewport = False + while True: + if len(mods) == 0: break + remove_mods = [] + + for m, vis in zip(mods, mods_vis): + m.show_viewport = vis + remove_mods.append(m) + if m.type == 'MIRROR' and vis: + n_axis = m.use_axis[0] + m.use_axis[1] + m.use_axis[2] + fraction = 2**n_axis + me = simple_to_mesh(_ob, depsgraph) + bm = bmesh.new() + bm.from_mesh(me) + bm.faces.ensure_lookup_table() + n_faces = len(bm.faces) + if n_axis > 0: + bm.faces.ensure_lookup_table() + rotate_faces = bm.faces + rot_index = [] + if n_axis == 1: fraction_val = [0,1] + elif n_axis == 2: fraction_val = [0,1,1,0] + elif n_axis == 3: fraction_val = [0,1,1,0,1,0,0,1] + for i in fraction_val: + for j in range(n_faces//fraction): + rot_index.append(i) + for face, shift in zip(rotate_faces, rot_index): + if shift == 0: continue + vs = face.verts[:] + vs2 = vs[-shift:]+vs[:-shift] + material_index = face.material_index + bm.faces.remove(face) + f2 = bm.faces.new(vs2) + f2.select = True + f2.material_index = material_index + bm.normal_update() + bm.to_mesh(me) + bm.free() + for rm in remove_mods: + _ob.modifiers.remove(rm) + _ob.data = me + mods = mods[1:] + mods_vis = mods_vis[1:] + remove_mods = [] + break + if m == mods[-1]: + mods = [] + me = simple_to_mesh(_ob, depsgraph) + _ob.data = me + _ob.modifiers.clear() + else: + me = simple_to_mesh(ob, depsgraph) + return me + def simple_to_mesh(ob, depsgraph=None): ''' Convert object to mesh applying Modifiers and Shape Keys @@ -263,6 +364,7 @@ def simple_to_mesh(ob, depsgraph=None): dg = depsgraph ob_eval = ob.evaluated_get(dg) me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg) + #me.calc_normals() return me def _join_objects(context, objects, link_to_scene=True, make_active=True): 
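The mirror handling above assumes that a Mirror modifier outputs the original faces first, followed by one mirrored block per enabled axis (2**n_axis blocks in total), and it restores a consistent starting corner by shifting the vertex order of the mirrored copies (vs[-shift:] + vs[:-shift]). A list-based sketch of that re-ordering for a single mirror axis, for illustration only (plain tuples instead of bmesh faces):

    # Toy stand-in for the bmesh face loop in simple_to_mesh_mirror (n_axis == 1).
    faces = [(0, 1, 2, 3), (4, 5, 6, 7)]   # original face + its mirrored copy
    fraction_val = [0, 1]                   # shift pattern used for one mirror axis
    block = len(faces) // len(fraction_val)
    corrected = []
    for i, shift in enumerate(fraction_val):
        for face in faces[i * block:(i + 1) * block]:
            corrected.append(face[-shift:] + face[:-shift] if shift else face)
    print(corrected)  # [(0, 1, 2, 3), (7, 4, 5, 6)]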
@@ -320,11 +422,10 @@ def join_objects(context, objects): return new_ob def join_objects(objects): - override = bpy.context.copy() new_ob = objects[0] - override['active_object'] = new_ob - override['selected_editable_objects'] = objects - bpy.ops.object.join(override) + override = {'active_object': new_ob, 'selected_editable_objects': objects} + with bpy.context.temp_override(**override): + bpy.ops.object.join() return new_ob def repeat_mesh(me, n): @@ -345,9 +446,8 @@ def array_mesh(ob, n): arr = ob.modifiers.new('Repeat','ARRAY') arr.relative_offset_displace[0] = 0 arr.count = n - # with bpy.context.temp_override(active_object=ob): - # bpy.ops.object.modifier_apply(modifier='Repeat') - # me = ob.data + #bpy.ops.object.modifier_apply({'active_object':ob},modifier='Repeat') + #me = ob.data ob.modifiers.update() dg = bpy.context.evaluated_depsgraph_get() @@ -366,7 +466,8 @@ def array_mesh_object(ob, n): override = bpy.context.copy() override['active_object'] = ob override = {'active_object': ob} - bpy.ops.object.modifier_apply(override, modifier=arr.name) + with bpy.context.temp_override(**override): + bpy.ops.object.modifier_apply(modifier=arr.name) return ob @@ -389,7 +490,7 @@ def get_mesh_before_subs(ob): hide_mods = [] mods_visibility = [] for m in hide_mods: m.show_viewport = False - me = simple_to_mesh(ob) + me = simple_to_mesh_mirror(ob) for m, vis in zip(hide_mods,mods_visibility): m.show_viewport = vis return me, subs @@ -542,9 +643,6 @@ def get_patches____(me_low, me_high, sides, subs, bool_selection, bool_material_ # fill inners patches[:,1:-1,1:-1] = inners[None,:,:] + ips[:,None,None] - #end_time = time.time() - #print('Tissue: Got Patches in {:.4f} sec'.format(end_time-start_time)) - return patches, mask def tessellate_prepare_component(ob1, props): @@ -673,10 +771,11 @@ def tessellate_prepare_component(ob1, props): cut_edges = [g for g in bisect['geom_cut'] if type(g)==bmesh.types.BMEdge] cut_verts = [g for g in bisect['geom_cut'] if type(g)==bmesh.types.BMVert] - if bound!='CLIP': + if True or bound!='CLIP': for e in cut_edges: seam = True # Prevent glitches + ''' for e1 in original_edges: match_00 = (e.verts[0].co-e1.verts[0].co).length < thres match_11 = (e.verts[1].co-e1.verts[1].co).length < thres @@ -685,6 +784,7 @@ def tessellate_prepare_component(ob1, props): if (match_00 and match_11) or (match_01 and match_10): seam = False break + ''' e.seam = seam if bound == 'CYCLIC': @@ -912,6 +1012,26 @@ def get_edges_id_numpy(mesh): edges = np.concatenate((edges,indexes), axis=1) return edges +def get_edges_numpy_ex(mesh): + ''' + Create a numpy array with the edges of a given mesh, or all the possible + between the vertices of a same face + ''' + edges_verts = get_edges_numpy(mesh) + polygons_diag = [] + for f in mesh.polygons: + sides = len(f.vertices) + if sides < 4: continue + for i in range(sides-2): + v0 = f.vertices[i] + for j in range(i+2, sides-1 if i == 0 else sides): + v1 = f.vertices[j] + polygons_diag.append((v0,v1)) + if len(polygons_diag) == 0: + return edges_verts + polygons_diag = np.array(polygons_diag,dtype=np.int32) + return np.concatenate((edges_verts, polygons_diag), axis=0) + def get_polygons_select_numpy(mesh): n_polys = len(mesh.polygons) selections = [0]*n_polys*2 @@ -919,13 +1039,16 @@ def get_polygons_select_numpy(mesh): selections = np.array(selections) return selections -def get_attribute_numpy(elements_list, attribute='select', mult=1): +def get_attribute_numpy(elements_list, attribute='select', mult=1, size=None): ''' Generate a numpy array 
getting attribute from a list of element using the foreach_get() function. ''' - n_elements = len(elements_list) - values = [0]*n_elements*mult + if size: + n_elements = size + else: + n_elements = len(elements_list) + values = np.zeros(int(n_elements*mult)) elements_list.foreach_get(attribute, values) values = np.array(values) if mult > 1: values = values.reshape((n_elements,mult)) @@ -1006,6 +1129,73 @@ def find_curves(edges, n_verts): curves.append(curve) return curves +def find_curves_attribute(edges, n_verts, attribute): + # dictionary with a list for every point + verts_dict = {key:[] for key in range(n_verts)} + # get neighbors for every point + for e in edges: + verts_dict[e[0]].append(e[1]) + verts_dict[e[1]].append(e[0]) + curves = [] + ordered_attr = [] + while True: + if len(verts_dict) == 0: break + # next starting point + v = list(verts_dict.keys())[0] + # neighbors + v01 = verts_dict[v] + if len(v01) == 0: + verts_dict.pop(v) + continue + curve = [] + attr = [] + if len(v01) > 1: + curve.append(v01[1]) # add neighbors + attr.append(attribute[v01[1]]) # add neighbors + curve.append(v) # add starting point + attr.append(attribute[v]) + curve.append(v01[0]) # add neighbors + attr.append(attribute[v01[0]]) + verts_dict.pop(v) + # start building curve + while True: + #last_point = curve[-1] + #if last_point not in verts_dict: break + + # try to change direction if needed + if curve[-1] in verts_dict: pass + elif curve[0] in verts_dict: + curve.reverse() + attr.reverse() + else: break + + # neighbors points + last_point = curve[-1] + v01 = verts_dict[last_point] + + # curve end + if len(v01) == 1: + verts_dict.pop(last_point) + if curve[0] in verts_dict: continue + else: break + + # chose next point + new_point = None + if v01[0] == curve[-2]: new_point = v01[1] + elif v01[1] == curve[-2]: new_point = v01[0] + #else: break + + #if new_point != curve[1]: + curve.append(new_point) + ordered_attr.append(attr) + verts_dict.pop(last_point) + if curve[0] == curve[-1]: + verts_dict.pop(new_point) + break + if(len(curve)>0): + curves.append(curve) + return curves, ordered_attr + def curve_from_points(points, name='Curve'): curve = bpy.data.curves.new(name,'CURVE') for c in points: @@ -1015,8 +1205,54 @@ def curve_from_points(points, name='Curve'): ob_curve = bpy.data.objects.new(name,curve) return ob_curve -def curve_from_pydata(points, radii, indexes, name='Curve', skip_open=False, merge_distance=1, set_active=True, only_data=False): - curve = bpy.data.curves.new(name,'CURVE') +def curve_from_pydata(points, radii, indexes, name='Curve', skip_open=False, merge_distance=1, set_active=True, only_data=False, curve=None, spline_type='POLY'): + if not curve: + curve = bpy.data.curves.new(name,'CURVE') + curve.dimensions = '3D' + use_rad = True + for c in indexes: + bool_cyclic = c[0] == c[-1] + if bool_cyclic: c.pop(-1) + # cleanup + pts = np.array([points[i] for i in c]) + try: + rad = np.array([radii[i] for i in c]) + except: + use_rad = False + rad = 1 + if merge_distance > 0: + pts1 = np.roll(pts,1,axis=0) + dist = np.linalg.norm(np.array(pts1-pts, dtype=np.float64), axis=1) + count = 0 + n = len(dist) + mask = np.ones(n).astype('bool') + for i in range(n): + count += dist[i] + if count > merge_distance: count = 0 + else: mask[i] = False + pts = pts[mask] + if use_rad: rad = rad[mask] + + if skip_open and not bool_cyclic: continue + s = curve.splines.new(spline_type) + n_pts = len(pts) + s.points.add(n_pts-1) + w = np.ones(n_pts).reshape((n_pts,1)) + co = 
np.concatenate((pts,w),axis=1).reshape((n_pts*4)) + s.points.foreach_set('co',co) + if use_rad: s.points.foreach_set('radius',rad) + s.use_cyclic_u = bool_cyclic + if only_data: + return curve + else: + ob_curve = bpy.data.objects.new(name,curve) + bpy.context.collection.objects.link(ob_curve) + if set_active: + bpy.context.view_layer.objects.active = ob_curve + return ob_curve + +def update_curve_from_pydata_simple(curve, points, radii, indexes, skip_open=False, merge_distance=1, set_active=True, only_data=False, spline_type='POLY'): + curve.splines.clear() curve.dimensions = '3D' use_rad = True for c in indexes: @@ -1043,7 +1279,7 @@ def curve_from_pydata(points, radii, indexes, name='Curve', skip_open=False, mer if use_rad: rad = rad[mask] if skip_open and not bool_cyclic: continue - s = curve.splines.new('POLY') + s = curve.splines.new(spline_type) n_pts = len(pts) s.points.add(n_pts-1) w = np.ones(n_pts).reshape((n_pts,1)) @@ -1091,13 +1327,14 @@ def update_curve_from_pydata(curve, points, normals, radii, indexes, merge_dista #if skip_open and not bool_cyclic: continue n_pts = len(pts) series = np.arange(n_pts) - patt1 = series + (series-series%pattern[1])/pattern[1]*pattern[0]+pattern[0] - patt1 = patt1[patt1 dist: + weight[v1.index] = dist + neigh[v1] = 0 + if len(neigh) == 0: return weight + else: return fill_neighbors_attribute(neigh.keys(), weight, attribute) + # ------------------------------------------------------------------ # MODIFIERS # ------------------------------------------------------------------ @@ -1434,7 +1689,7 @@ def mod_preserve_shape(mod): def recurLayerCollection(layerColl, collName): ''' - Recursively transverse layer_collection for a particular name. + Recursivly transverse layer_collection for a particular name. ''' found = None if (layerColl.name == collName): @@ -1456,3 +1711,15 @@ def auto_layer_collection(): lc = recurLayerCollection(layer_collection, c.name) if not c.hide_viewport and not lc.hide_viewport: bpy.context.view_layer.active_layer_collection = lc + +def np_remap_image_values(img, channel=0, min=0, max=1, invert=False): + nx = img.size[1] + ny = img.size[0] + px = np.float32(np.zeros(nx*ny*4)) + img.pixels.foreach_get(px) + px = np.array(px).reshape((-1,4)) + values = px[:,channel] + values = values.reshape((nx,ny)) + if invert: + values = 1-values + return min + values*(max-min) diff --git a/mesh_tissue/utils_pip.py b/mesh_tissue/utils_pip.py index 5ae981c58..04503ce90 100644 --- a/mesh_tissue/utils_pip.py +++ b/mesh_tissue/utils_pip.py @@ -1,7 +1,25 @@ -# SPDX-FileCopyrightText: 2022 Blender Foundation -# # SPDX-License-Identifier: GPL-2.0-or-later +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# +# ##### END GPL LICENSE BLOCK ##### + +# + # ---------------------------------------------------------- # Author: Stephen Leger (s-leger) # @@ -26,7 +44,7 @@ class Pip: import sys site_package = site.getusersitepackages() if not os.path.exists(site_package): - site_package = bpy.utils.user_resource('SCRIPTS', "site_package", create=True) + site_package = bpy.utils.user_resource('SCRIPTS', path="site_package", create=True) site.addsitedir(site_package) if site_package not in sys.path: sys.path.append(site_package) diff --git a/mesh_tissue/uv_to_mesh.py b/mesh_tissue/uv_to_mesh.py index a44d9da80..024bb93df 100644 --- a/mesh_tissue/uv_to_mesh.py +++ b/mesh_tissue/uv_to_mesh.py @@ -1,7 +1,23 @@ -# SPDX-FileCopyrightText: 2017-2022 Blender Foundation -# # SPDX-License-Identifier: GPL-2.0-or-later +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + # --------------------------------- UV to MESH ------------------------------- # # -------------------------------- version 0.1.1 ----------------------------- # # # diff --git a/mesh_tissue/weight_reaction_diffusion.py b/mesh_tissue/weight_reaction_diffusion.py new file mode 100644 index 000000000..51a96ed71 --- /dev/null +++ b/mesh_tissue/weight_reaction_diffusion.py @@ -0,0 +1,1180 @@ +# SPDX-License-Identifier: GPL-2.0-or-later + +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +#-------------------------- COLORS / GROUPS EXCHANGER -------------------------# +# # +# Vertex Color to Vertex Group allow you to convert colors channles to weight # +# maps. # +# The main purpose is to use vertex colors to store information when importing # +# files from other softwares. The script works with the active vertex color # +# slot. # +# For use the command "Vertex Clors to Vertex Groups" use the search bar # +# (space bar). 
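+#                                                                              #
+# The module below implements a Gray-Scott-type Reaction-Diffusion solver     #
+# that runs directly on the mesh vertices: the two fields A and B are stored  #
+# either as the Vertex Groups "A" and "B" or as the point attributes "RD_A"   #
+# and "RD_B".                                                                  #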
# +# # +# (c) Alessandro Zomparelli # +# (2017) # +# # +# http://www.co-de-it.com/ # +# # +################################################################################ + +import bpy, bmesh, os +import numpy as np +import math, timeit, time +from math import pi +from statistics import mean, stdev +from mathutils import Vector +from mathutils.kdtree import KDTree +from numpy import * +try: from .numba_functions import numba_reaction_diffusion, numba_reaction_diffusion_anisotropic, integrate_field +except: pass +try: import numexpr as ne +except: pass + +# Reaction-Diffusion cache +from pathlib import Path +import random as rnd +import string + +from bpy.types import ( + Operator, + Panel, + PropertyGroup, + ) + +from bpy.props import ( + BoolProperty, + EnumProperty, + FloatProperty, + IntProperty, + StringProperty, + FloatVectorProperty, + IntVectorProperty, + PointerProperty +) + +from .utils import * + +def force_geometry_data_update(self, context): + ob = context.object + props = ob.reaction_diffusion_settings + if props.input_mode == 'STATIC': + props.update_geometry_data = True + +def reaction_diffusion_add_handler(self, context): + # remove existing handlers + reaction_diffusion_remove_handler(self, context) + # add new handler + bpy.app.handlers.frame_change_post.append(reaction_diffusion_scene) + +def reaction_diffusion_remove_handler(self, context): + # remove existing handlers + old_handlers = [] + for h in bpy.app.handlers.frame_change_post: + if "reaction_diffusion" in str(h): + old_handlers.append(h) + for h in old_handlers: bpy.app.handlers.frame_change_post.remove(h) + +class start_reaction_diffusion(Operator): + bl_idname = "object.start_reaction_diffusion" + bl_label = "Start Reaction Diffusion" + bl_description = ("Run a Reaction-Diffusion based on existing Vertex Groups: A and B") + bl_options = {'REGISTER', 'UNDO'} + + run : BoolProperty( + name="Run Reaction-Diffusion", default=True, description="Compute a new iteration on frame changes") + + time_steps : IntProperty( + name="Steps", default=10, min=0, soft_max=50, + description="Number of Steps") + + dt : FloatProperty( + name="dt", default=0.5, min=0, soft_max=1, + description="Time Step") + + diff_a : FloatProperty( + name="Diff A", default=0.18, min=0, soft_max=2, + description="Diffusion A") + + diff_b : FloatProperty( + name="Diff B", default=0.09, min=0, soft_max=2, + description="Diffusion B") + + f : FloatProperty( + name="f", default=0.055, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4, + description="Feed Rate") + + k : FloatProperty( + name="k", default=0.062, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4, + description="Kill Rate") + + @classmethod + def poll(cls, context): + return context.object.type == 'MESH' and context.mode != 'EDIT_MESH' + + def execute(self, context): + reaction_diffusion_add_handler(self, context) + set_animatable_fix_handler(self, context) + + ob = context.object + + ob.reaction_diffusion_settings.run = self.run + ob.reaction_diffusion_settings.dt = self.dt + ob.reaction_diffusion_settings.time_steps = self.time_steps + ob.reaction_diffusion_settings.f = self.f + ob.reaction_diffusion_settings.k = self.k + ob.reaction_diffusion_settings.diff_a = self.diff_a + ob.reaction_diffusion_settings.diff_b = self.diff_b + + # check vertex group A + try: + vg = ob.vertex_groups['A'] + except: + ob.vertex_groups.new(name='A') + # check vertex group B + try: + vg = ob.vertex_groups['B'] + except: + ob.vertex_groups.new(name='B') + + for v in ob.data.vertices: + 
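+            # Seed the two fields: A starts saturated (1.0) and B empty (0.0) on
+            # every vertex; B is typically introduced afterwards through the
+            # optional "Brush" vertex group (see vertex_group_brush below).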
+            ob.vertex_groups['A'].add([v.index], 1, 'REPLACE')
+            ob.vertex_groups['B'].add([v.index], 0, 'REPLACE')
+
+        ob.vertex_groups.update()
+        ob.data.update()
+        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
+
+        return {'FINISHED'}
+
+class reset_reaction_diffusion_weight(Operator):
+    bl_idname = "object.reset_reaction_diffusion_weight"
+    bl_label = "Reset Reaction Diffusion Weight"
+    bl_description = ("Set A and B weight to default values")
+    bl_options = {'REGISTER', 'UNDO'}
+
+    @classmethod
+    def poll(cls, context):
+        return context.object.type == 'MESH' and context.mode != 'EDIT_MESH'
+
+    def execute(self, context):
+        reaction_diffusion_add_handler(self, context)
+        set_animatable_fix_handler(self, context)
+
+        ob = context.object
+
+        # check vertex group A
+        try:
+            vg = ob.vertex_groups['A']
+        except:
+            ob.vertex_groups.new(name='A')
+        # check vertex group B
+        try:
+            vg = ob.vertex_groups['B']
+        except:
+            ob.vertex_groups.new(name='B')
+
+        for v in ob.data.vertices:
+            ob.vertex_groups['A'].add([v.index], 1, 'REPLACE')
+            ob.vertex_groups['B'].add([v.index], 0, 'REPLACE')
+
+        ob.vertex_groups.update()
+        ob.data.update()
+        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
+
+        return {'FINISHED'}
+
+class reaction_diffusion_prop(PropertyGroup):
+    run : BoolProperty(default=False, update = reaction_diffusion_add_handler,
+        description='Compute a new iteration on frame changes. Currently it does not work during Render Animation')
+
+    time_steps : IntProperty(
+        name="Steps", default=10, min=0, soft_max=50,
+        description="Number of Steps"
+        )
+
+    dt : FloatProperty(
+        name="dt", default=0.5, min=0, soft_max=1,
+        description="Time Step"
+        )
+
+    diff_a : FloatProperty(
+        name="Diff A", default=0.1, min=0, soft_max=2, precision=3,
+        description="Diffusion A"
+        )
+
+    diff_b : FloatProperty(
+        name="Diff B", default=0.05, min=0, soft_max=2, precision=3,
+        description="Diffusion B"
+        )
+
+    f : FloatProperty(
+        name="f", default=0.055, soft_min=0.01, soft_max=0.06, precision=4,
+        step=0.05, description="Feed Rate"
+        )
+
+    k : FloatProperty(
+        name="k", default=0.062, soft_min=0.035, soft_max=0.065, precision=4,
+        step=0.05, description="Kill Rate"
+        )
+
+    diff_mult : FloatProperty(
+        name="Scale", default=1, min=0, soft_max=1, max=10, precision=2,
+        description="Multiplier for the diffusion of both substances"
+        )
+
+    vertex_group_diff_a : StringProperty(
+        name="Diff A", default='',
+        description="Vertex Group used for A diffusion"
+        )
+
+    vertex_group_diff_b : StringProperty(
+        name="Diff B", default='',
+        description="Vertex Group used for B diffusion"
+        )
+
+    vertex_group_scale : StringProperty(
+        name="Scale", default='',
+        description="Vertex Group used for Scale value"
+        )
+
+    vertex_group_f : StringProperty(
+        name="f", default='',
+        description="Vertex Group used for Feed value (f)"
+        )
+
+    vertex_group_k : StringProperty(
+        name="k", default='',
+        description="Vertex Group used for Kill value (k)"
+        )
+
+    vertex_group_brush : StringProperty(
+        name="Brush", default='',
+        description="Vertex Group used for adding/removing B"
+        )
+
+    invert_vertex_group_diff_a : BoolProperty(default=False,
+        description='Invert the value of the Vertex Group Diff A'
+        )
+
+    invert_vertex_group_diff_b : BoolProperty(default=False,
+        description='Invert the value of the Vertex Group Diff B'
+        )
+
+    invert_vertex_group_scale : BoolProperty(default=False,
+        description='Invert the value of the Vertex Group Scale'
+        )
+
+    invert_vertex_group_f : BoolProperty(default=False,
+        description='Invert the value of the Vertex Group f'
+        )
+
+    invert_vertex_group_k : BoolProperty(default=False,
+        description='Invert the value of the Vertex Group k'
+        )
+
+    min_diff_a : FloatProperty(
+        name="Min Diff A", default=0.1, min=0, soft_max=2, precision=3,
+        description="Min Diff A"
+        )
+
+    max_diff_a : FloatProperty(
+        name="Max Diff A", default=0.1, min=0, soft_max=2, precision=3,
+        description="Max Diff A"
+        )
+
+    min_diff_b : FloatProperty(
+        name="Min Diff B", default=0.1, min=0, soft_max=2, precision=3,
+        description="Min Diff B"
+        )
+
+    max_diff_b : FloatProperty(
+        name="Max Diff B", default=0.1, min=0, soft_max=2, precision=3,
+        description="Max Diff B"
+        )
+
+    min_scale : FloatProperty(
+        name="Scale", default=0.35, min=0, soft_max=1, max=10, precision=2,
+        description="Min Scale Value"
+        )
+
+    max_scale : FloatProperty(
+        name="Scale", default=1, min=0, soft_max=1, max=10, precision=2,
+        description="Max Scale Value"
+        )
+
+    min_f : FloatProperty(
+        name="Min f", default=0.02, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4, step=0.05,
+        description="Min Feed Rate"
+        )
+
+    max_f : FloatProperty(
+        name="Max f", default=0.055, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4, step=0.05,
+        description="Max Feed Rate"
+        )
+
+    min_k : FloatProperty(
+        name="Min k", default=0.035, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4, step=0.05,
+        description="Min Kill Rate"
+        )
+
+    max_k : FloatProperty(
+        name="Max k", default=0.062, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4, step=0.05,
+        description="Max Kill Rate"
+        )
+
+    brush_mult : FloatProperty(
+        name="Mult", default=0.5, min=-1, max=1, precision=3, step=0.05,
+        description="Multiplier for brush value"
+        )
+
+    bool_mod : BoolProperty(
+        name="Use Modifiers", default=False,
+        description="Read the vertex groups or attributes after the Modifiers are applied"
+        )
+
+    bool_cache : BoolProperty(
+        name="Use Cache", default=False,
+        description="Read the simulation data from the baked cache files"
+        )
+
+    cache_frame_start : IntProperty(
+        name="Start", default=1,
+        description="Frame on which the simulation starts"
+        )
+
+    cache_frame_end : IntProperty(
+        name="End", default=250,
+        description="Frame on which the simulation ends"
+        )
+
+    cache_dir : StringProperty(
+        name="Cache directory", default="", subtype='FILE_PATH',
+        description = 'Directory that contains Reaction-Diffusion cache files'
+        )
+
+    reload_at_start : BoolProperty(
+        name="Reload at Start", default=True,
+        description="Values of A and B are reloaded from Vertex Groups or Modifiers on the first frame"
+        )
+
+    update_geometry_data : BoolProperty(
+        name="Update Geometry Data", default=True,
+        description="Update geometry data and vector field data at the next frame"
+        )
+
+    update_baked_geometry : BoolProperty(
+        name="Update Baked Geometry", default=False,
+        description="Force to update geometry data on the next iteration"
+        )
+
+    vector_field_mode : EnumProperty(
+        items=(
+            ('NONE', "None", "Isotropic Reaction-Diffusion"),
+            ('VECTOR', "Vector", "Uniform vector"),
+            ('OBJECT', "Object", "Orient the field with a target object's Z"),
+            ('GRADIENT', "Gradient", "Gradient vertex group"),
+            ('XYZ', "x, y, z", "Vector field defined by vertex groups 'x', 'y' and 'z'"),
+            ('VECTOR_ATTRIBUTE', "Vector Field", "'RD_vector_field' attribute (Vertex > Vector)")
+            ),
+        default='NONE',
+        name="Vector Field controlling the direction of the Reaction-Diffusion",
+        update = force_geometry_data_update
+        )
+
+    anisotropy : FloatProperty(
+        name="Anisotropy", default=0.5, min=0, max=1, precision=2,
+        description="Influence of the Vector Field"
+        )
+
+    vector : FloatVectorProperty(
+        name='Vector', description='Constant Vector', default=(0.0, 0.0, 1.0),
+        update = force_geometry_data_update
+        )
+
+    perp_vector_field : BoolProperty(default=False,
+        description='Use the perpendicular direction',
+        update = force_geometry_data_update
+        )
+
+    vector_field_object : PointerProperty(
+        type=bpy.types.Object,
+        name="",
+        description="Target Object",
+        update = force_geometry_data_update
+        )
+
+    vertex_group_gradient : StringProperty(
+        name="Gradient", default='',
+        description="Vertex Group for the gradient vector field",
+        update = force_geometry_data_update
+        )
+
+    input_mode : EnumProperty(
+        items=(
+            ('STATIC', "Static input (faster)", "Information about geometry and input values is loaded once, at the first frame, and then stored as attributes. This also includes the effects of modifiers on vertex groups or attributes. Geometry data and Vector Field data are instead stored in a separate mesh."),
+            ('INTERACTIVE', "Interactive (slower)", "Information about geometry and input values is updated dynamically. This also includes the effects of modifiers on vertex groups or attributes.")
+            ),
+        default='INTERACTIVE',
+        name="Input Mode",
+        update = force_geometry_data_update
+        )
+
+    input_data : EnumProperty(
+        items=(
+            ('WEIGHT', "Vertex Groups (default)", "The fields A and B are loaded from vertex groups. If 'Input Mode' is 'Static', then the Vertex Groups are loaded only for the first frame."),
+            ('ATTRIBUTES', "Attributes (faster)", "The fields A and B are loaded from the attributes 'RD_A' and 'RD_B'. If 'Input Mode' is 'Static', then this is the automatic mode for every frame except the first one.")
+            ),
+        default='WEIGHT',
+        name="Input Data"
+        )
+
+    output_data : EnumProperty(
+        items=(
+            ('WEIGHT', "Vertex Groups (default)", "The fields A and B are saved as Vertex Groups at every frame."),
+            ('ATTRIBUTES', "Attributes (faster)", "The fields A and B are saved as attributes 'RD_A' and 'RD_B' at every frame. If 'Input Mode' is 'Static', then this happens automatically.")
+            ),
+        default='WEIGHT',
+        name="Output Data"
+        )
+
+    cache_mesh : StringProperty(
+        name="Cache Mesh", default='',
+        description="Mesh used to store data for 'Static' mode."
+ ) + +class bake_reaction_diffusion(Operator): + bl_idname = "object.bake_reaction_diffusion" + bl_label = "Bake Data" + bl_description = ("Bake the Reaction-Diffusion to the cache directory") + bl_options = {'REGISTER', 'UNDO'} + + @classmethod + def poll(cls, context): + return context.object.type == 'MESH' and context.mode != 'EDIT_MESH' + + def execute(self, context): + ob = context.object + props = ob.reaction_diffusion_settings + frames = range(props.cache_frame_start, props.cache_frame_end) if props.input_mode == 'INTERACTIVE' else [props.cache_frame_start] + props.run = False if props.input_mode == 'STATIC' else True + for frame in frames: + context.scene.frame_current = frame + message = reaction_diffusion_def(ob, bake=True) + if type(message) is str: + self.report({'ERROR'}, message) + props.bool_cache = True + props.run = True + context.scene.frame_current = props.cache_frame_start + return {'FINISHED'} + +class reaction_diffusion_free_data(Operator): + bl_idname = "object.reaction_diffusion_free_data" + bl_label = "Free Data" + bl_description = ("Free Reaction-Diffusion data") + bl_options = {'REGISTER', 'UNDO'} + + @classmethod + def poll(cls, context): + return context.object.type == 'MESH' + + def execute(self, context): + ob = context.object + props = ob.reaction_diffusion_settings + props.bool_cache = False + + folder = Path(props.cache_dir) + for i in range(props.cache_frame_start, props.cache_frame_end): + data_a = folder / "a_{:04d}".format(i) + if os.path.exists(data_a): + os.remove(data_a) + data_a = folder / "b_{:04d}".format(i) + if os.path.exists(data_a): + os.remove(data_a) + return {'FINISHED'} + +from bpy.app.handlers import persistent + +def reaction_diffusion_scene(scene, bake=False): + tissue_time(None,'{:7d} Tissue: Reaction-Diffusion...'.format(scene.frame_current), levels=0) + for ob in scene.objects: + if ob.reaction_diffusion_settings.run: + message = reaction_diffusion_def(ob) + if type(message) is str: + print(message) + +def load_attribute_parameter(mesh, name, default, domain, data_type): + if name in mesh.attributes: + att = mesh.attributes[name] + if att.domain == domain and att.data_type == data_type: + values = np.zeros((len(att.data))) + att.data.foreach_get('value', values) + return values + return default + +def store_attribute_parameter(mesh, name, values, domain, data_type): + if name not in mesh.attributes: + mesh.attributes.new(name, data_type, domain) + att = mesh.attributes[name] + if att.domain == domain and att.data_type == data_type and len(values) == len(att.data): + att.data.foreach_set('value', values) + +def reaction_diffusion_def(ob, bake=False): + scene = bpy.context.scene + start = time.time() + beginning = time.time() + if type(ob) == bpy.types.Scene: return None + props = ob.reaction_diffusion_settings + + if bake or props.bool_cache: + if props.cache_dir == '': + letters = string.ascii_letters + random_name = ''.join(rnd.choice(letters) for i in range(6)) + if bpy.context.blend_data.filepath == '': + folder = Path(bpy.context.preferences.filepaths.temporary_directory) + folder = folder / 'reaction_diffusion_cache' / random_name + else: + folder = '//' + Path(bpy.context.blend_data.filepath).stem + folder = Path(bpy.path.abspath(folder)) / 'reaction_diffusion_cache' / random_name + folder.mkdir(parents=True, exist_ok=True) + props.cache_dir = str(folder) + else: + folder = Path(props.cache_dir) + + me = ob.data + bm = None + n_verts = len(me.vertices) + a = np.zeros(n_verts) + b = np.zeros(n_verts) + + if bake and 
props.input_mode == 'INTERACTIVE': + tissue_time(None,'{:7d} Tissue: Reaction-Diffusion...'.format(scene.frame_current), levels=0) + tissue_time(None,"Running on {}...".format(ob.name),levels=0) + is_static = props.input_mode == 'STATIC' + if props.reload_at_start and scene.frame_current == props.cache_frame_start: + is_static = False + use_modifiers = props.bool_mod and not is_static + + if props.bool_cache: + try: + file_name = folder / "a_{:04d}".format(scene.frame_current) + a = np.fromfile(file_name) + file_name = folder / "b_{:04d}".format(scene.frame_current) + b = np.fromfile(file_name) + except: + print(' Cannot read cache.') + return + else: + if use_modifiers: + me = rd_apply_modifiers(ob) + if type(me) is str: + return me + + dt = props.dt + time_steps = props.time_steps + f = props.f + k = props.k + diff_a = props.diff_a + diff_b = props.diff_b + scale = props.diff_mult + brush_mult = props.brush_mult + brush = 0 + + if is_static or props.input_data == 'ATTRIBUTES': + if not 'RD_A' in me.attributes: + me.attributes.new('RD_A', 'FLOAT', 'POINT') + if not 'RD_B' in me.attributes: + me.attributes.new('RD_B', 'FLOAT', 'POINT') + a = np.zeros((n_verts)) + b = np.zeros((n_verts)) + me.attributes['RD_A'].data.foreach_get('value', a) + me.attributes['RD_B'].data.foreach_get('value', b) + a = load_attribute_parameter(me, 'RD_A', np.zeros((n_verts)), 'POINT', 'FLOAT') + b = load_attribute_parameter(me, 'RD_B', np.zeros((n_verts)), 'POINT', 'FLOAT') + if not (props.input_data == 'WEIGHT' and not props.vertex_group_brush in ob.vertex_groups): + brush = load_attribute_parameter(me, 'RD_brush', 0, 'POINT', 'FLOAT') + if not (props.input_data == 'WEIGHT' and not props.vertex_group_diff_a in ob.vertex_groups): + diff_a = load_attribute_parameter(me, 'RD_diff_a', diff_a, 'POINT', 'FLOAT') + if not (props.input_data == 'WEIGHT' and not props.vertex_group_diff_b in ob.vertex_groups): + diff_b = load_attribute_parameter(me, 'RD_diff_b', diff_b, 'POINT', 'FLOAT') + if not (props.input_data == 'WEIGHT' and not props.vertex_group_scale in ob.vertex_groups): + scale = load_attribute_parameter(me, 'RD_scale', scale, 'POINT', 'FLOAT') + if not (props.input_data == 'WEIGHT' and not props.vertex_group_f in ob.vertex_groups): + f = load_attribute_parameter(me, 'RD_f', f, 'POINT', 'FLOAT') + if not (props.input_data == 'WEIGHT' and not props.vertex_group_k in ob.vertex_groups): + k = load_attribute_parameter(me, 'RD_k', k, 'POINT', 'FLOAT') + else: + if props.vertex_group_diff_a != '': + diff_a = np.zeros(n_verts) + if props.vertex_group_diff_b != '': + diff_b = np.zeros(n_verts) + if props.vertex_group_scale != '': + scale = np.zeros(n_verts) + if props.vertex_group_f != '': + f = np.zeros(n_verts) + if props.vertex_group_k != '': + k = np.zeros(n_verts) + if props.vertex_group_brush != '': + brush = np.zeros(n_verts) + else: brush = 0 + + bm = bmesh.new() # create an empty BMesh + bm.from_mesh(me) # fill it in from a Mesh + dvert_lay = bm.verts.layers.deform.active + + group_index_a = ob.vertex_groups["A"].index + group_index_b = ob.vertex_groups["B"].index + a = bmesh_get_weight_numpy(group_index_a, dvert_lay, bm.verts) + b = bmesh_get_weight_numpy(group_index_b, dvert_lay, bm.verts) + + if props.vertex_group_diff_a != '': + group_index = ob.vertex_groups[props.vertex_group_diff_a].index + diff_a = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts, normalized=True) + if props.invert_vertex_group_diff_a: + vg_bounds = (props.min_diff_a, props.max_diff_a) + else: + vg_bounds = (props.max_diff_a, 
props.min_diff_a) + diff_a = np.interp(diff_a, (0,1), vg_bounds) + if props.input_mode == 'STATIC': + store_attribute_parameter(me, 'RD_diff_a', diff_a, 'POINT', 'FLOAT') + + if props.vertex_group_diff_b != '': + group_index = ob.vertex_groups[props.vertex_group_diff_b].index + diff_b = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts, normalized=True) + if props.invert_vertex_group_diff_b: + vg_bounds = (props.max_diff_b, props.min_diff_b) + else: + vg_bounds = (props.min_diff_b, props.max_diff_b) + diff_b = np.interp(diff_b, (0,1), vg_bounds) + if props.input_mode == 'STATIC': + store_attribute_parameter(me, 'RD_diff_b', diff_b, 'POINT', 'FLOAT') + + if props.vertex_group_scale != '': + group_index = ob.vertex_groups[props.vertex_group_scale].index + scale = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts, normalized=True) + if props.invert_vertex_group_scale: + vg_bounds = (props.max_scale, props.min_scale) + else: + vg_bounds = (props.min_scale, props.max_scale) + scale = np.interp(scale, (0,1), vg_bounds) + if props.input_mode == 'STATIC': + store_attribute_parameter(me, 'RD_scale', scale, 'POINT', 'FLOAT') + + if props.vertex_group_f != '': + group_index = ob.vertex_groups[props.vertex_group_f].index + f = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts, normalized=True) + if props.invert_vertex_group_f: + vg_bounds = (props.max_f, props.min_f) + else: + vg_bounds = (props.min_f, props.max_f) + f = np.interp(f, (0,1), vg_bounds, ) + if props.input_mode == 'STATIC': + store_attribute_parameter(me, 'RD_f', f, 'POINT', 'FLOAT') + + if props.vertex_group_k != '': + group_index = ob.vertex_groups[props.vertex_group_k].index + k = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts, normalized=True) + if props.invert_vertex_group_k: + vg_bounds = (props.max_k, props.min_k) + else: + vg_bounds = (props.min_k, props.max_k) + k = np.interp(k, (0,1), vg_bounds) + if props.input_mode == 'STATIC': + store_attribute_parameter(me, 'RD_k', k, 'POINT', 'FLOAT') + + if props.vertex_group_brush != '': + group_index = ob.vertex_groups[props.vertex_group_brush].index + brush = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts, normalized=True) + brush *= brush_mult + if props.input_mode == 'STATIC': + store_attribute_parameter(me, 'RD_brush', brush, 'POINT', 'FLOAT') + + diff_a *= scale + diff_b *= scale + + edge_verts = None + field_mult = np.zeros((1)) + if is_static and props.cache_mesh in bpy.data.meshes and not props.update_geometry_data: + rd_mesh = bpy.data.meshes[props.cache_mesh] + edge_verts = get_edges_numpy_ex(rd_mesh) + n_edges = len(edge_verts) + if props.vector_field_mode != 'NONE' and 'RD_vector_field' in rd_mesh.attributes: + field_mult = load_attribute_parameter(rd_mesh, 'RD_vector_field', np.ones((n_edges)), 'EDGE', 'FLOAT') + else: + edge_verts = get_edges_numpy_ex(me) + n_edges = len(edge_verts) + if props.cache_mesh in bpy.data.meshes: + rd_mesh = bpy.data.meshes[props.cache_mesh] + rd_mesh.clear_geometry() + else: + rd_mesh = bpy.data.meshes.new('RD_' + me.name) + props.cache_mesh = rd_mesh.name + rd_mesh.from_pydata(get_vertices_numpy(me), edge_verts, []) + + is_vector_field = True + + if props.vector_field_mode != 'NONE': + if props.vector_field_mode == 'VECTOR': + vec = Vector(props.vector) + vector_field = [vec]*n_edges + + if props.vector_field_mode == 'OBJECT': + if props.vector_field_object: + mat = props.vector_field_object.matrix_world + else: + mat = ob.matrix_world + vec = Vector((mat[0][2],mat[1][2],mat[2][2])) + vector_field = 
[vec]*n_edges + + if props.vector_field_mode == 'XYZ': + vgk = ob.vertex_groups.keys() + if 'x' in vgk and 'y' in vgk and 'z' in vgk: + if not bm: + bm = bmesh.new() # create an empty BMesh + bm.from_mesh(me) # fill it in from a Mesh + dvert_lay = bm.verts.layers.deform.active + group_index = ob.vertex_groups["x"].index + field_x = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts) + group_index = ob.vertex_groups["y"].index + field_y = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts) + group_index = ob.vertex_groups["z"].index + field_z = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts) + field_x = field_x*2-1 + field_y = field_y*2-1 + field_z = field_z*2-1 + vector_field = [] + for x,y,z in zip(field_x, field_y, field_z): + vector_field.append(Vector((x,y,z)).normalized()) + else: + is_vector_field = False + + if props.vector_field_mode == 'GRADIENT': + if props.vertex_group_gradient: + if props.vertex_group_gradient in ob.vertex_groups.keys(): + if not bm: + bm = bmesh.new() # create an empty BMesh + bm.from_mesh(me) # fill it in from a Mesh + dvert_lay = bm.verts.layers.deform.active + group_index = ob.vertex_groups[props.vertex_group_gradient].index + weight = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts) + vector_field = [None]*n_verts + for i,v0 in enumerate(bm.verts): + vec = Vector((0,0,0)) + w0 = weight[v0.index] + for e in v0.link_edges: + v1 = e.other_vert(v0) + dw = weight[v1.index]-w0 + vec += (v1.co-v0.co)*dw + vector_field[i] = vec.normalized() + else: + is_vector_field = False + else: + is_vector_field = False + + if props.vector_field_mode == 'VECTOR_ATTRIBUTE': + if 'RD_vector_field' in me.attributes: + vectors_components = [0]*n_verts*3 + me.attributes['RD_vector_field'].data.foreach_get('vector', vectors_components) + vector_field = [None]*n_verts + for i in range(n_verts): + x = vectors_components[i*3] + y = vectors_components[i*3+1] + z = vectors_components[i*3+2] + vector_field[i] = Vector((x,y,z)).normalized() + else: + is_vector_field = False + + if is_vector_field: + if props.perp_vector_field: + for i, vert in enumerate(bm.verts): + vector_field[i] = vector_field[i].cross(vert.normal) + field_mult = [1]*n_edges + for i, pair in enumerate(edge_verts): + id0 = pair[0] + id1 = pair[1] + v0 = me.vertices[id0].co + v1 = me.vertices[id1].co + vec = (v1-v0).normalized() + mult0 = abs(vec.dot(vector_field[id0])) + mult1 = abs(vec.dot(vector_field[id1])) + field_mult[i] = (mult0 + mult1)/2 + field_mult = np.array(field_mult) + if props.cache_mesh in bpy.data.meshes and props.input_mode == 'STATIC': + rd_mesh = bpy.data.meshes[props.cache_mesh] + store_attribute_parameter(rd_mesh, 'RD_vector_field', field_mult, 'EDGE', 'FLOAT') + else: + is_vector_field = False + props.update_geometry_data = False + + edge_verts = edge_verts.reshape((-1)) + field_mult = field_mult*props.anisotropy + (1-props.anisotropy) + + tissue_time(start, "Preparation", levels=1) + start = time.time() + + frames = range(props.cache_frame_start, props.cache_frame_end+1) if bake and props.input_mode == 'STATIC' else [scene.frame_current] + for frame in frames: + if bake and props.input_mode == 'STATIC': + tissue_time(None,'{:7d} Tissue: Baking Reaction-Diffusion on {}...'.format(frame, ob.name), levels=0) + try: + _f = f if type(f) is np.ndarray else np.array((f,)) + _k = k if type(k) is np.ndarray else np.array((k,)) + _diff_a = diff_a if type(diff_a) is np.ndarray else np.array((diff_a,)) + _diff_a *= scale + _diff_b = diff_b if type(diff_b) is np.ndarray else 
np.array((diff_b,)) + _diff_b *= scale + _brush = brush if type(brush) is np.ndarray else np.array((brush,)) + if len(field_mult) == 1: + a, b = numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, _brush, _diff_a, _diff_b, _f, _k, dt, time_steps) + else: + a, b = numba_reaction_diffusion_anisotropic(n_verts, n_edges, edge_verts, a, b, _brush, _diff_a, _diff_b, _f, _k, dt, time_steps, field_mult) + except: + print('Not using Numba! The simulation could be slow.') + arr = np.arange(n_edges) + id0 = edge_verts[arr*2] # first vertex indices for each edge + id1 = edge_verts[arr*2+1] # second vertex indices for each edge + if len(field_mult) == 1: mult = 1 + else: mult = field_mult[arr] # second vertex indices for each edge + _diff_a = diff_a*scale + _diff_b = diff_b*scale + for i in range(time_steps): + b += brush + lap_a = np.zeros(n_verts) + lap_b = np.zeros(n_verts) + lap_a0 = (a[id1] - a[id0])*mult # laplacian increment for first vertex of each edge + lap_b0 = (b[id1] - b[id0])*mult # laplacian increment for first vertex of each edge + + np.add.at(lap_a, id0, lap_a0) + np.add.at(lap_b, id0, lap_b0) + np.add.at(lap_a, id1, -lap_a0) + np.add.at(lap_b, id1, -lap_b0) + ab2 = a*b**2 + a += eval("(_diff_a*lap_a - ab2 + f*(1-a))*dt") + b += eval("(_diff_b*lap_b + ab2 - (k+f)*b)*dt") + + a = nan_to_num(a) + b = nan_to_num(b) + tissue_time(start, "Simulation", levels=1) + start = time.time() + if bake: + if not(os.path.exists(folder)): + os.mkdir(folder) + file_name = folder / "a_{:04d}".format(frame) + a.tofile(file_name) + file_name = folder / "b_{:04d}".format(frame) + b.tofile(file_name) + if props.input_mode == 'STATIC': + tissue_time(start, "Baked", levels=1) + tissue_time(beginning, "Reaction-Diffusion on {}".format(ob.name), levels=0) + + start = time.time() + if props.output_data == 'ATTRIBUTES': + store_attribute_parameter(ob.data, 'RD_A', a, 'POINT', 'FLOAT') + store_attribute_parameter(ob.data, 'RD_B', b, 'POINT', 'FLOAT') + ob.data.update() + else: + if props.input_mode == 'STATIC': + store_attribute_parameter(ob.data, 'RD_A', a, 'POINT', 'FLOAT') + store_attribute_parameter(ob.data, 'RD_B', b, 'POINT', 'FLOAT') + ob.data.update() + if 'A' in ob.vertex_groups.keys(): + vg_a = ob.vertex_groups['A'] + else: + vg_a = ob.vertex_groups.new(name='A') + if 'B' in ob.vertex_groups.keys(): + vg_b = ob.vertex_groups['B'] + else: + vg_b = ob.vertex_groups.new(name='B') + if ob.mode == 'WEIGHT_PAINT': + # slower, but prevent crashes + for i in range(n_verts): + if vg_a: vg_a.add([i], a[i], 'REPLACE') + if vg_b: vg_b.add([i], b[i], 'REPLACE') + else: + if use_modifiers or props.bool_cache: + #bm.free() # release old bmesh + bm = bmesh.new() # create an empty BMesh + bm.from_mesh(ob.data) # fill it in from a Mesh + dvert_lay = bm.verts.layers.deform.active + # faster, but can cause crashes while painting weight + if vg_a: index_a = vg_a.index + if vg_b: index_b = vg_b.index + for i, v in enumerate(bm.verts): + dvert = v[dvert_lay] + if vg_a: dvert[index_a] = a[i] + if vg_b: dvert[index_b] = b[i] + bm.to_mesh(ob.data) + bm.free() + + for ps in ob.particle_systems: + if ps.vertex_group_density == 'B' or ps.vertex_group_density == 'A': + ps.invert_vertex_group_density = not ps.invert_vertex_group_density + ps.invert_vertex_group_density = not ps.invert_vertex_group_density + + if use_modifiers and not props.bool_cache: bpy.data.meshes.remove(me) + tissue_time(start, "Writing data", levels=1) + tissue_time(beginning, "Reaction-Diffusion on {}".format(ob.name), levels=0) + +class 
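# Editor's sketch, not part of the patch: the NumPy fallback above is a Gray-Scott
# reaction-diffusion step on the mesh edge graph. The graph Laplacian of A and B is
# accumulated per edge with np.add.at, then both fields are advanced explicitly
# (the brush term is left out here). Runnable on a toy 4-vertex chain:
import numpy as np

def gray_scott_step(a, b, edge_verts, diff_a, diff_b, f, k, dt):
    id0, id1 = edge_verts[:, 0], edge_verts[:, 1]
    lap_a = np.zeros_like(a)
    lap_b = np.zeros_like(b)
    da = a[id1] - a[id0]                 # per-edge difference of A
    db = b[id1] - b[id0]                 # per-edge difference of B
    np.add.at(lap_a, id0, da)            # each edge adds +d to one end...
    np.add.at(lap_a, id1, -da)           # ...and -d to the other
    np.add.at(lap_b, id0, db)
    np.add.at(lap_b, id1, -db)
    ab2 = a * b**2                       # reaction term
    a = a + (diff_a * lap_a - ab2 + f * (1 - a)) * dt
    b = b + (diff_b * lap_b + ab2 - (k + f) * b) * dt
    return a, b

edge_verts = np.array([[0, 1], [1, 2], [2, 3]])
a = np.ones(4)
b = np.array([0.0, 0.5, 0.0, 0.0])       # seed some B on one vertex
for _ in range(20):
    a, b = gray_scott_step(a, b, edge_verts, 0.18, 0.09, 0.055, 0.062, dt=0.5)
print(a, b)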
TISSUE_PT_reaction_diffusion(Panel): + bl_space_type = 'PROPERTIES' + bl_region_type = 'WINDOW' + bl_context = "data" + bl_label = "Tissue Reaction-Diffusion" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + return 'A' and 'B' in context.object.vertex_groups + + def draw(self, context): + reaction_diffusion_add_handler(self, context) + + ob = context.object + props = ob.reaction_diffusion_settings + layout = self.layout + col = layout.column(align=True) + row = col.row(align=True) + if not ("A" and "B" in ob.vertex_groups): + row.operator("object.start_reaction_diffusion", + icon="EXPERIMENTAL", + text="Reaction-Diffusion") + else: + row.operator("object.start_reaction_diffusion", + icon="EXPERIMENTAL", + text="Reset Reaction-Diffusion") + row.prop(props, "bool_mod", text="", icon='MODIFIER') + row.prop(props, "run", text="", icon='TIME') + col.separator() + col = layout.column(align=True) + row = col.row(align=True) + row.prop(props, "time_steps") + row.prop(props, "dt") + row.enabled = not props.bool_cache + col.separator() + row = col.row(align=True) + col1 = row.column(align=True) + col1.prop(props, "diff_a") + col1.enabled = props.vertex_group_diff_a == '' and not props.bool_cache + col1 = row.column(align=True) + col1.prop(props, "diff_b") + col1.enabled = props.vertex_group_diff_b == '' and not props.bool_cache + row = col.row(align=True) + row.prop(props, "diff_mult") + row.enabled = props.vertex_group_scale == '' and not props.bool_cache + row = col.row(align=True) + col1 = row.column(align=True) + col1.prop(props, "f") + col1.enabled = props.vertex_group_f == '' and not props.bool_cache + col1 = row.column(align=True) + col1.prop(props, "k") + col1.enabled = props.vertex_group_k == '' and not props.bool_cache + +class TISSUE_PT_reaction_diffusion_vector_field(Panel): + bl_space_type = 'PROPERTIES' + bl_region_type = 'WINDOW' + bl_context = "data" + bl_parent_id = "TISSUE_PT_reaction_diffusion" + bl_label = "Anisotropic" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + return 'A' and 'B' in context.object.vertex_groups + + def draw(self, context): + ob = context.object + props = ob.reaction_diffusion_settings + layout = self.layout + col = layout.column(align=True) + col.prop(props, "vector_field_mode", text="Mode") + if props.vector_field_mode == 'OBJECT': + col.prop_search(props, "vector_field_object", context.scene, "objects", text='Object') + if props.vector_field_mode == 'GRADIENT': + col.prop_search(props, 'vertex_group_gradient', ob, "vertex_groups") + if props.vector_field_mode == 'XYZ': + vgk = ob.vertex_groups.keys() + if 'x' not in vgk: + col.label(text="Vertex Group 'x' is missing", icon='ERROR') + if 'y' not in vgk: + col.label(text="Vertex Group 'y' is missing", icon='ERROR') + if 'z' not in vgk: + col.label(text="Vertex Group 'z' is missing", icon='ERROR') + if props.vector_field_mode == 'VECTOR_ATTRIBUTE': + vgk = ob.vertex_groups.keys() + if 'RD_vector_field' not in ob.data.attributes: + col.label(text="Vector Attribute 'RD_vector_field' is missing", icon='ERROR') + if props.vector_field_mode == 'VECTOR': + row = col.row() + row.prop(props, "vector") + if props.vector_field_mode != 'NONE': + col.separator() + row = col.row() + row.prop(props, 'perp_vector_field', text='Perpendicular') + row.prop(props, "anisotropy") + +class TISSUE_PT_reaction_diffusion_performance(Panel): + bl_space_type = 'PROPERTIES' + bl_region_type = 'WINDOW' + bl_context = "data" + bl_parent_id = "TISSUE_PT_reaction_diffusion" + 
bl_label = "Performance" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + return 'A' and 'B' in context.object.vertex_groups + + def draw(self, context): + ob = context.object + props = ob.reaction_diffusion_settings + layout = self.layout + col = layout.column(align=True) + row = col.row(align=True) + row.prop(props, "input_mode", text='Mode') + if props.input_mode == 'STATIC': + col.separator() + row = col.row(align=True) + row.prop(props, "reload_at_start", icon = 'SORTTIME') + row.prop(props, "update_geometry_data", icon ='MOD_DATA_TRANSFER') + col.separator() + col.prop(props, "input_data", text='Read from') + col.prop(props, "output_data", text='Write to') + col.separator() + +class TISSUE_PT_reaction_diffusion_weight(Panel): + bl_space_type = 'PROPERTIES' + bl_region_type = 'WINDOW' + bl_context = "data" + bl_parent_id = "TISSUE_PT_reaction_diffusion" + bl_label = "Variable Parameters" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + return 'A' and 'B' in context.object.vertex_groups + + def draw(self, context): + ob = context.object + props = ob.reaction_diffusion_settings + layout = self.layout + col = layout.column(align=True) + if props.input_data == 'WEIGHT': + insert_weight_parameter(col, ob, 'brush', text='Brush:') + insert_weight_parameter(col, ob, 'diff_a', text='Diff A:') + insert_weight_parameter(col, ob, 'diff_b', text='Diff B:') + insert_weight_parameter(col, ob, 'scale', text='Scale:') + insert_weight_parameter(col, ob, 'f', text='f:') + insert_weight_parameter(col, ob, 'k', text='k:') + else: + col.label(text='Using Attributes (Vertex > Float) if existing:') + insert_attribute_parameter(col, ob, 'RD_brush', text='Brush:') + insert_attribute_parameter(col, ob, 'RD_diff_a', text='Diff A:') + insert_attribute_parameter(col, ob, 'RD_diff_b', text='Diff B:') + insert_attribute_parameter(col, ob, 'RD_scale', text='Scale:') + insert_attribute_parameter(col, ob, 'RD_f', text='f:') + insert_attribute_parameter(col, ob, 'RD_k', text='k:') + if not props.bool_mod: + col.label(text="'Use Modifiers' is disabled.", icon='INFO') + col.enabled = not props.bool_cache + +class TISSUE_PT_reaction_diffusion_cache(Panel): + bl_space_type = 'PROPERTIES' + bl_region_type = 'WINDOW' + bl_context = "data" + bl_parent_id = "TISSUE_PT_reaction_diffusion" + bl_label = "Cache" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + return 'A' and 'B' in context.object.vertex_groups + + def draw(self, context): + ob = context.object + props = ob.reaction_diffusion_settings + layout = self.layout + col = layout.column(align=True) + col.label(text='Cache:') + col.prop(props, "cache_dir", text='') + col.separator() + row = col.row(align=True) + row.prop(props, "cache_frame_start") + row.prop(props, "cache_frame_end") + col.separator() + if props.bool_cache: + col.operator("object.reaction_diffusion_free_data") + else: + row = col.row(align=True) + row.operator("object.bake_reaction_diffusion") + file = bpy.context.blend_data.filepath + temp = bpy.context.preferences.filepaths.temporary_directory + if file == temp == props.cache_dir == '': + row.enabled = False + col.label(text="Cannot use cache", icon='ERROR') + col.label(text='please save the Blender or set a Cache directory') + +def insert_weight_parameter(col, ob, name, text=''): + props = ob.reaction_diffusion_settings + split = col.split(factor=0.25, align=True) + col2 = split.column(align=True) + col2.label(text=text) + col2 = split.column(align=True) + row2 = 
col2.row(align=True) + row2.prop_search(props, 'vertex_group_' + name, ob, "vertex_groups", text='') + if name != 'brush': + row2.prop(props, "invert_vertex_group_" + name, text="", toggle=True, icon='ARROW_LEFTRIGHT') + if 'vertex_group_' + name in props: + if props['vertex_group_' + name] != '': + if name == 'brush': + col2.prop(props, "brush_mult") + else: + row2 = col2.row(align=True) + row2.prop(props, "min_" + name, text="Min") + row2 = col2.row(align=True) + row2.prop(props, "max_" + name, text="Max") + col.separator() + +def insert_attribute_parameter(col, ob, name, text=''): + props = ob.reaction_diffusion_settings + if name in ob.data.attributes.keys(): + col.label(text = text + ' Attribute "' + name + '" found!', icon='KEYFRAME_HLT') + else: + col.label(text = text + ' Attribute "' + name + '" not found.', icon='KEYFRAME') + col.separator() + +def rd_apply_modifiers(ob): + # hide deforming modifiers + mod_visibility = [] + for m in ob.modifiers: + mod_visibility.append(m.show_viewport) + if not (mod_preserve_shape(m) or 'RD' in m.name): m.show_viewport = False + + # evaluated mesh + dg = bpy.context.evaluated_depsgraph_get() + ob_eval = ob.evaluated_get(dg) + me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg) + if len(me.vertices) != len(ob.data.vertices): + return "TISSUE: Modifiers used for Reaction-Diffusion cannot change the number of vertices." + + # set original visibility + for v, m in zip(mod_visibility, ob.modifiers): + m.show_viewport = v + ob.modifiers.update() + return me diff --git a/mesh_tissue/weight_tools.py b/mesh_tissue/weight_tools.py index 9e4854ba2..071c8feb9 100644 --- a/mesh_tissue/weight_tools.py +++ b/mesh_tissue/weight_tools.py @@ -1,13 +1,29 @@ -# SPDX-FileCopyrightText: 2022-2023 Blender Foundation -# # SPDX-License-Identifier: GPL-2.0-or-later +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + #-------------------------- COLORS / GROUPS EXCHANGER -------------------------# # # -# Vertex Color to Vertex Group allow you to convert colors channels to weight # +# Vertex Color to Vertex Group allow you to convert colors channles to weight # # maps. # # The main purpose is to use vertex colors to store information when importing # -# files from other software. The script works with the active vertex color # +# files from other softwares. The script works with the active vertex color # # slot. # # For use the command "Vertex Clors to Vertex Groups" use the search bar # # (space bar). 
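# Editor's sketch, not part of the patch: the depsgraph pattern used by
# rd_apply_modifiers() above, reduced to its core — temporarily hide modifiers that
# should not be baked in, build a mesh from the evaluated object, then restore the
# viewport visibility. Assumes a Blender session; `keep(m)` stands in for the
# add-on's own mod_preserve_shape() test.
import bpy

def evaluated_copy(ob, keep=lambda m: False):
    visibility = [m.show_viewport for m in ob.modifiers]
    for m in ob.modifiers:
        if not keep(m):
            m.show_viewport = False          # hide modifiers we do not want applied
    dg = bpy.context.evaluated_depsgraph_get()
    me = bpy.data.meshes.new_from_object(
        ob.evaluated_get(dg), preserve_all_data_layers=True, depsgraph=dg)
    for m, visible in zip(ob.modifiers, visibility):
        m.show_viewport = visible            # restore the original state
    return me                                # caller should bpy.data.meshes.remove(me)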
# @@ -27,18 +43,9 @@ from statistics import mean, stdev from mathutils import Vector from mathutils.kdtree import KDTree from numpy import * -try: from .numba_functions import numba_reaction_diffusion, numba_reaction_diffusion_anisotropic, integrate_field -except: pass -#from .numba_functions import integrate_field -#from .numba_functions import numba_reaction_diffusion try: import numexpr as ne except: pass -# Reaction-Diffusion cache -from pathlib import Path -import random as rnd -import string - from bpy.types import ( Operator, Panel, @@ -52,200 +59,18 @@ from bpy.props import ( IntProperty, StringProperty, FloatVectorProperty, - IntVectorProperty + IntVectorProperty, + PointerProperty ) from .utils import * -def reaction_diffusion_add_handler(self, context): - # remove existing handlers - reaction_diffusion_remove_handler(self, context) - # add new handler - bpy.app.handlers.frame_change_post.append(reaction_diffusion_scene) - -def reaction_diffusion_remove_handler(self, context): - # remove existing handlers - old_handlers = [] - for h in bpy.app.handlers.frame_change_post: - if "reaction_diffusion" in str(h): - old_handlers.append(h) - for h in old_handlers: bpy.app.handlers.frame_change_post.remove(h) - class formula_prop(PropertyGroup): name : StringProperty() formula : StringProperty() float_var : FloatVectorProperty(name="", description="", default=(0, 0, 0, 0, 0), size=5) int_var : IntVectorProperty(name="", description="", default=(0, 0, 0, 0, 0), size=5) -class reaction_diffusion_prop(PropertyGroup): - run : BoolProperty(default=False, update = reaction_diffusion_add_handler, - description='Compute a new iteration on frame changes. Currently is not working during Render Animation') - - time_steps : IntProperty( - name="Steps", default=10, min=0, soft_max=50, - description="Number of Steps") - - dt : FloatProperty( - name="dt", default=1, min=0, soft_max=0.2, - description="Time Step") - - diff_a : FloatProperty( - name="Diff A", default=0.1, min=0, soft_max=2, precision=3, - description="Diffusion A") - - diff_b : FloatProperty( - name="Diff B", default=0.05, min=0, soft_max=2, precision=3, - description="Diffusion B") - - f : FloatProperty( - name="f", default=0.055, soft_min=0.01, soft_max=0.06, precision=4, step=0.05, - description="Feed Rate") - - k : FloatProperty( - name="k", default=0.062, soft_min=0.035, soft_max=0.065, precision=4, step=0.05, - description="Kill Rate") - - diff_mult : FloatProperty( - name="Scale", default=1, min=0, soft_max=1, max=10, precision=2, - description="Multiplier for the diffusion of both substances") - - vertex_group_diff_a : StringProperty( - name="Diff A", default='', - description="Vertex Group used for A diffusion") - - vertex_group_diff_b : StringProperty( - name="Diff B", default='', - description="Vertex Group used for B diffusion") - - vertex_group_scale : StringProperty( - name="Scale", default='', - description="Vertex Group used for Scale value") - - vertex_group_f : StringProperty( - name="f", default='', - description="Vertex Group used for Feed value (f)") - - vertex_group_k : StringProperty( - name="k", default='', - description="Vertex Group used for Kill value (k)") - - vertex_group_brush : StringProperty( - name="Brush", default='', - description="Vertex Group used for adding/removing B") - - invert_vertex_group_diff_a : BoolProperty(default=False, - description='Inverte the value of the Vertex Group Diff A') - - invert_vertex_group_diff_b : BoolProperty(default=False, - description='Inverte the value of the Vertex 
Group Diff B') - - invert_vertex_group_scale : BoolProperty(default=False, - description='Inverte the value of the Vertex Group Scale') - - invert_vertex_group_f : BoolProperty(default=False, - description='Inverte the value of the Vertex Group f') - - invert_vertex_group_k : BoolProperty(default=False, - description='Inverte the value of the Vertex Group k') - - min_diff_a : FloatProperty( - name="Min Diff A", default=0.1, min=0, soft_max=2, precision=3, - description="Min Diff A") - - max_diff_a : FloatProperty( - name="Max Diff A", default=0.1, min=0, soft_max=2, precision=3, - description="Max Diff A") - - min_diff_b : FloatProperty( - name="Min Diff B", default=0.1, min=0, soft_max=2, precision=3, - description="Min Diff B") - - max_diff_b : FloatProperty( - name="Max Diff B", default=0.1, min=0, soft_max=2, precision=3, - description="Max Diff B") - - min_scale : FloatProperty( - name="Scale", default=0.35, min=0, soft_max=1, max=10, precision=2, - description="Min Scale Value") - - max_scale : FloatProperty( - name="Scale", default=1, min=0, soft_max=1, max=10, precision=2, - description="Max Scale value") - - min_f : FloatProperty( - name="Min f", default=0.02, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4, step=0.05, - description="Min Feed Rate") - - max_f : FloatProperty( - name="Max f", default=0.055, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4, step=0.05, - description="Max Feed Rate") - - min_k : FloatProperty( - name="Min k", default=0.035, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4, step=0.05, - description="Min Kill Rate") - - max_k : FloatProperty( - name="Max k", default=0.062, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4, step=0.05, - description="Max Kill Rate") - - brush_mult : FloatProperty( - name="Mult", default=0.5, min=-1, max=1, precision=3, step=0.05, - description="Multiplier for brush value") - - bool_mod : BoolProperty( - name="Use Modifiers", default=False, - description="Read modifiers affect the vertex groups") - - bool_cache : BoolProperty( - name="Use Cache", default=False, - description="Read modifiers affect the vertex groups") - - cache_frame_start : IntProperty( - name="Start", default=1, - description="Frame on which the simulation starts") - - cache_frame_end : IntProperty( - name="End", default=250, - description="Frame on which the simulation ends") - - cache_dir : StringProperty( - name="Cache directory", default="", subtype='FILE_PATH', - description = 'Directory that contains Reaction-Diffusion cache files' - ) - - update_weight_a : BoolProperty( - name="Update Vertex Group A", default=True, - description="Transfer Cache to the Vertex Groups named A") - - update_weight_b : BoolProperty( - name="Update Vertex Group B", default=True, - description="Transfer Cache to the Vertex Groups named B") - - update_colors_a : BoolProperty( - name="Update Vertex Color A", default=False, - description="Transfer Cache to the Vertex Color named A") - - update_colors_b : BoolProperty( - name="Update Vertex Color B", default=False, - description="Transfer Cache to the Vertex Color named B") - - update_colors : BoolProperty( - name="Update Vertex Color AB", default=False, - description="Transfer Cache to the Vertex Color named AB") - - update_uv : BoolProperty( - name="Update UV", default=False, - description="Transfer Cache to the UV Map Layer named AB") - - normalize : BoolProperty( - name="Normalize values", default=False, - description="Normalize values from 0 to 1") - - fast_bake : BoolProperty( - 
name="Fast Bake", default=True, - description="Do not update modifiers or vertex groups while baking. Much faster!") - - from numpy import * def compute_formula(ob=None, formula="rx", float_var=(0,0,0,0,0), int_var=(0,0,0,0,0)): verts = ob.data.vertices @@ -631,7 +456,7 @@ class _weight_laplacian(Operator): for i in range(len(lap)): val = (lap[i]-min_def)/delta_def - if val > 0.7: print(str(val) + " " + str(lap[i])) + #if val > 0.7: print(str(val) + " " + str(lap[i])) #val = weight[i] + 0.2*lap[i] ob.vertex_groups[-1].add([i], val, 'REPLACE') self.bounds_string = str(round(min_def,2)) + " to " + str(round(max_def,2)) @@ -802,7 +627,6 @@ class weight_laplacian(Operator): lap = np.array(lap) lap /= np.max(lap) lap = list(lap) - print(lap) for i in range(n_verts): vg.add([i], lap[i], 'REPLACE') @@ -812,117 +636,11 @@ class weight_laplacian(Operator): bm.free() return {'FINISHED'} - -class reaction_diffusion(Operator): - bl_idname = "object.reaction_diffusion" - bl_label = "Reaction Diffusion" - bl_description = ("Run a Reaction-Diffusion based on existing Vertex Groups: A and B") - bl_options = {'REGISTER', 'UNDO'} - - steps : IntProperty( - name="Steps", default=10, min=0, soft_max=50, - description="Number of Steps") - - dt : FloatProperty( - name="dt", default=0.2, min=0, soft_max=0.2, - description="Time Step") - - diff_a : FloatProperty( - name="Diff A", default=1, min=0, soft_max=2, - description="Diffusion A") - - diff_b : FloatProperty( - name="Diff B", default=0.5, min=0, soft_max=2, - description="Diffusion B") - - f : FloatProperty( - name="f", default=0.055, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4, - description="Feed Rate") - - k : FloatProperty( - name="k", default=0.062, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4, - description="Kill Rate") - - bounds_string = "" - - frame = None - - @classmethod - def poll(cls, context): - return len(context.object.vertex_groups) > 0 - - - def execute(self, context): - #bpy.app.handlers.frame_change_post.remove(reaction_diffusion_def) - reaction_diffusion_add_handler(self, context) - set_animatable_fix_handler(self, context) - try: ob = context.object - except: - self.report({'ERROR'}, "Please select an Object") - return {'CANCELLED'} - - me = ob.data - bm = bmesh.new() - bm.from_mesh(me) - bm.edges.ensure_lookup_table() - - # store weight values - a = [] - b = [] - for v in me.vertices: - try: - a.append(ob.vertex_groups["A"].weight(v.index)) - except: - a.append(0) - try: - b.append(ob.vertex_groups["B"].weight(v.index)) - except: - b.append(0) - - a = array(a) - b = array(b) - f = self.f - k = self.k - diff_a = self.diff_a - diff_b = self.diff_b - dt = self.dt - n_verts = len(bm.verts) - - for i in range(self.steps): - - lap_a = zeros((n_verts))#[0]*n_verts - lap_b = zeros((n_verts))#[0]*n_verts - for e in bm.edges: - id0 = e.verts[0].index - id1 = e.verts[1].index - lap_a[id0] += a[id1] - a[id0] - lap_a[id1] += a[id0] - a[id1] - lap_b[id0] += b[id1] - b[id0] - lap_b[id1] += b[id0] - b[id1] - ab2 = a*b**2 - a += (diff_a*lap_a - ab2 + f*(1-a))*dt - b += (diff_b*lap_b + ab2 - (k+f)*b)*dt - - for i in range(n_verts): - ob.vertex_groups['A'].add([i], a[i], 'REPLACE') - ob.vertex_groups['B'].add([i], b[i], 'REPLACE') - ob.vertex_groups.update() - ob.data.update() - - bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1) - - bpy.ops.object.mode_set(mode='WEIGHT_PAINT') - bm.free() - return {'FINISHED'} - - class edges_deformation(Operator): bl_idname = "object.edges_deformation" bl_label = "Edges 
Deformation" - bl_description = ( - "Compute Weight based on the deformation of edges " - "according to visible modifiers" - ) + bl_description = ("Compute Weight based on the deformation of edges"+ + "according to visible modifiers.") bl_options = {'REGISTER', 'UNDO'} bounds : EnumProperty( @@ -1065,10 +783,8 @@ class edges_deformation(Operator): class edges_bending(Operator): bl_idname = "object.edges_bending" bl_label = "Edges Bending" - bl_description = ( - "Compute Weight based on the bending of edges" - "according to visible modifiers" - ) + bl_description = ("Compute Weight based on the bending of edges"+ + "according to visible modifiers.") bl_options = {'REGISTER', 'UNDO'} bounds : EnumProperty( @@ -1244,7 +960,7 @@ class weight_contour_displace(Operator): vertex_group_name = ob0.vertex_groups[group_id].name bpy.ops.object.mode_set(mode='EDIT') - bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.mesh.select_all(action='DESELECT') bpy.ops.object.mode_set(mode='OBJECT') if self.use_modifiers: #me0 = ob0.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy() @@ -1461,10 +1177,10 @@ class weight_contour_displace(Operator): # Displace Modifier if self.bool_displace: - displace_modifier = ob.modifiers.new(type='DISPLACE', name='Displace') - displace_modifier.mid_level = 0 - displace_modifier.strength = 0.1 - displace_modifier.vertex_group = vertex_group_name + ob.modifiers.new(type='DISPLACE', name='Displace') + ob.modifiers["Displace"].mid_level = 0 + ob.modifiers["Displace"].strength = 0.1 + ob.modifiers['Displace'].vertex_group = vertex_group_name bpy.ops.object.mode_set(mode='EDIT') bpy.ops.object.mode_set(mode='WEIGHT_PAINT') @@ -1743,11 +1459,8 @@ class weight_contour_mask_wip(Operator): group_id = ob0.vertex_groups.active_index vertex_group_name = ob0.vertex_groups[group_id].name - #bpy.ops.object.mode_set(mode='EDIT') - #bpy.ops.mesh.select_all(action='SELECT') - #bpy.ops.object.mode_set(mode='OBJECT') if self.use_modifiers: - me0 = simple_to_mesh(ob0)#ob0.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy() + me0 = simple_to_mesh(ob0) else: me0 = ob0.data.copy() @@ -1769,7 +1482,6 @@ class weight_contour_mask_wip(Operator): weight = weight[mask] mask = np.logical_not(mask) delete_verts = np.array(bm.verts)[mask] - #for v in delete_verts: bm.verts.remove(v) # Create mesh and object name = ob0.name + '_ContourMask_{:.3f}'.format(iso_val) @@ -1815,590 +1527,11 @@ class weight_contour_mask_wip(Operator): return {'FINISHED'} - -class weight_contour_curves(Operator): - bl_idname = "object.weight_contour_curves" - bl_label = "Contour Curves" - bl_description = ("") - bl_options = {'REGISTER', 'UNDO'} - - use_modifiers : BoolProperty( - name="Use Modifiers", default=True, - description="Apply all the modifiers") - - min_iso : FloatProperty( - name="Min Value", default=0., soft_min=0, soft_max=1, - description="Minimum weight value") - max_iso : FloatProperty( - name="Max Value", default=1, soft_min=0, soft_max=1, - description="Maximum weight value") - n_curves : IntProperty( - name="Curves", default=3, soft_min=1, soft_max=10, - description="Number of Contour Curves") - - min_rad : FloatProperty( - name="Min Radius", default=1, soft_min=0, soft_max=1, - description="Change radius according to Iso Value") - max_rad : FloatProperty( - name="Max Radius", default=1, soft_min=0, soft_max=1, - description="Change radius according to Iso Value") - - @classmethod - def poll(cls, context): - ob = context.object 
- return len(ob.vertex_groups) > 0 or ob.type == 'CURVE' - - def invoke(self, context, event): - return context.window_manager.invoke_props_dialog(self, width=350) - - def execute(self, context): - start_time = timeit.default_timer() - try: - check = context.object.vertex_groups[0] - except: - self.report({'ERROR'}, "The object doesn't have Vertex Groups") - return {'CANCELLED'} - ob0 = context.object - - group_id = ob0.vertex_groups.active_index - vertex_group_name = ob0.vertex_groups[group_id].name - - bpy.ops.object.mode_set(mode='EDIT') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.object.mode_set(mode='OBJECT') - if self.use_modifiers: - me0 = simple_to_mesh(ob0) #ob0.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy() - else: - me0 = ob0.data.copy() - - # generate new bmesh - bm = bmesh.new() - bm.from_mesh(me0) - bm.verts.ensure_lookup_table() - bm.edges.ensure_lookup_table() - bm.faces.ensure_lookup_table() - - # store weight values - weight = [] - ob = bpy.data.objects.new("temp", me0) - for g in ob0.vertex_groups: - ob.vertex_groups.new(name=g.name) - weight = get_weight_numpy(ob.vertex_groups[vertex_group_name], len(bm.verts)) - - #filtered_edges = bm.edges - total_verts = np.zeros((0,3)) - total_segments = [] - radius = [] - - # start iterate contours levels - vertices = get_vertices_numpy(me0) - filtered_edges = get_edges_id_numpy(me0) - - faces_weight = [np.array([weight[v] for v in p.vertices]) for p in me0.polygons] - fw_min = np.array([np.min(fw) for fw in faces_weight]) - fw_max = np.array([np.max(fw) for fw in faces_weight]) - - bm_faces = np.array(bm.faces) - - ### Spiral - normals = np.array([v.normal for v in me0.vertices]) - - for c in range(self.n_curves): - min_iso = min(self.min_iso, self.max_iso) - max_iso = max(self.min_iso, self.max_iso) - try: - delta_iso = (max_iso-min_iso)/(self.n_curves-1) - iso_val = c*delta_iso + min_iso - if iso_val < 0: iso_val = (min_iso + max_iso)/2 - except: - iso_val = (min_iso + max_iso)/2 - - # remove passed faces - bool_mask = iso_val < fw_max - bm_faces = bm_faces[bool_mask] - fw_min = fw_min[bool_mask] - fw_max = fw_max[bool_mask] - - # mask faces - bool_mask = fw_min < iso_val - faces_mask = bm_faces[bool_mask] - - n_verts = len(bm.verts) - count = len(total_verts) - - # vertices indexes - id0 = filtered_edges[:,0] - id1 = filtered_edges[:,1] - # vertices weight - w0 = weight[id0] - w1 = weight[id1] - # weight condition - bool_w0 = w0 < iso_val - bool_w1 = w1 < iso_val - - # mask all edges that have one weight value below the iso value - mask_new_verts = np.logical_xor(bool_w0, bool_w1) - - id0 = id0[mask_new_verts] - id1 = id1[mask_new_verts] - # filter arrays - v0 = vertices[id0] - v1 = vertices[id1] - w0 = w0[mask_new_verts] - w1 = w1[mask_new_verts] - div = (w1-w0) - if div == 0: div = 0.000001 - - param = np.expand_dims((iso_val-w0)/div,axis=1) - verts = v0 + (v1-v0)*param - - # indexes of edges with new vertices - edges_index = filtered_edges[mask_new_verts][:,2] - edges_id = {} - for i, id in enumerate(edges_index): edges_id[id] = i+len(total_verts) - - # remove all edges completely below the iso value - mask_edges = np.logical_not(np.logical_and(bool_w0, bool_w1)) - filtered_edges = filtered_edges[mask_edges] - if len(verts) == 0: continue - - # finding segments - segments = [] - for f in faces_mask: - seg = [] - for e in f.edges: - try: - seg.append(edges_id[e.index]) - if len(seg) == 2: - segments.append(seg) - seg = [] - except: pass - - - #curves_points_indexes = 
find_curves(segments) - total_segments = total_segments + segments - total_verts = np.concatenate((total_verts,verts)) - - if self.min_rad != self.max_rad: - try: - iso_rad = c*(self.max_rad-self.min_rad)/(self.n_curves-1)+self.min_rad - if iso_rad < 0: iso_rad = (self.min_rad + self.max_rad)/2 - except: - iso_rad = (self.min_rad + self.max_rad)/2 - radius = radius + [iso_rad]*len(verts) - print("Contour Curves, computing time: " + str(timeit.default_timer() - start_time) + " sec") - bm.free() - bm = bmesh.new() - # adding new vertices _local for fast access - _new_vert = bm.verts.new - for v in total_verts: _new_vert(v) - bm.verts.ensure_lookup_table() - - # adding new edges - _new_edge = bm.edges.new - for s in total_segments: - try: - pts = [bm.verts[i] for i in s] - _new_edge(pts) - except: pass - - - try: - name = ob0.name + '_ContourCurves' - me = bpy.data.meshes.new(name) - bm.to_mesh(me) - bm.free() - ob = bpy.data.objects.new(name, me) - # Link object to scene and make active - scn = context.scene - context.collection.objects.link(ob) - context.view_layer.objects.active = ob - ob.select_set(True) - ob0.select_set(False) - - print("Contour Curves, bmesh time: " + str(timeit.default_timer() - start_time) + " sec") - bpy.ops.object.convert(target='CURVE') - ob = context.object - if not (self.min_rad == 0 and self.max_rad == 0): - if self.min_rad != self.max_rad: - count = 0 - for s in ob.data.splines: - for p in s.points: - p.radius = radius[count] - count += 1 - else: - for s in ob.data.splines: - for p in s.points: - p.radius = self.min_rad - ob.data.bevel_depth = 0.01 - ob.data.fill_mode = 'FULL' - ob.data.bevel_resolution = 3 - except: - self.report({'ERROR'}, "There are no values in the chosen range") - return {'CANCELLED'} - - # align new object - ob.matrix_world = ob0.matrix_world - print("Contour Curves time: " + str(timeit.default_timer() - start_time) + " sec") - - bpy.data.meshes.remove(me0) - bpy.data.meshes.remove(me) - - return {'FINISHED'} - -class tissue_weight_contour_curves_pattern(Operator): - bl_idname = "object.tissue_weight_contour_curves_pattern" - bl_label = "Contour Curves" - bl_description = ("") - bl_options = {'REGISTER', 'UNDO'} - - use_modifiers : BoolProperty( - name="Use Modifiers", default=True, - description="Apply all the modifiers") - - auto_bevel : BoolProperty( - name="Automatic Bevel", default=False, - description="Bevel depends on weight density") - - min_iso : FloatProperty( - name="Min Value", default=0., soft_min=0, soft_max=1, - description="Minimum weight value") - max_iso : FloatProperty( - name="Max Value", default=1, soft_min=0, soft_max=1, - description="Maximum weight value") - n_curves : IntProperty( - name="Curves", default=10, soft_min=1, soft_max=100, - description="Number of Contour Curves") - min_rad = 1 - max_rad = 1 - - in_displace : FloatProperty( - name="Displace A", default=0, soft_min=-10, soft_max=10, - description="Pattern displace strength") - out_displace : FloatProperty( - name="Displace B", default=2, soft_min=-10, soft_max=10, - description="Pattern displace strength") - - in_steps : IntProperty( - name="Steps A", default=1, min=0, soft_max=10, - description="Number of layers to move inwards") - out_steps : IntProperty( - name="Steps B", default=1, min=0, soft_max=10, - description="Number of layers to move outwards") - limit_z : BoolProperty( - name="Limit Z", default=False, - description="Limit Pattern in Z") - - merge : BoolProperty( - name="Merge Vertices", default=True, - description="Merge points") - 
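# Editor's sketch, not part of the patch: the core of the contour extraction in the
# operators around here — keep only edges whose endpoint weights straddle the iso
# value, then place a point on each by linear interpolation, exactly as the masked
# id0/id1/param code above does. Runnable NumPy version:
import numpy as np

def contour_points(verts, edges, weight, iso_val):
    w0 = weight[edges[:, 0]]
    w1 = weight[edges[:, 1]]
    crossing = (w0 < iso_val) != (w1 < iso_val)       # XOR: endpoints on opposite sides
    e = edges[crossing]
    w0, w1 = w0[crossing], w1[crossing]
    div = np.where(w1 - w0 == 0, 1e-6, w1 - w0)       # avoid division by zero
    t = ((iso_val - w0) / div)[:, None]
    return verts[e[:, 0]] + (verts[e[:, 1]] - verts[e[:, 0]]) * t

verts = np.array([[0.0, 0, 0], [1, 0, 0], [2, 0, 0]])
edges = np.array([[0, 1], [1, 2]])
weight = np.array([0.0, 0.4, 1.0])
print(contour_points(verts, edges, weight, iso_val=0.5))   # [[1.1666... 0. 0.]]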
merge_thres : FloatProperty( - name="Merge Threshold", default=0.01, min=0, soft_max=1, - description="Minimum Curve Radius") - - bevel_depth : FloatProperty( - name="Bevel Depth", default=0, min=0, soft_max=1, - description="") - min_bevel_depth : FloatProperty( - name="Min Bevel Depth", default=0.1, min=0, soft_max=1, - description="") - max_bevel_depth : FloatProperty( - name="Max Bevel Depth", default=1, min=0, soft_max=1, - description="") - remove_open_curves : BoolProperty( - name="Remove Open Curves", default=False, - description="Remove Open Curves") - - vertex_group_pattern : StringProperty( - name="Displace", default='', - description="Vertex Group used for pattern displace") - - vertex_group_bevel : StringProperty( - name="Bevel", default='', - description="Variable Bevel depth") - - object_name : StringProperty( - name="Active Object", default='', - description="") - - try: vg_name = bpy.context.object.vertex_groups.active.name - except: vg_name = '' - - vertex_group_contour : StringProperty( - name="Contour", default=vg_name, - description="Vertex Group used for contouring") - clean_distance : FloatProperty( - name="Clean Distance", default=0, min=0, soft_max=10, - description="Remove short segments") - - - spiralized: BoolProperty( - name='Spiralized', default=False, - description='Create a Spiral Contour. Works better with dense meshes' - ) - spiral_axis: FloatVectorProperty( - name="Spiral Axis", default=(0,0,1), - description="Axis of the Spiral (in local coordinates)" - ) - spiral_rotation : FloatProperty( - name="Spiral Rotation", default=0, min=0, max=2*pi, - description="" - ) - - @classmethod - def poll(cls, context): - ob = context.object - return ob and len(ob.vertex_groups) > 0 or ob.type == 'CURVE' - - def invoke(self, context, event): - return context.window_manager.invoke_props_dialog(self, width=250) - - def draw(self, context): - if not context.object.type == 'CURVE': - self.object_name = context.object.name - ob = bpy.data.objects[self.object_name] - if self.vertex_group_contour not in [vg.name for vg in ob.vertex_groups]: - self.vertex_group_contour = ob.vertex_groups.active.name - layout = self.layout - col = layout.column(align=True) - col.prop(self, "use_modifiers") - col.label(text="Contour Curves:") - col.prop_search(self, 'vertex_group_contour', ob, "vertex_groups", text='') - row = col.row(align=True) - row.prop(self,'min_iso') - row.prop(self,'max_iso') - col.prop(self,'n_curves') - col.separator() - col.label(text='Curves Bevel:') - col.prop(self,'auto_bevel') - if not self.auto_bevel: - col.prop_search(self, 'vertex_group_bevel', ob, "vertex_groups", text='') - if self.vertex_group_bevel != '' or self.auto_bevel: - row = col.row(align=True) - row.prop(self,'min_bevel_depth') - row.prop(self,'max_bevel_depth') - else: - col.prop(self,'bevel_depth') - col.separator() - - col.label(text="Displace Pattern:") - col.prop_search(self, 'vertex_group_pattern', ob, "vertex_groups", text='') - if self.vertex_group_pattern != '': - row = col.row(align=True) - row.prop(self,'in_steps') - row.prop(self,'out_steps') - row = col.row(align=True) - row.prop(self,'in_displace') - row.prop(self,'out_displace') - col.prop(self,'limit_z') - col.separator() - row=col.row(align=True) - row.prop(self,'spiralized') - row.label(icon='MOD_SCREW') - if self.spiralized: - #row=col.row(align=True) - #row.prop(self,'spiral_axis') - #col.separator() - col.prop(self,'spiral_rotation') - col.separator() - - col.label(text='Clean Curves:') - col.prop(self,'clean_distance') - 
col.prop(self,'remove_open_curves') - - def execute(self, context): - n_curves = self.n_curves - start_time = timeit.default_timer() - try: - check = context.object.vertex_groups[0] - except: - self.report({'ERROR'}, "The object doesn't have Vertex Groups") - return {'CANCELLED'} - ob0 = bpy.data.objects[self.object_name] - - dg = context.evaluated_depsgraph_get() - ob = ob0.evaluated_get(dg) - me0 = ob.data - - # generate new bmesh - bm = bmesh.new() - bm.from_mesh(me0) - n_verts = len(bm.verts) - - # store weight values - try: - weight = get_weight_numpy(ob.vertex_groups[self.vertex_group_contour], len(me0.vertices)) - except: - bm.free() - self.report({'ERROR'}, "Please select a Vertex Group for contouring") - return {'CANCELLED'} - - try: - pattern_weight = get_weight_numpy(ob.vertex_groups[self.vertex_group_pattern], len(me0.vertices)) - except: - #self.report({'WARNING'}, "There is no Vertex Group assigned to the pattern displace") - pattern_weight = np.zeros(len(me0.vertices)) - - variable_bevel = False - try: - bevel_weight = get_weight_numpy(ob.vertex_groups[self.vertex_group_bevel], len(me0.vertices)) - variable_bevel = True - except: - bevel_weight = np.ones(len(me0.vertices)) - - if self.auto_bevel: - # calc weight density - bevel_weight = np.ones(len(me0.vertices))*10000 - bevel_weight = np.zeros(len(me0.vertices)) - edges_length = np.array([e.calc_length() for e in bm.edges]) - edges_dw = np.array([max(abs(weight[e.verts[0].index]-weight[e.verts[1].index]),0.000001) for e in bm.edges]) - dens = edges_length/edges_dw - n_records = np.zeros(len(me0.vertices)) - for i, e in enumerate(bm.edges): - for v in e.verts: - id = v.index - #bevel_weight[id] = min(bevel_weight[id], dens[i]) - bevel_weight[id] += dens[i] - n_records[id] += 1 - bevel_weight = bevel_weight/n_records - bevel_weight = (bevel_weight - min(bevel_weight))/(max(bevel_weight) - min(bevel_weight)) - #bevel_weight = 1-bevel_weight - variable_bevel = True - - #filtered_edges = bm.edges - total_verts = np.zeros((0,3)) - total_radii = np.zeros((0,1)) - total_segments = []# np.array([]) - radius = [] - - # start iterate contours levels - vertices, normals = get_vertices_and_normals_numpy(me0) - filtered_edges = get_edges_id_numpy(me0) - - - min_iso = min(self.min_iso, self.max_iso) - max_iso = max(self.min_iso, self.max_iso) - - # Spiral - if self.spiralized: - nx = normals[:,0] - ny = normals[:,1] - ang = self.spiral_rotation + weight*pi*n_curves+arctan2(nx,ny) - weight = sin(ang)/2+0.5 - n_curves = 1 - - if n_curves > 1: - delta_iso = (max_iso-min_iso)/(n_curves-1) - - else: - delta_iso = None - - faces_weight = [np.array([weight[v] for v in p.vertices]) for p in me0.polygons] - fw_min = np.array([np.min(fw) for fw in faces_weight]) - fw_max = np.array([np.max(fw) for fw in faces_weight]) - - bm_faces = np.array(bm.faces) - - #print("Contour Curves, data loaded: " + str(timeit.default_timer() - start_time) + " sec") - step_time = timeit.default_timer() - for c in range(n_curves): - if delta_iso: - iso_val = c*delta_iso + min_iso - if iso_val < 0: iso_val = (min_iso + max_iso)/2 - else: - iso_val = (min_iso + max_iso)/2 - - #if c == 0 and self.auto_bevel: - - - # remove passed faces - bool_mask = iso_val < fw_max - bm_faces = bm_faces[bool_mask] - fw_min = fw_min[bool_mask] - fw_max = fw_max[bool_mask] - - # mask faces - bool_mask = fw_min < iso_val - faces_mask = bm_faces[bool_mask] - - count = len(total_verts) - - new_filtered_edges, edges_index, verts, bevel = contour_edges_pattern(self, c, len(total_verts), iso_val, 
vertices, normals, filtered_edges, weight, pattern_weight, bevel_weight) - - if len(edges_index) > 0: - if self.auto_bevel and False: - bevel = 1-dens[edges_index] - bevel = bevel[:,np.newaxis] - if self.max_bevel_depth != self.min_bevel_depth: - min_radius = self.min_bevel_depth / max(0.0001,self.max_bevel_depth) - radii = min_radius + bevel*(1 - min_radius) - else: - radii = bevel - else: - continue - - if verts[0,0] == None: continue - else: filtered_edges = new_filtered_edges - edges_id = {} - for i, id in enumerate(edges_index): edges_id[id] = i + count - - if len(verts) == 0: continue - - # finding segments - segments = [] - for f in faces_mask: - seg = [] - for e in f.edges: - try: - #seg.append(new_ids[np.where(edges_index == e.index)[0][0]]) - seg.append(edges_id[e.index]) - if len(seg) == 2: - segments.append(seg) - seg = [] - except: pass - - total_segments = total_segments + segments - total_verts = np.concatenate((total_verts, verts)) - total_radii = np.concatenate((total_radii, radii)) - - if self.min_rad != self.max_rad: - try: - iso_rad = c*(self.max_rad-self.min_rad)/(self.n_curves-1)+self.min_rad - if iso_rad < 0: iso_rad = (self.min_rad + self.max_rad)/2 - except: - iso_rad = (self.min_rad + self.max_rad)/2 - radius = radius + [iso_rad]*len(verts) - #print("Contour Curves, points computing: " + str(timeit.default_timer() - step_time) + " sec") - step_time = timeit.default_timer() - - if len(total_segments) > 0: - step_time = timeit.default_timer() - ordered_points = find_curves(total_segments, len(total_verts)) - - #print("Contour Curves, point ordered in: " + str(timeit.default_timer() - step_time) + " sec") - step_time = timeit.default_timer() - crv = curve_from_pydata(total_verts, total_radii, ordered_points, ob0.name + '_ContourCurves', self.remove_open_curves, merge_distance=self.clean_distance) - context.view_layer.objects.active = crv - if variable_bevel: crv.data.bevel_depth = self.max_bevel_depth - else: crv.data.bevel_depth = self.bevel_depth - - crv.select_set(True) - ob0.select_set(False) - crv.matrix_world = ob0.matrix_world - #print("Contour Curves, curves created in: " + str(timeit.default_timer() - step_time) + " sec") - else: - bm.free() - self.report({'ERROR'}, "There are no values in the chosen range") - return {'CANCELLED'} - bm.free() - print("Contour Curves, total time: " + str(timeit.default_timer() - start_time) + " sec") - return {'FINISHED'} - class vertex_colors_to_vertex_groups(Operator): bl_idname = "object.vertex_colors_to_vertex_groups" bl_label = "Vertex Color" bl_options = {'REGISTER', 'UNDO'} - bl_description = ("Convert the active Vertex Color into a Vertex Group") + bl_description = ("Convert the active Vertex Color into a Vertex Group.") red : BoolProperty( name="red channel", default=False, description="convert red channel") @@ -2415,19 +1548,20 @@ class vertex_colors_to_vertex_groups(Operator): @classmethod def poll(cls, context): try: - return len(context.object.data.vertex_colors) > 0 + return len(context.object.data.color_attributes) > 0 except: return False def execute(self, context): - obj = context.active_object - id = len(obj.vertex_groups) + ob = context.active_object + id = len(ob.vertex_groups) id_red = id id_green = id id_blue = id id_value = id - boolCol = len(obj.data.vertex_colors) - if(boolCol): col_name = obj.data.vertex_colors.active.name + boolCol = len(ob.data.color_attributes) + if(boolCol): + col = ob.data.color_attributes.active_color bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_all(action='SELECT') 
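# Editor's sketch, not part of the patch: the value-channel conversion performed by
# vertex_colors_to_vertex_groups, written with foreach_get instead of per-loop
# Python. Assumes it runs inside Blender on the active mesh object with a
# point-domain color attribute; corner-domain attributes would first need to be
# averaged per vertex, as the operator's 'CORNER' branch does loop by loop.
import bpy
import numpy as np

ob = bpy.context.object
col_attr = ob.data.color_attributes.active_color       # expects domain == 'POINT'
n = len(ob.data.vertices)
rgba = np.empty(n * 4)
col_attr.data.foreach_get('color', rgba)
rgba = rgba.reshape(-1, 4)
value = 0.2126 * rgba[:, 0] + 0.7152 * rgba[:, 1] + 0.0722 * rgba[:, 2]   # Rec. 709 luma
vg = ob.vertex_groups.new(name=col_attr.name + '_value')
for i, w in enumerate(value):
    vg.add([i], float(w), 'REPLACE')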
@@ -2435,25 +1569,25 @@ class vertex_colors_to_vertex_groups(Operator): bpy.ops.object.vertex_group_add() bpy.ops.object.vertex_group_assign() id_red = id - obj.vertex_groups[id_red].name = col_name + '_red' + ob.vertex_groups[id_red].name = col.name + '_red' id+=1 if(self.green and boolCol): bpy.ops.object.vertex_group_add() bpy.ops.object.vertex_group_assign() id_green = id - obj.vertex_groups[id_green].name = col_name + '_green' + ob.vertex_groups[id_green].name = col.name + '_green' id+=1 if(self.blue and boolCol): bpy.ops.object.vertex_group_add() bpy.ops.object.vertex_group_assign() id_blue = id - obj.vertex_groups[id_blue].name = col_name + '_blue' + ob.vertex_groups[id_blue].name = col.name + '_blue' id+=1 if(self.value and boolCol): bpy.ops.object.vertex_group_add() bpy.ops.object.vertex_group_assign() id_value = id - obj.vertex_groups[id_value].name = col_name + '_value' + ob.vertex_groups[id_value].name = col.name + '_value' id+=1 mult = 1 @@ -2464,14 +1598,15 @@ class vertex_colors_to_vertex_groups(Operator): sub_blue = 1 + self.value sub_value = 1 - id = len(obj.vertex_groups) + id = len(ob.vertex_groups) if(id_red <= id and id_green <= id and id_blue <= id and id_value <= \ id and boolCol): - v_colors = obj.data.vertex_colors.active.data + v_colors = ob.data.color_attributes.active_color.data + i = 0 - for f in obj.data.polygons: - for v in f.vertices: - gr = obj.data.vertices[v].groups + if ob.data.color_attributes.active_color.domain == 'POINT': + for v in ob.data.vertices: + gr = v.groups if(self.red): gr[min(len(gr)-sub_red, id_red)].weight = \ self.invert + mult * v_colors[i].color[0] if(self.green): gr[min(len(gr)-sub_green, id_green)].weight\ @@ -2485,6 +1620,24 @@ class vertex_colors_to_vertex_groups(Operator): gr[min(len(gr)-sub_value, id_value)].weight\ = self.invert + mult * (0.2126*r + 0.7152*g + 0.0722*b) i+=1 + elif ob.data.color_attributes.active_color.domain == 'CORNER': + for f in ob.data.polygons: + for v in f.vertices: + gr = ob.data.vertices[v].groups + if(self.red): gr[min(len(gr)-sub_red, id_red)].weight = \ + self.invert + mult * v_colors[i].color[0] + if(self.green): gr[min(len(gr)-sub_green, id_green)].weight\ + = self.invert + mult * v_colors[i].color[1] + if(self.blue): gr[min(len(gr)-sub_blue, id_blue)].weight = \ + self.invert + mult * v_colors[i].color[2] + if(self.value): + r = v_colors[i].color[0] + g = v_colors[i].color[1] + b = v_colors[i].color[2] + gr[min(len(gr)-sub_value, id_value)].weight\ + = self.invert + mult * (0.2126*r + 0.7152*g + 0.0722*b) + i+=1 + bpy.ops.paint.weight_paint_toggle() return {'FINISHED'} @@ -2492,7 +1645,7 @@ class vertex_group_to_vertex_colors(Operator): bl_idname = "object.vertex_group_to_vertex_colors" bl_label = "Vertex Group" bl_options = {'REGISTER', 'UNDO'} - bl_description = ("Convert the active Vertex Group into a Vertex Color") + bl_description = ("Convert the active Vertex Group into a Vertex Color.") channel : EnumProperty( items=[('BLUE', 'Blue Channel', 'Convert to Blue Channel'), @@ -2519,8 +1672,8 @@ class vertex_group_to_vertex_colors(Operator): bpy.ops.object.mode_set(mode='OBJECT') group_name = obj.vertex_groups[group_id].name - me.vertex_colors.new() - colors_id = obj.data.vertex_colors.active_index + bpy.ops.geometry.color_attribute_add() + active_color = obj.data.color_attributes.active_color colors_name = group_name if(self.channel == 'FALSE_COLORS'): colors_name += "_false_colors" @@ -2528,9 +1681,9 @@ class vertex_group_to_vertex_colors(Operator): elif(self.channel == 'RED'): colors_name += 
"_red" elif(self.channel == 'GREEN'): colors_name += "_green" elif(self.channel == 'BLUE'): colors_name += "_blue" - context.object.data.vertex_colors[colors_id].name = colors_name + active_color.name = colors_name - v_colors = obj.data.vertex_colors.active.data + v_colors = obj.data.color_attributes.active_color.data bm = bmesh.new() bm.from_mesh(me) @@ -2539,9 +1692,7 @@ class vertex_group_to_vertex_colors(Operator): if self.invert: weight = 1-weight loops_size = get_attribute_numpy(me.polygons, attribute='loop_total', mult=1) n_colors = np.sum(loops_size) - verts = np.ones(n_colors) - me.polygons.foreach_get('vertices',verts) - splitted_weight = weight[verts.astype(int)][:,None] + splitted_weight = weight[:,None] r = np.zeros(splitted_weight.shape) g = np.zeros(splitted_weight.shape) b = np.zeros(splitted_weight.shape) @@ -2578,14 +1729,14 @@ class vertex_group_to_vertex_colors(Operator): v_colors.foreach_set('color',colors) bpy.ops.paint.vertex_paint_toggle() - context.object.data.vertex_colors[colors_id].active_render = True + bpy.ops.geometry.color_attribute_render_set(name=active_color.name) return {'FINISHED'} class vertex_group_to_uv(Operator): bl_idname = "object.vertex_group_to_uv" bl_label = "Vertex Group" bl_options = {'REGISTER', 'UNDO'} - bl_description = ("Combine two Vertex Groups as UV Map Layer") + bl_description = ("Combine two Vertex Groups as UV Map Layer.") vertex_group_u : StringProperty( name="U", default='', @@ -2673,10 +1824,7 @@ class curvature_to_vertex_groups(Operator): bl_label = "Curvature" bl_options = {'REGISTER', 'UNDO'} bl_description = ("Generate a Vertex Group based on the curvature of the" - "mesh. Is based on Dirty Vertex Color") - - invert : BoolProperty( - name="invert", default=False, description="invert values") + "mesh. 
Is based on Dirty Vertex Color.") blur_strength : FloatProperty( name="Blur Strength", default=1, min=0.001, @@ -2686,33 +1834,44 @@ class curvature_to_vertex_groups(Operator): name="Blur Iterations", default=1, min=0, max=40, description="Number of times to blur the values") - min_angle : FloatProperty( - name="Min Angle", default=0, min=0, - max=pi/2, subtype='ANGLE', description="Minimum angle") - - max_angle : FloatProperty( - name="Max Angle", default=pi, min=pi/2, - max=pi, subtype='ANGLE', description="Maximum angle") + angle : FloatProperty( + name="Angle", default=5*pi/90, min=0, + max=pi/2, subtype='ANGLE', description="Angle") invert : BoolProperty( name="Invert", default=False, description="Invert the curvature map") + absolute : BoolProperty( + name="Absolute", default=False, description="Absolute values") + def execute(self, context): bpy.ops.object.mode_set(mode='OBJECT') - vertex_colors = context.active_object.data.vertex_colors - vertex_colors.new() - vertex_colors[-1].active = True - vertex_colors[-1].active_render = True - vertex_colors[-1].name = "Curvature" - for c in vertex_colors[-1].data: c.color = (1,1,1,1) + bpy.ops.geometry.color_attribute_add(domain='CORNER', color = (1,1,1,1)) + color_attributes = context.active_object.data.color_attributes + color_attributes.active = color_attributes[-1] + color_attributes.active_color = color_attributes[-1] + color_attributes[-1].name = "Curvature" + bpy.ops.geometry.color_attribute_render_set(name=color_attributes[-1].name) bpy.ops.object.mode_set(mode='VERTEX_PAINT') bpy.ops.paint.vertex_color_dirt( blur_strength=self.blur_strength, - blur_iterations=self.blur_iterations, clean_angle=self.max_angle, - dirt_angle=self.min_angle) + blur_iterations=self.blur_iterations, + clean_angle=pi/2 + self.angle, + dirt_angle=pi/2 - self.angle, + normalize=False) bpy.ops.object.vertex_colors_to_vertex_groups(invert=self.invert) - vertex_colors.remove(vertex_colors.active) + if self.absolute: + ob = context.object + weight = get_weight_numpy(ob.vertex_groups[-1], len(ob.data.vertices)) + weight = np.abs(0.5-weight)*2 + bm = bmesh.new() + bm.from_mesh(ob.data) + bmesh_set_weight_numpy(bm,len(ob.vertex_groups)-1,weight) + bm.to_mesh(ob.data) + ob.vertex_groups.update() + ob.data.update() + #bpy.ops.geometry.color_attribute_remove() return {'FINISHED'} class face_area_to_vertex_groups(Operator): @@ -2720,7 +1879,7 @@ class face_area_to_vertex_groups(Operator): bl_label = "Area" bl_options = {'REGISTER', 'UNDO'} bl_description = ("Generate a Vertex Group based on the area of individual" - "faces") + "faces.") invert : BoolProperty( name="invert", default=False, description="invert values") @@ -3035,13 +2194,13 @@ class TISSUE_PT_weight(Panel): col.label(text="Weight Curves:") #col.operator("object.weight_contour_curves", icon="MOD_CURVE") col.operator("object.tissue_weight_streamlines", icon="ANIM") - col.operator("object.tissue_weight_contour_curves_pattern", icon="FORCE_TURBULENCE") + op = col.operator("object.tissue_weight_contour_curves_pattern", icon="FORCE_TURBULENCE") + op.contour_mode = 'WEIGHT' col.separator() col.operator("object.weight_contour_displace", icon="MOD_DISPLACE") col.operator("object.weight_contour_mask", icon="MOD_MASK") col.separator() col.label(text="Simulations:") - #col.operator("object.reaction_diffusion", icon="MOD_OCEAN") col.operator("object.start_reaction_diffusion", icon="EXPERIMENTAL", text="Reaction-Diffusion") @@ -3056,1164 +2215,11 @@ class TISSUE_PT_weight(Panel): 
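# Editor's sketch, not part of the patch: the new "Absolute" option of the curvature
# operator folds the Dirty-Vertex-Color result — which sits around 0.5 on flat areas
# and moves towards 0 or 1 on concave/convex ones — into an unsigned curvature map,
# exactly the np.abs(0.5 - weight) * 2 step above:
import numpy as np

weight = np.array([0.0, 0.25, 0.5, 0.75, 1.0])   # signed curvature from the dirt pass
unsigned = np.abs(0.5 - weight) * 2              # flat -> 0, strongly curved -> 1
print(unsigned)                                  # [1.  0.5 0.  0.5 1. ]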
col.operator("object.vertex_group_to_uv", icon="UV", text="Convert to UV") - #col.prop(context.object, "reaction_diffusion_run", icon="PLAY", text="Run Simulation") - ####col.prop(context.object, "reaction_diffusion_run") - #col.separator() - #col.label(text="Vertex Color from:") - #col.operator("object.vertex_group_to_vertex_colors", icon="GROUP_VERTEX") - - - - -class start_reaction_diffusion(Operator): - bl_idname = "object.start_reaction_diffusion" - bl_label = "Start Reaction Diffusion" - bl_description = ("Run a Reaction-Diffusion based on existing Vertex Groups: A and B") - bl_options = {'REGISTER', 'UNDO'} - - run : BoolProperty( - name="Run Reaction-Diffusion", default=True, description="Compute a new iteration on frame changes") - - time_steps : IntProperty( - name="Steps", default=10, min=0, soft_max=50, - description="Number of Steps") - - dt : FloatProperty( - name="dt", default=1, min=0, soft_max=0.2, - description="Time Step") - - diff_a : FloatProperty( - name="Diff A", default=0.18, min=0, soft_max=2, - description="Diffusion A") - - diff_b : FloatProperty( - name="Diff B", default=0.09, min=0, soft_max=2, - description="Diffusion B") - - f : FloatProperty( - name="f", default=0.055, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4, - description="Feed Rate") - - k : FloatProperty( - name="k", default=0.062, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4, - description="Kill Rate") - - @classmethod - def poll(cls, context): - return context.object.type == 'MESH' and context.mode != 'EDIT_MESH' - - def execute(self, context): - reaction_diffusion_add_handler(self, context) - set_animatable_fix_handler(self, context) - - ob = context.object - - ob.reaction_diffusion_settings.run = self.run - ob.reaction_diffusion_settings.dt = self.dt - ob.reaction_diffusion_settings.time_steps = self.time_steps - ob.reaction_diffusion_settings.f = self.f - ob.reaction_diffusion_settings.k = self.k - ob.reaction_diffusion_settings.diff_a = self.diff_a - ob.reaction_diffusion_settings.diff_b = self.diff_b - - - # check vertex group A - try: - vg = ob.vertex_groups['A'] - except: - ob.vertex_groups.new(name='A') - # check vertex group B - try: - vg = ob.vertex_groups['B'] - except: - ob.vertex_groups.new(name='B') - - for v in ob.data.vertices: - ob.vertex_groups['A'].add([v.index], 1, 'REPLACE') - ob.vertex_groups['B'].add([v.index], 0, 'REPLACE') - - ob.vertex_groups.update() - ob.data.update() - bpy.ops.object.mode_set(mode='WEIGHT_PAINT') - - return {'FINISHED'} - -class reset_reaction_diffusion_weight(Operator): - bl_idname = "object.reset_reaction_diffusion_weight" - bl_label = "Reset Reaction Diffusion Weight" - bl_description = ("Set A and B weight to default values") - bl_options = {'REGISTER', 'UNDO'} - - @classmethod - def poll(cls, context): - return context.object.type == 'MESH' and context.mode != 'EDIT_MESH' - - def execute(self, context): - reaction_diffusion_add_handler(self, context) - set_animatable_fix_handler(self, context) - - ob = context.object - - # check vertex group A - try: - vg = ob.vertex_groups['A'] - except: - ob.vertex_groups.new(name='A') - # check vertex group B - try: - vg = ob.vertex_groups['B'] - except: - ob.vertex_groups.new(name='B') - - for v in ob.data.vertices: - ob.vertex_groups['A'].add([v.index], 1, 'REPLACE') - ob.vertex_groups['B'].add([v.index], 0, 'REPLACE') - - ob.vertex_groups.update() - ob.data.update() - bpy.ops.object.mode_set(mode='WEIGHT_PAINT') - - return {'FINISHED'} - -class bake_reaction_diffusion(Operator): - 
bl_idname = "object.bake_reaction_diffusion" - bl_label = "Bake Data" - bl_description = ("Bake the Reaction-Diffusion to the cache directory") - bl_options = {'REGISTER', 'UNDO'} - - @classmethod - def poll(cls, context): - return context.object.type == 'MESH' and context.mode != 'EDIT_MESH' - - def execute(self, context): - ob = context.object - props = ob.reaction_diffusion_settings - if props.fast_bake: - bool_run = props.run - props.run = False - context.scene.frame_current = props.cache_frame_start - fast_bake_def(ob, frame_start=props.cache_frame_start, frame_end=props.cache_frame_end) - #create_fast_bake_def(ob, frame_start=props.cache_frame_start, frame_end=props.cache_frame_end) - context.scene.frame_current = props.cache_frame_end - props.run = bool_run - else: - for i in range(props.cache_frame_start, props.cache_frame_end): - context.scene.frame_current = i - reaction_diffusion_def(ob, bake=True) - props.bool_cache = True - - return {'FINISHED'} - -class reaction_diffusion_free_data(Operator): - bl_idname = "object.reaction_diffusion_free_data" - bl_label = "Free Data" - bl_description = ("Free Reaction-Diffusion data") - bl_options = {'REGISTER', 'UNDO'} - - @classmethod - def poll(cls, context): - return context.object.type == 'MESH' - - def execute(self, context): - ob = context.object - props = ob.reaction_diffusion_settings - props.bool_cache = False - - folder = Path(props.cache_dir) - for i in range(props.cache_frame_start, props.cache_frame_end): - data_a = folder / "a_{:04d}".format(i) - if os.path.exists(data_a): - os.remove(data_a) - data_a = folder / "b_{:04d}".format(i) - if os.path.exists(data_a): - os.remove(data_a) - return {'FINISHED'} - -from bpy.app.handlers import persistent - -def reaction_diffusion_scene(scene, bake=False): - for ob in scene.objects: - if ob.reaction_diffusion_settings.run: - reaction_diffusion_def(ob) - -def reaction_diffusion_def(ob, bake=False): - - scene = bpy.context.scene - start = time.time() - if type(ob) == bpy.types.Scene: return None - props = ob.reaction_diffusion_settings - - if bake or props.bool_cache: - if props.cache_dir == '': - letters = string.ascii_letters - random_name = ''.join(rnd.choice(letters) for i in range(6)) - if bpy.context.blend_data.filepath == '': - folder = Path(bpy.context.preferences.filepaths.temporary_directory) - folder = folder / 'reaction_diffusion_cache' / random_name - else: - folder = '//' + Path(bpy.context.blend_data.filepath).stem - folder = Path(bpy.path.abspath(folder)) / 'reaction_diffusion_cache' / random_name - folder.mkdir(parents=True, exist_ok=True) - props.cache_dir = str(folder) - else: - folder = Path(props.cache_dir) - - me = ob.data - n_edges = len(me.edges) - n_verts = len(me.vertices) - a = np.zeros(n_verts) - b = np.zeros(n_verts) - - print("{:6d} Reaction-Diffusion: {}".format(scene.frame_current, ob.name)) - - if not props.bool_cache: - - if props.bool_mod: - # hide deforming modifiers - mod_visibility = [] - for m in ob.modifiers: - mod_visibility.append(m.show_viewport) - if not mod_preserve_shape(m): m.show_viewport = False - - # evaluated mesh - dg = bpy.context.evaluated_depsgraph_get() - ob_eval = ob.evaluated_get(dg) - me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg) - - # set original visibility - for v, m in zip(mod_visibility, ob.modifiers): - m.show_viewport = v - ob.modifiers.update() - - bm = bmesh.new() # create an empty BMesh - bm.from_mesh(me) # fill it in from a Mesh - dvert_lay = bm.verts.layers.deform.active - - 
-        dt = props.dt
-        time_steps = props.time_steps
-        f = props.f
-        k = props.k
-        diff_a = props.diff_a
-        diff_b = props.diff_b
-        scale = props.diff_mult
-
-        brush_mult = props.brush_mult
-
-        # store weight values
-        if 'dB' in ob.vertex_groups: db = np.zeros(n_verts)
-        if 'grad' in ob.vertex_groups: grad = np.zeros(n_verts)
-
-        if props.vertex_group_diff_a != '': diff_a = np.zeros(n_verts)
-        if props.vertex_group_diff_b != '': diff_b = np.zeros(n_verts)
-        if props.vertex_group_scale != '': scale = np.zeros(n_verts)
-        if props.vertex_group_f != '': f = np.zeros(n_verts)
-        if props.vertex_group_k != '': k = np.zeros(n_verts)
-        if props.vertex_group_brush != '': brush = np.zeros(n_verts)
-        else: brush = 0
-
-        group_index_a = ob.vertex_groups["A"].index
-        group_index_b = ob.vertex_groups["B"].index
-        a = bmesh_get_weight_numpy(group_index_a, dvert_lay, bm.verts)
-        b = bmesh_get_weight_numpy(group_index_b, dvert_lay, bm.verts)
-
-        if props.vertex_group_diff_a != '':
-            group_index = ob.vertex_groups[props.vertex_group_diff_a].index
-            diff_a = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
-            if props.invert_vertex_group_diff_a:
-                vg_bounds = (props.min_diff_a, props.max_diff_a)
-            else:
-                vg_bounds = (props.max_diff_a, props.min_diff_a)
-            diff_a = np.interp(diff_a, (0,1), vg_bounds)
-
-        if props.vertex_group_diff_b != '':
-            group_index = ob.vertex_groups[props.vertex_group_diff_b].index
-            diff_b = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
-            if props.invert_vertex_group_diff_b:
-                vg_bounds = (props.max_diff_b, props.min_diff_b)
-            else:
-                vg_bounds = (props.min_diff_b, props.max_diff_b)
-            diff_b = np.interp(diff_b, (0,1), vg_bounds)
-
-        if props.vertex_group_scale != '':
-            group_index = ob.vertex_groups[props.vertex_group_scale].index
-            scale = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
-            if props.invert_vertex_group_scale:
-                vg_bounds = (props.max_scale, props.min_scale)
-            else:
-                vg_bounds = (props.min_scale, props.max_scale)
-            scale = np.interp(scale, (0,1), vg_bounds)
-
-        if props.vertex_group_f != '':
-            group_index = ob.vertex_groups[props.vertex_group_f].index
-            f = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
-            if props.invert_vertex_group_f:
-                vg_bounds = (props.max_f, props.min_f)
-            else:
-                vg_bounds = (props.min_f, props.max_f)
-            f = np.interp(f, (0,1), vg_bounds, )
-
-        if props.vertex_group_k != '':
-            group_index = ob.vertex_groups[props.vertex_group_k].index
-            k = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
-            if props.invert_vertex_group_k:
-                vg_bounds = (props.max_k, props.min_k)
-            else:
-                vg_bounds = (props.min_k, props.max_k)
-            k = np.interp(k, (0,1), vg_bounds)
-
-        if props.vertex_group_brush != '':
-            group_index = ob.vertex_groups[props.vertex_group_brush].index
-            brush = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
-            brush *= brush_mult
-
-
-        #timeElapsed = time.time() - start
-        #print('RD - Read Vertex Groups:',timeElapsed)
-        #start = time.time()
-
-        diff_a *= scale
-        diff_b *= scale
-
-        edge_verts = [0]*n_edges*2
-        me.edges.foreach_get("vertices", edge_verts)
-        edge_verts = np.array(edge_verts)
-
-        if 'gradient' in ob.vertex_groups.keys() and False:
-            group_index = ob.vertex_groups['gradient'].index
-            gradient = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
-
-            arr = (np.arange(n_edges)*2).astype(int)
-            id0 = edge_verts[arr]
-            id1 = edge_verts[arr+1]
-
-            #gradient = np.abs(gradient[id0] - gradient[id1])
-            gradient = gradient[id1] - gradient[id0]
-            gradient /= np.max(gradient)
-            sign = np.sign(gradient)
-            sign[sign==0] = 1
-            gradient = (0.05*abs(gradient) + 0.95)*sign
-            #gradient *= (1-abs(gradient)
-            #gradient = 0.2*(1-gradient) + 0.95
-
-        #gradient = get_uv_edge_vectors(me)
-        #uv_dir = Vector((0.5,0.5,0)).normalized()
-        #gradient = np.array([abs(g.dot(uv_dir.normalized())) for g in gradient])
-        #gradient = (gradient + 0.5)/2
-        #gradient = np.array([max(0,g.dot(uv_dir.normalized())) for g in gradient])
-
-        timeElapsed = time.time() - start
-        print(' Preparation Time:',timeElapsed)
-        start = time.time()
-
-        try:
-            _f = f if type(f) is np.ndarray else np.array((f,))
-            _k = k if type(k) is np.ndarray else np.array((k,))
-            _diff_a = diff_a if type(diff_a) is np.ndarray else np.array((diff_a,))
-            _diff_b = diff_b if type(diff_b) is np.ndarray else np.array((diff_b,))
-            _brush = brush if type(brush) is np.ndarray else np.array((brush,))
-
-            #a, b = numba_reaction_diffusion_anisotropic(n_verts, n_edges, edge_verts, a, b, _brush, _diff_a, _diff_b, _f, _k, dt, time_steps, gradient)
-            a, b = numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, _brush, _diff_a, _diff_b, _f, _k, dt, time_steps)
-        except:
-            print('Not using Numba! The simulation could be slow.')
-            arr = np.arange(n_edges)*2
-            id0 = edge_verts[arr]     # first vertex indices for each edge
-            id1 = edge_verts[arr+1]   # second vertex indices for each edge
-            for i in range(time_steps):
-                b += brush
-                lap_a = np.zeros(n_verts)
-                lap_b = np.zeros(n_verts)
-                lap_a0 = a[id1] - a[id0]   # laplacian increment for first vertex of each edge
-                lap_b0 = b[id1] - b[id0]   # laplacian increment for first vertex of each edge
-
-                np.add.at(lap_a, id0, lap_a0)
-                np.add.at(lap_b, id0, lap_b0)
-                np.add.at(lap_a, id1, -lap_a0)
-                np.add.at(lap_b, id1, -lap_b0)
-
-                ab2 = a*b**2
-                a += eval("(diff_a*lap_a - ab2 + f*(1-a))*dt")
-                b += eval("(diff_b*lap_b + ab2 - (k+f)*b)*dt")
-                #a += (diff_a*lap_a - ab2 + f*(1-a))*dt
-                #b += (diff_b*lap_b + ab2 - (k+f)*b)*dt
-
-                a = nan_to_num(a)
-                b = nan_to_num(b)
-
-        timeElapsed = time.time() - start
-        print(' Simulation Time:',timeElapsed)
-
-    if bake:
-        if not(os.path.exists(folder)):
-            os.mkdir(folder)
-        file_name = folder / "a_{:04d}".format(scene.frame_current)
-        a.tofile(file_name)
-        file_name = folder / "b_{:04d}".format(scene.frame_current)
-        b.tofile(file_name)
-    elif props.bool_cache:
-        try:
-            file_name = folder / "a_{:04d}".format(scene.frame_current)
-            a = np.fromfile(file_name)
-            file_name = folder / "b_{:04d}".format(scene.frame_current)
-            b = np.fromfile(file_name)
-        except:
-            print(' Cannot read cache.')
-            return
-
-    if props.update_weight_a or props.update_weight_b:
-        start = time.time()
-        if props.update_weight_a:
-            if 'A' in ob.vertex_groups.keys():
-                vg_a = ob.vertex_groups['A']
-            else:
-                vg_a = ob.vertex_groups.new(name='A')
-        else:
-            vg_a = None
-        if props.update_weight_b:
-            if 'B' in ob.vertex_groups.keys():
-                vg_b = ob.vertex_groups['B']
-            else:
-                vg_b = ob.vertex_groups.new(name='B')
-        else:
-            vg_b = None
-        if vg_a == vg_b == None:
-            pass
-        else:
-            if ob.mode == 'WEIGHT_PAINT':# or props.bool_cache:
-                # slower, but prevent crashes
-                for i in range(n_verts):
-                    if vg_a: vg_a.add([i], a[i], 'REPLACE')
-                    if vg_b: vg_b.add([i], b[i], 'REPLACE')
-            else:
-                if props.bool_mod or props.bool_cache:
-                    #bm.free()               # release old bmesh
-                    bm = bmesh.new()         # create an empty BMesh
-                    bm.from_mesh(ob.data)    # fill it in from a Mesh
-                    dvert_lay = bm.verts.layers.deform.active
-                # faster, but can cause crashes while painting weight
-                if vg_a: index_a = vg_a.index
-                if vg_b: index_b = vg_b.index
-                for i, v
in enumerate(bm.verts): - dvert = v[dvert_lay] - if vg_a: dvert[index_a] = a[i] - if vg_b: dvert[index_b] = b[i] - bm.to_mesh(ob.data) - bm.free() - print(' Writing Vertex Groups Time:',time.time() - start) - if props.normalize: - min_a = np.min(a) - max_a = np.max(a) - min_b = np.min(b) - max_b = np.max(b) - a = (a - min_a)/(max_a - min_a) - b = (b - min_b)/(max_b - min_b) - split_a = None - split_b = None - splitted = False - if props.update_colors:#_a or props.update_colors_b: - start = time.time() - loops_size = get_attribute_numpy(me.polygons, attribute='loop_total', mult=1) - n_colors = np.sum(loops_size) - v_id = np.ones(n_colors) - me.polygons.foreach_get('vertices',v_id) - v_id = v_id.astype(int) - #v_id = np.array([v for p in ob.data.polygons for v in p.vertices]) - ''' - if props.update_colors_b: - if 'B' in ob.data.vertex_colors.keys(): - vc = ob.data.vertex_colors['B'] - else: - vc = ob.data.vertex_colors.new(name='B') - c_val = b[v_id] - c_val = np.repeat(c_val, 4, axis=0) - vc.data.foreach_set('color',c_val) - - if props.update_colors_a: - if 'A' in ob.data.vertex_colors.keys(): - vc = ob.data.vertex_colors['A'] - else: - vc = ob.data.vertex_colors.new(name='A') - c_val = a[v_id] - c_val = np.repeat(c_val, 4, axis=0) - vc.data.foreach_set('color',c_val) - ''' - split_a = a[v_id,None] - split_b = b[v_id,None] - splitted = True - ones = np.ones((n_colors,1)) - #rgba = np.concatenate((split_a,split_b,-split_b+split_a,ones),axis=1).flatten() - rgba = np.concatenate((split_a,split_b,ones,ones),axis=1).flatten() - if 'AB' in ob.data.vertex_colors.keys(): - vc = ob.data.vertex_colors['AB'] - else: - vc = ob.data.vertex_colors.new(name='AB') - vc.data.foreach_set('color',rgba) - ob.data.vertex_colors.update() - - print(' Writing Vertex Colors Time:',time.time() - start) - if props.update_uv: - start = time.time() - if 'AB' in me.uv_layers.keys(): - uv_layer = me.uv_layers['AB'] - else: - uv_layer = me.uv_layers.new(name='AB') - if not splitted: - loops_size = get_attribute_numpy(me.polygons, attribute='loop_total', mult=1) - n_data = np.sum(loops_size) - v_id = np.ones(n_data) - me.polygons.foreach_get('vertices',v_id) - v_id = v_id.astype(int) - split_a = a[v_id,None] - split_b = b[v_id,None] - uv = np.concatenate((split_a,split_b),axis=1).flatten() - uv_layer.data.foreach_set('uv',uv) - me.uv_layers.update() - print(' Writing UV Map Time:',time.time() - start) - - for ps in ob.particle_systems: - if ps.vertex_group_density == 'B' or ps.vertex_group_density == 'A': - ps.invert_vertex_group_density = not ps.invert_vertex_group_density - ps.invert_vertex_group_density = not ps.invert_vertex_group_density - - if props.bool_mod and not props.bool_cache: bpy.data.meshes.remove(me) - -def fast_bake_def(ob, frame_start=1, frame_end=250): - scene = bpy.context.scene - start = time.time() - if type(ob) == bpy.types.Scene: return None - props = ob.reaction_diffusion_settings - - # Define cache folder - if props.cache_dir == '': - letters = string.ascii_letters - random_name = ''.join(rnd.choice(letters) for i in range(6)) - if bpy.context.blend_data.filepath == '': - folder = Path(bpy.context.preferences.filepaths.temporary_directory) - folder = folder / 'reaction_diffusion_cache' / random_name - else: - folder = '//' + Path(bpy.context.blend_data.filepath).stem - folder = Path(bpy.path.abspath(folder)) / 'reaction_diffusion_cache' / random_name - folder.mkdir(parents=True, exist_ok=True) - props.cache_dir = str(folder) - else: - folder = Path(props.cache_dir) - - if props.bool_mod: - # hide 
deforming modifiers - mod_visibility = [] - for m in ob.modifiers: - mod_visibility.append(m.show_viewport) - if not mod_preserve_shape(m): m.show_viewport = False - - # evaluated mesh - dg = bpy.context.evaluated_depsgraph_get() - ob_eval = ob.evaluated_get(dg) - me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg) - - # set original visibility - for v, m in zip(mod_visibility, ob.modifiers): - m.show_viewport = v - ob.modifiers.update() - else: - me = ob.data - - bm = bmesh.new() # create an empty BMesh - bm.from_mesh(me) # fill it in from a Mesh - dvert_lay = bm.verts.layers.deform.active - n_edges = len(me.edges) - n_verts = len(me.vertices) - a = np.zeros(n_verts) - b = np.zeros(n_verts) - group_index_a = ob.vertex_groups["A"].index - group_index_b = ob.vertex_groups["B"].index - - dt = props.dt - time_steps = props.time_steps - f = props.f - k = props.k - diff_a = props.diff_a - diff_b = props.diff_b - scale = props.diff_mult - - brush_mult = props.brush_mult - - # store weight values - if 'dB' in ob.vertex_groups: db = np.zeros(n_verts) - if 'grad' in ob.vertex_groups: grad = np.zeros(n_verts) - - if props.vertex_group_diff_a != '': diff_a = np.zeros(n_verts) - if props.vertex_group_diff_b != '': diff_b = np.zeros(n_verts) - if props.vertex_group_scale != '': scale = np.zeros(n_verts) - if props.vertex_group_f != '': f = np.zeros(n_verts) - if props.vertex_group_k != '': k = np.zeros(n_verts) - if props.vertex_group_brush != '': brush = np.zeros(n_verts) - else: brush = 0 - - a = bmesh_get_weight_numpy(group_index_a, dvert_lay, bm.verts) - b = bmesh_get_weight_numpy(group_index_b, dvert_lay, bm.verts) - - if props.vertex_group_diff_a != '': - group_index = ob.vertex_groups[props.vertex_group_diff_a].index - diff_a = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts) - if props.invert_vertex_group_diff_a: - vg_bounds = (props.min_diff_a, props.max_diff_a) - else: - vg_bounds = (props.max_diff_a, props.min_diff_a) - diff_a = np.interp(diff_a, (0,1), vg_bounds) - - if props.vertex_group_diff_b != '': - group_index = ob.vertex_groups[props.vertex_group_diff_b].index - diff_b = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts) - if props.invert_vertex_group_diff_b: - vg_bounds = (props.max_diff_b, props.min_diff_b) - else: - vg_bounds = (props.min_diff_b, props.max_diff_b) - diff_b = np.interp(diff_b, (0,1), vg_bounds) - - if props.vertex_group_scale != '': - group_index = ob.vertex_groups[props.vertex_group_scale].index - scale = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts) - if props.invert_vertex_group_scale: - vg_bounds = (props.max_scale, props.min_scale) - else: - vg_bounds = (props.min_scale, props.max_scale) - scale = np.interp(scale, (0,1), vg_bounds) - - if props.vertex_group_f != '': - group_index = ob.vertex_groups[props.vertex_group_f].index - f = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts) - if props.invert_vertex_group_f: - vg_bounds = (props.max_f, props.min_f) - else: - vg_bounds = (props.min_f, props.max_f) - f = np.interp(f, (0,1), vg_bounds, ) - - if props.vertex_group_k != '': - group_index = ob.vertex_groups[props.vertex_group_k].index - k = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts) - if props.invert_vertex_group_k: - vg_bounds = (props.max_k, props.min_k) - else: - vg_bounds = (props.min_k, props.max_k) - k = np.interp(k, (0,1), vg_bounds) - - if props.vertex_group_brush != '': - group_index = ob.vertex_groups[props.vertex_group_brush].index - brush = 
bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts) - brush *= brush_mult - - diff_a *= scale - diff_b *= scale - - edge_verts = [0]*n_edges*2 - me.edges.foreach_get("vertices", edge_verts) - - gradient = get_uv_edge_vectors(me) - uv_dir = Vector((0.5,0.5,0)) - #gradient = [abs(g.dot(uv_dir)) for g in gradient] - gradient = [max(0,g.dot(uv_dir)) for g in gradient] - - timeElapsed = time.time() - start - print(' Preparation Time:',timeElapsed) - start = time.time() - - try: - edge_verts = np.array(edge_verts) - _f = f if type(f) is np.ndarray else np.array((f,)) - _k = k if type(k) is np.ndarray else np.array((k,)) - _diff_a = diff_a if type(diff_a) is np.ndarray else np.array((diff_a,)) - _diff_b = diff_b if type(diff_b) is np.ndarray else np.array((diff_b,)) - _brush = brush if type(brush) is np.ndarray else np.array((brush,)) - - run_rd = False - for j in range(props.cache_frame_start, props.cache_frame_end+1): - start2 = time.time() - print("{:6d} Reaction-Diffusion: {}".format(j, ob.name)) - if run_rd: - b += _brush - a, b = numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, _brush, _diff_a, _diff_b, _f, _k, dt, time_steps) - else: - run_rd = True - - if not(os.path.exists(folder)): - os.mkdir(folder) - file_name = folder / "a_{:04d}".format(j) - a.tofile(file_name) - file_name = folder / "b_{:04d}".format(j) - b.tofile(file_name) - - timeElapsed = time.time() - start2 - print(' Simulation Time:',timeElapsed) - - except: - print('Not using Numba! The simulation could be slow.') - edge_verts = np.array(edge_verts) - arr = np.arange(n_edges)*2 - id0 = edge_verts[arr] # first vertex indices for each edge - id1 = edge_verts[arr+1] # second vertex indices for each edge - for j in range(props.cache_frame_start, props.cache_frame_end): - for i in range(time_steps): - b += brush - lap_a = np.zeros(n_verts) - lap_b = np.zeros(n_verts) - lap_a0 = a[id1] - a[id0] # laplacian increment for first vertex of each edge - lap_b0 = b[id1] - b[id0] # laplacian increment for first vertex of each edge - - np.add.at(lap_a, id0, lap_a0) - np.add.at(lap_b, id0, lap_b0) - np.add.at(lap_a, id1, -lap_a0) - np.add.at(lap_b, id1, -lap_b0) - - ab2 = a*b**2 - a += eval("(diff_a*lap_a - ab2 + f*(1-a))*dt") - b += eval("(diff_b*lap_b + ab2 - (k+f)*b)*dt") - - a = nan_to_num(a) - b = nan_to_num(b) - - if not(os.path.exists(folder)): - os.mkdir(folder) - file_name = folder / "a_{:04d}".format(j) - a.tofile(file_name) - file_name = folder / "b_{:04d}".format(j) - b.tofile(file_name) - - if ob.mode == 'WEIGHT_PAINT': - # slower, but prevent crashes - vg_a = ob.vertex_groups['A'] - vg_b = ob.vertex_groups['B'] - for i in range(n_verts): - vg_a.add([i], a[i], 'REPLACE') - vg_b.add([i], b[i], 'REPLACE') - else: - if props.bool_mod: - bm.free() # release old bmesh - bm = bmesh.new() # create an empty BMesh - bm.from_mesh(ob.data) # fill it in from a Mesh - dvert_lay = bm.verts.layers.deform.active - # faster, but can cause crashes while painting weight - for i, v in enumerate(bm.verts): - dvert = v[dvert_lay] - dvert[group_index_a] = a[i] - dvert[group_index_b] = b[i] - bm.to_mesh(ob.data) - - # Update Vertex Colors - if 'A' in ob.data.vertex_colors or 'B' in ob.data.vertex_colors: - v_id = np.array([v for p in ob.data.polygons for v in p.vertices]) - - if 'B' in ob.data.vertex_colors: - c_val = b[v_id] - c_val = np.repeat(c_val, 4, axis=0) - vc = ob.data.vertex_colors['B'] - vc.data.foreach_set('color',c_val.tolist()) - - if 'A' in ob.data.vertex_colors: - c_val = a[v_id] - c_val = np.repeat(c_val, 4, axis=0) 
- vc = ob.data.vertex_colors['A'] - vc.data.foreach_set('color',c_val.tolist()) - - for ps in ob.particle_systems: - if ps.vertex_group_density == 'B' or ps.vertex_group_density == 'A': - ps.invert_vertex_group_density = not ps.invert_vertex_group_density - ps.invert_vertex_group_density = not ps.invert_vertex_group_density - - if props.bool_mod: bpy.data.meshes.remove(me) - bm.free() - timeElapsed = time.time() - start - print(' Closing Time:',timeElapsed) - -def create_fast_bake_def(ob, frame_start=1, frame_end=250): - scene = bpy.context.scene - start = time.time() - if type(ob) == bpy.types.Scene: return None - props = ob.reaction_diffusion_settings - - dt = props.dt - time_steps = props.time_steps - scale = props.diff_mult - - if props.cache_dir == '': - letters = string.ascii_letters - random_name = ''.join(rnd.choice(letters) for i in range(6)) - if bpy.context.blend_data.filepath == '': - folder = Path(bpy.context.preferences.filepaths.temporary_directory) - folder = folder / 'reaction_diffusion_cache' / random_name - else: - folder = '//' + Path(bpy.context.blend_data.filepath).stem - folder = Path(bpy.path.abspath(folder)) / 'reaction_diffusion_cache' / random_name - folder.mkdir(parents=True, exist_ok=True) - props.cache_dir = str(folder) - else: - folder = Path(props.cache_dir) - - if props.bool_mod: - # hide deforming modifiers - mod_visibility = [] - for m in ob.modifiers: - mod_visibility.append(m.show_viewport) - if not mod_preserve_shape(m): m.show_viewport = False - - # evaluated mesh - dg = bpy.context.evaluated_depsgraph_get() - ob_eval = ob.evaluated_get(dg) - me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg) - - # set original visibility - for v, m in zip(mod_visibility, ob.modifiers): - m.show_viewport = v - ob.modifiers.update() - else: - me = ob.data - - bm = bmesh.new() # create an empty BMesh - bm.from_mesh(me) # fill it in from a Mesh - verts = get_vertices_numpy(me) - dvert_lay = bm.verts.layers.deform.active - n_edges = len(me.edges) - n_verts = len(me.vertices) - group_index_x = ob.vertex_groups["x"].index - group_index_y = ob.vertex_groups["y"].index - group_index_module = ob.vertex_groups["module"].index - group_index_values = ob.vertex_groups["values"].index - - if not props.bool_cache: - time_steps = props.time_steps - - # store weight values - if 'dB' in ob.vertex_groups: db = np.zeros(n_verts) - if 'grad' in ob.vertex_groups: grad = np.zeros(n_verts) - vec_x = np.zeros(n_verts) - vec_y = np.zeros(n_verts) - vec_module = np.zeros(n_verts) - values = np.zeros(n_verts) - - vec_x = bmesh_get_weight_numpy(group_index_x, dvert_lay, bm.verts) - vec_y = bmesh_get_weight_numpy(group_index_y, dvert_lay, bm.verts) - vec_module = bmesh_get_weight_numpy(group_index_module, dvert_lay, bm.verts) - values = bmesh_get_weight_numpy(group_index_values, dvert_lay, bm.verts) - field = np.concatenate((vec_x[:,None],vec_y[:,None],vec_y[:,None]*0),axis=1) - field = field*2-1 - field[:,2] = 0 - edge_verts = get_edges_numpy(me) - - id0 = edge_verts[:,0] - id1 = edge_verts[:,1] - vert0 = verts[id0] - vert1 = verts[id1] - vec = vert1-vert0 - edge_field = (field[id0] + field[id1])/2 # average vector associated to the edge - print(vert0.shape) - print(field.shape) - print(edge_field.shape) - # normalize vectors - vec /= np.linalg.norm(vec,axis=1)[:,None] - edge_field /= np.linalg.norm(edge_field,axis=1)[:,None] - edge_flow = np.einsum('...j,...j', vec, edge_field) - #sign = (edge_flow>0).astype(int) - #values[edge_verts[sign]] += 
values[edge_verts[1-sign]]* - #values[verts0] += values[verts1]*edge_flow - - timeElapsed = time.time() - start - print(' Preparation Time:',timeElapsed) - start = time.time() - - # Preserve energy - mult = np.zeros(values.shape) - #mult[id0] -= edge_flow - #mult[id1] += edge_flow - np.add.at(mult,id0,-edge_flow) - np.add.at(mult,id1,edge_flow) - print("mult") - mult = scale/mult - print(mult) - print(np.sum(mult)) - - - #try: - print(vec) - print(edge_flow) - print(edge_flow) - - bool_run = False - for j in range(props.cache_frame_start, props.cache_frame_end+1): - start2 = time.time() - print("{:6d} Reaction-Diffusion: {}".format(j, ob.name)) - if bool_run: - print(values) - #for i in range(1): - values = integrate_field(n_edges,id0,id1,values,edge_flow,mult,time_steps) - #values0 = values - #np.add.at(values, id0, values0[id1]*edge_flow*mult[id1]) - #np.add.at(values, id1, -values0[id0]*edge_flow*mult[id0]) - #np.add.at(values, id0, values0[id1]*edge_flow*mult) - #np.add.at(values, id1, -values0[id0]*edge_flow*mult) - #values[id1] += values0[id0]*edge_flow/mult[id1]*dt - #values[id0] -= values0[id1]*edge_flow/mult[id0]*dt - #values[id1] = edge_flow - #values[id1] += edge_flow - #a, b = numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, _brush, _diff_a, _diff_b, _f, _k, dt, time_steps) - - ''' - lap_a = np.zeros(n_verts) - lap_b = np.zeros(n_verts) - lap_a0 = a[id1] - a[id0] # laplacian increment for first vertex of each edge - lap_b0 = b[id1] - b[id0] # laplacian increment for first vertex of each edge - - np.add.at(lap_a, id0, lap_a0) - np.add.at(lap_b, id0, lap_b0) - np.add.at(lap_a, id1, -lap_a0) - np.add.at(lap_b, id1, -lap_b0) - ''' - else: - bool_run = True - - if not(os.path.exists(folder)): - os.mkdir(folder) - file_name = folder / "a_{:04d}".format(j) - values.tofile(file_name) - file_name = folder / "b_{:04d}".format(j) - values.tofile(file_name) - - - timeElapsed = time.time() - start2 - print(' Simulation Time:',timeElapsed) - - if props.bool_mod: bpy.data.meshes.remove(me) - bm.free() - timeElapsed = time.time() - start - print(' Closing Time:',timeElapsed) - - - - - -class TISSUE_PT_reaction_diffusion(Panel): - bl_space_type = 'PROPERTIES' - bl_region_type = 'WINDOW' - bl_context = "data" - bl_label = "Tissue Reaction-Diffusion" - bl_options = {'DEFAULT_CLOSED'} - - @classmethod - def poll(cls, context): - return 'A' and 'B' in context.object.vertex_groups - - def draw(self, context): - reaction_diffusion_add_handler(self, context) - - ob = context.object - props = ob.reaction_diffusion_settings - layout = self.layout - col = layout.column(align=True) - row = col.row(align=True) - if not ("A" and "B" in ob.vertex_groups): - row.operator("object.start_reaction_diffusion", - icon="EXPERIMENTAL", - text="Reaction-Diffusion") - else: - row.operator("object.start_reaction_diffusion", - icon="EXPERIMENTAL", - text="Reset Reaction-Diffusion") - row = col.row(align=True) - row.prop(props, "run", text="Run Reaction-Diffusion") - col = layout.column(align=True) - row = col.row(align=True) - row.prop(props, "time_steps") - row.prop(props, "dt") - row.enabled = not props.bool_cache - col.separator() - row = col.row(align=True) - col1 = row.column(align=True) - col1.prop(props, "diff_a") - col1.enabled = props.vertex_group_diff_a == '' and not props.bool_cache - col1 = row.column(align=True) - col1.prop(props, "diff_b") - col1.enabled = props.vertex_group_diff_b == '' and not props.bool_cache - row = col.row(align=True) - row.prop(props, "diff_mult") - row.enabled = 
props.vertex_group_scale == '' and not props.bool_cache - #col.separator() - row = col.row(align=True) - col1 = row.column(align=True) - col1.prop(props, "f") - col1.enabled = props.vertex_group_f == '' and not props.bool_cache - col1 = row.column(align=True) - col1.prop(props, "k") - col1.enabled = props.vertex_group_k == '' and not props.bool_cache - col.separator() - col.label(text='Cache:') - #col.prop(props, "bool_cache") - col.prop(props, "cache_dir", text='') - col.separator() - row = col.row(align=True) - row.prop(props, "cache_frame_start") - row.prop(props, "cache_frame_end") - col.separator() - if props.bool_cache: - col.operator("object.reaction_diffusion_free_data") - else: - row = col.row(align=True) - row.operator("object.bake_reaction_diffusion") - file = bpy.context.blend_data.filepath - temp = bpy.context.preferences.filepaths.temporary_directory - if file == temp == props.cache_dir == '': - row.enabled = False - col.label(text="Cannot use cache", icon='ERROR') - col.label(text='please save the Blender or set a Cache directory') - col.prop(props, "fast_bake") - - col.separator() - col.label(text='Output attributes:') - row = col.row(align=True) - col2 = row.column(align=True) - row2 = col2.row(align=True) - row2.prop(props, "update_weight_a", icon='GROUP_VERTEX', text='A') - row2.prop(props, "update_weight_b", icon='GROUP_VERTEX', text='B') - col2.enabled = props.bool_cache - row.separator() - #row.prop(props, "update_colors_a", icon='GROUP_VCOL', text='A') - #row.prop(props, "update_colors_b", icon='GROUP_VCOL', text='B') - row.prop(props, "update_colors", icon='GROUP_VCOL', text='AB') - row.separator() - row.prop(props, "update_uv", icon='GROUP_UVS', text='AB') - col.prop(props,'normalize') - - #col.prop_search(props, 'vertex_group_diff_a', ob, "vertex_groups", text='Diff A') - #col.prop_search(props, 'vertex_group_diff_b', ob, "vertex_groups", text='Diff B') - #col.prop_search(props, 'vertex_group_scale', ob, "vertex_groups", text='Scale') - #col.prop_search(props, 'vertex_group_f', ob, "vertex_groups", text='f') - #col.prop_search(props, 'vertex_group_k', ob, "vertex_groups", text='k') - - -class TISSUE_PT_reaction_diffusion_weight(Panel): - bl_space_type = 'PROPERTIES' - bl_region_type = 'WINDOW' - bl_context = "data" - bl_parent_id = "TISSUE_PT_reaction_diffusion" - bl_label = "Vertex Groups" - bl_options = {'DEFAULT_CLOSED'} - - @classmethod - def poll(cls, context): - return 'A' and 'B' in context.object.vertex_groups - - def draw(self, context): - ob = context.object - props = ob.reaction_diffusion_settings - layout = self.layout - #layout.use_property_split = True - col = layout.column(align=True) - col.prop(props, "bool_mod") - if props.bool_mod and props.fast_bake: - col.label(text="When Fast Bake is on, the modifiers", icon='ERROR') - col.label(text=" are used only for the first frame") - col.separator() - insert_weight_parameter(col, ob, 'brush', text='Brush:') - insert_weight_parameter(col, ob, 'diff_a', text='Diff A:') - insert_weight_parameter(col, ob, 'diff_b', text='Diff B:') - insert_weight_parameter(col, ob, 'scale', text='Scale:') - insert_weight_parameter(col, ob, 'f', text='f:') - insert_weight_parameter(col, ob, 'k', text='k:') - col.enabled = not props.bool_cache - -def insert_weight_parameter(col, ob, name, text=''): - props = ob.reaction_diffusion_settings - split = col.split(factor=0.25, align=True) - col2 = split.column(align=True) - col2.label(text=text) - col2 = split.column(align=True) - row2 = col2.row(align=True) - 
row2.prop_search(props, 'vertex_group_' + name, ob, "vertex_groups", text='') - if name != 'brush': - row2.prop(props, "invert_vertex_group_" + name, text="", toggle=True, icon='ARROW_LEFTRIGHT') - if 'vertex_group_' + name in props: - if props['vertex_group_' + name] != '': - if name == 'brush': - col2.prop(props, "brush_mult") - else: - row2 = col2.row(align=True) - row2.prop(props, "min_" + name, text="Min") - row2 = col2.row(align=True) - row2.prop(props, "max_" + name, text="Max") - col.separator() - -def contour_edges_pattern(operator, c, verts_count, iso_val, vertices, normals, filtered_edges, weight, pattern_weight, bevel_weight): - # vertices indexes - id0 = filtered_edges[:,0] - id1 = filtered_edges[:,1] - # vertices weight - w0 = weight[id0] - w1 = weight[id1] - # weight condition - bool_w0 = w0 < iso_val - bool_w1 = w1 < iso_val - - # mask all edges that have one weight value below the iso value - mask_new_verts = np.logical_xor(bool_w0, bool_w1) - if not mask_new_verts.any(): - return np.array([[None]]), {}, np.array([[None]]), np.array([[None]]) - - id0 = id0[mask_new_verts] - id1 = id1[mask_new_verts] - # filter arrays - v0 = vertices[id0] - v1 = vertices[id1] - n0 = normals[id0] - n1 = normals[id1] - w0 = w0[mask_new_verts] - w1 = w1[mask_new_verts] - pattern0 = pattern_weight[id0] - pattern1 = pattern_weight[id1] - try: - bevel0 = bevel_weight[id0] - bevel1 = bevel_weight[id1] - except: pass - - ### Spiral - #edge_nor = (n0+n1)/2 - #shift = np.arctan2(edge_nor[:,0], edge_nor[:,1])/2/pi*delta_iso - - #param = (iso_val + shift - w0)/(w1-w0) - param = (iso_val - w0)/(w1-w0) - # pattern displace - #mult = 1 if c%2 == 0 else -1 - if c%(operator.in_steps + operator.out_steps) < operator.in_steps: - mult = operator.in_displace - else: - mult = operator.out_displace - pattern_value = pattern0 + (pattern1-pattern0)*param - try: - bevel_value = bevel0 + (bevel1-bevel0)*param - bevel_value = np.expand_dims(bevel_value,axis=1) - except: bevel_value = None - disp = pattern_value * mult - - param = np.expand_dims(param,axis=1) - disp = np.expand_dims(disp,axis=1) - verts = v0 + (v1-v0)*param - norm = n0 + (n1-n0)*param - if operator.limit_z: disp *= 1-abs(np.expand_dims(norm[:,2], axis=1)) - verts = verts + norm*disp - #verts = verts[np.flip(np.argsort(shift))] - #verts = verts[np.argsort(shift)] - - # indexes of edges with new vertices - edges_index = filtered_edges[mask_new_verts][:,2] - - # remove all edges completely below the iso value - #mask_edges = np.logical_not(np.logical_and(bool_w0, bool_w1)) - #filtered_edges = filtered_edges[mask_edges] - return filtered_edges, edges_index, verts, bevel_value - def contour_bmesh(me, bm, weight, iso_val): bm.verts.ensure_lookup_table() bm.edges.ensure_lookup_table() bm.faces.ensure_lookup_table() - # store weight values - vertices = get_vertices_numpy(me) faces_mask = np.array(bm.faces) filtered_edges = get_edges_id_numpy(me) @@ -4246,39 +2252,12 @@ def contour_bmesh(me, bm, weight, iso_val): param = np.expand_dims(param,axis=1) verts = v0 + (v1-v0)*param - # indexes of edges with new vertices - #edges_index = filtered_edges[mask_new_verts][:,2] - edges_id = {} for i, e in enumerate(filtered_edges): #edges_id[id] = i + n_verts edges_id['{}_{}'.format(e[0],e[1])] = i + n_verts edges_id['{}_{}'.format(e[1],e[0])] = i + n_verts - - ''' - for e in filtered_edges: - id0 = e.verts[0].index - id1 = e.verts[1].index - w0 = weight[id0] - w1 = weight[id1] - - if w0 == w1: continue - elif w0 > iso_val and w1 > iso_val: - continue - elif w0 < iso_val 
and w1 < iso_val: continue
-            elif w0 == iso_val or w1 == iso_val: continue
-            else:
-                v0 = me0.vertices[id0].co
-                v1 = me0.vertices[id1].co
-                v = v0.lerp(v1, (iso_val-w0)/(w1-w0))
-                delete_edges.append(e)
-                verts.append(v)
-                edges_id[str(id0)+"_"+str(id1)] = count
-                edges_id[str(id1)+"_"+str(id0)] = count
-                count += 1
-    '''
-
     splitted_faces = []
     switch = False
@@ -4524,7 +2503,6 @@ class tissue_weight_streamlines(Operator):
         # generate new bmesh
         bm = bmesh.new()
         bm.from_mesh(me)
-        print(len(me.vertices))
         #for v in me.vertices:
        #    if v.select: seeds.append(v.index)
         for v in bm.verts:
@@ -4605,6 +2583,7 @@ class tissue_weight_streamlines(Operator):
         if self.pos_steps > 0:
             for i in range(n_verts):
                 n = neigh[i]
+                if len(n) == 0: continue
                 nw = neigh_weight[i]
                 max_w = max(nw)
                 if self.same_weight:
@@ -4618,6 +2597,7 @@ class tissue_weight_streamlines(Operator):
             prev_vert = [-1]*n_verts
             for i in range(n_verts):
                 n = neigh[i]
+                if len(n) == 0: continue
                 nw = neigh_weight[i]
                 min_w = min(nw)
                 if self.same_weight:
@@ -4638,7 +2618,8 @@ class tissue_weight_streamlines(Operator):
             for j in range(self.pos_steps):
                 if self.rand_dir > 0:
                     n = neigh[next_pts[-1]]
-                    next = n[int(len(n) * (1-random.random() * self.rand_dir))]
+                    if len(n) == 0: break
+                    next = n[int((len(n)-1) * (1-random.random() * self.rand_dir))]
                 else:
                     next = next_vert[next_pts[-1]]
                 if next > 0:
@@ -4649,6 +2630,7 @@ class tissue_weight_streamlines(Operator):
             for j in range(self.neg_steps):
                 if self.rand_dir > 0:
                     n = neigh[prev_pts[-1]]
+                    if len(n) == 0: break
                     prev = n[int(len(n) * random.random() * self.rand_dir)]
                 else:
                     prev = prev_vert[prev_pts[-1]]
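For reference, the update that the removed reaction_diffusion_def() performs (and falls back to in plain NumPy when Numba is unavailable) is a Gray-Scott reaction-diffusion step computed over the mesh edge list. The following is a minimal, self-contained sketch distilled from that fallback loop; gray_scott_step is a hypothetical name, the defaults simply mirror the operator defaults above (f=0.055, k=0.062, Diff A=0.18, Diff B=0.09), and the add-on's real code additionally handles brush weights, per-vertex parameter maps and frame caching.

import numpy as np

def gray_scott_step(a, b, edge_verts, f, k, diff_a, diff_b, dt, steps, brush=0.0):
    """Edge-based Gray-Scott update.

    a, b       : per-vertex concentrations (1D arrays of length n_verts)
    edge_verts : flat array [v0, v1, v0, v1, ...] as filled by
                 me.edges.foreach_get("vertices", ...)
    """
    id0 = edge_verts[0::2]   # first vertex of each edge
    id1 = edge_verts[1::2]   # second vertex of each edge
    for _ in range(steps):
        b += brush
        lap_a = np.zeros_like(a)
        lap_b = np.zeros_like(b)
        lap_a0 = a[id1] - a[id0]       # per-edge differences
        lap_b0 = b[id1] - b[id0]
        np.add.at(lap_a, id0, lap_a0)  # accumulate graph Laplacian per vertex
        np.add.at(lap_b, id0, lap_b0)
        np.add.at(lap_a, id1, -lap_a0)
        np.add.at(lap_b, id1, -lap_b0)
        ab2 = a * b**2
        a += (diff_a * lap_a - ab2 + f * (1 - a)) * dt
        b += (diff_b * lap_b + ab2 - (k + f) * b) * dt
    return np.nan_to_num(a), np.nan_to_num(b)

# tiny usage example: two vertices joined by a single edge
a = np.array([1.0, 1.0])
b = np.array([0.0, 0.5])
a, b = gray_scott_step(a, b, np.array([0, 1]), f=0.055, k=0.062,
                       diff_a=0.18, diff_b=0.09, dt=1.0, steps=10)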