Refactor: combine insert_keyframe() and insert_key_rna() into a single function #122053

Merged
Nathan Vegdahl merged 49 commits from nathanvegdahl/blender:combine_keying_functions into main 2024-06-11 16:43:08 +02:00
113 changed files with 3151 additions and 1367 deletions
Showing only changes of commit 9fac870d64

View File

@@ -18,9 +18,8 @@ if(UNIX)
endif()
set(PATCH_MAYBE_DOS2UNIX_CMD
${_dos2unix}
${PATCH_DIR}/opencollada.diff
${BUILD_DIR}/opencollada/src/external_opencollada/CMakeLists.txt
${BUILD_DIR}/opencollada/src/external_opencollada/Externals/LibXML/CMakeLists.txt &&
${BUILD_DIR}/opencollada/src/external_opencollada/Externals/LibXML/CMakeLists.txt
)
else()
set(OPENCOLLADA_EXTRA_ARGS
@@ -43,9 +42,6 @@ ExternalProject_Add(external_opencollada
PATCH_COMMAND
${PATCH_MAYBE_DOS2UNIX_CMD}
${PATCH_CMD} -p 1 -N -d
${BUILD_DIR}/opencollada/src/external_opencollada <
${PATCH_DIR}/opencollada.diff
CMAKE_ARGS
-DCMAKE_INSTALL_PREFIX=${LIBDIR}/opencollada

View File

@@ -147,9 +147,9 @@ set(SDL_FILE SDL2-${SDL_VERSION}.tar.gz)
set(SDL_CPE "cpe:2.3:a:libsdl:sdl:${SDL_VERSION}:*:*:*:*:*:*:*")
set(SDL_HOMEPAGE https://www.libsdl.org)
set(OPENCOLLADA_VERSION v1.6.68)
set(OPENCOLLADA_URI https://github.com/KhronosGroup/OpenCOLLADA/archive/${OPENCOLLADA_VERSION}.tar.gz)
set(OPENCOLLADA_HASH ee7dae874019fea7be11613d07567493)
set(OPENCOLLADA_VERSION 717cb7c1041a1796aad3cf843dd8f5095d7a6a33)
set(OPENCOLLADA_URI https://github.com/aras-p/OpenCOLLADA/archive/${OPENCOLLADA_VERSION}.tar.gz)
set(OPENCOLLADA_HASH 64cb7f705751790f1a8c6d0dfe6682cd)
set(OPENCOLLADA_HASH_TYPE MD5)
set(OPENCOLLADA_FILE opencollada-${OPENCOLLADA_VERSION}.tar.gz)

View File

@@ -1,169 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 95abbe2..4f14f30 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -254,11 +254,11 @@ if(USE_STATIC_MSVC_RUNTIME)
endif()
#adding PCRE
-find_package(PCRE)
+#find_package(PCRE)
if (PCRE_FOUND)
message(STATUS "SUCCESSFUL: PCRE found")
else () # if pcre not found building its local copy from ./Externals
- if (WIN32 OR APPLE)
+ if (1)
message("WARNING: Native PCRE not found, taking PCRE from ./Externals")
add_definitions(-DPCRE_STATIC)
add_subdirectory(${EXTERNAL_LIBRARIES}/pcre)
diff --git a/DAEValidator/CMakeLists.txt b/DAEValidator/CMakeLists.txt
index 03ad540..f7d05cf 100644
--- a/DAEValidator/CMakeLists.txt
+++ b/DAEValidator/CMakeLists.txt
@@ -98,7 +98,7 @@ if (WIN32)
# C4710: 'function' : function not inlined
# C4711: function 'function' selected for inline expansion
# C4820: 'bytes' bytes padding added after construct 'member_name'
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /MP /Wall /WX /wd4505 /wd4514 /wd4592 /wd4710 /wd4711 /wd4820")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /MP /Wall /wd4505 /wd4514 /wd4592 /wd4710 /wd4711 /wd4820")
else ()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Werror")
endif ()
diff --git a/DAEValidator/library/include/no_warning_begin b/DAEValidator/library/include/no_warning_begin
index 7a69c32..defb315 100644
--- a/DAEValidator/library/include/no_warning_begin
+++ b/DAEValidator/library/include/no_warning_begin
@@ -2,6 +2,9 @@
#if defined(_WIN32)
# pragma warning(push)
# pragma warning(disable:4668)
+# if _MSC_VER >=1900
+# pragma warning(disable:5031)
+# endif
# if defined(_MSC_VER) && defined(_DEBUG)
# pragma warning(disable:4548)
# endif
diff --git a/DAEValidator/library/src/ArgumentParser.cpp b/DAEValidator/library/src/ArgumentParser.cpp
index 897e4dc..98a69ff 100644
--- a/DAEValidator/library/src/ArgumentParser.cpp
+++ b/DAEValidator/library/src/ArgumentParser.cpp
@@ -6,10 +6,10 @@
using namespace std;
-#ifdef _MSC_VER
-#define NOEXCEPT _NOEXCEPT
-#else
+#ifndef _NOEXCEPT
#define NOEXCEPT noexcept
+#else
+#define NOEXCEPT _NOEXCEPT
#endif
namespace opencollada
diff --git a/Externals/LibXML/CMakeLists.txt b/Externals/LibXML/CMakeLists.txt
index 40081e7..e1d1bfa 100644
--- a/Externals/LibXML/CMakeLists.txt
+++ b/Externals/LibXML/CMakeLists.txt
@@ -9,6 +9,7 @@ add_definitions(
-DLIBXML_SCHEMAS_ENABLED
-DLIBXML_XPATH_ENABLED
-DLIBXML_TREE_ENABLED
+ -DLIBXML_STATIC
)
if(USE_STATIC_MSVC_RUNTIME)
diff --git a/GeneratedSaxParser/src/GeneratedSaxParserUtils.cpp b/GeneratedSaxParser/src/GeneratedSaxParserUtils.cpp
index 1f9a3ee..d151e9a 100644
--- a/GeneratedSaxParser/src/GeneratedSaxParserUtils.cpp
+++ b/GeneratedSaxParser/src/GeneratedSaxParserUtils.cpp
@@ -1553,7 +1553,7 @@ namespace GeneratedSaxParser
#if defined(COLLADABU_OS_WIN) && !defined(__MINGW32__)
return _isnan( value ) ? true : false;
#else
-#ifdef isnan
+#if defined(isnan) || defined(__APPLE__)
return isnan( value );
#else
return std::isnan(value);
diff --git a/DAEValidator/library/src/Dae.cpp b/DAEValidator/library/src/Dae.cpp
index 9256ee1..241ad67 100644
--- a/DAEValidator/library/src/Dae.cpp
+++ b/DAEValidator/library/src/Dae.cpp
@@ -304,7 +304,7 @@ namespace opencollada
if (auto root_node = root())
{
const auto & nodes = root_node.selectNodes("//*[@id]");
- for (const auto & node : nodes)
+ for (const auto node : nodes)
{
string id = node.attribute("id").value();
mIdCache.insert(id);
@@ -312,4 +312,4 @@ namespace opencollada
}
}
}
-}
\ No newline at end of file
+}
diff --git a/DAEValidator/library/src/DaeValidator.cpp b/DAEValidator/library/src/DaeValidator.cpp
index 715d903..24423ce 100644
--- a/DAEValidator/library/src/DaeValidator.cpp
+++ b/DAEValidator/library/src/DaeValidator.cpp
@@ -162,7 +162,7 @@ namespace opencollada
// Find xsi:schemaLocation attributes in dae and try to validate against specified xsd documents
const auto & elements = dae.root().selectNodes("//*[@xsi:schemaLocation]");
- for (const auto & element : elements)
+ for (const auto element : elements)
{
if (auto schemaLocation = element.attribute("schemaLocation"))
{
@@ -274,7 +274,7 @@ namespace opencollada
int result = 0;
map<string, size_t> ids;
const auto & nodes = dae.root().selectNodes("//*[@id]");
- for (const auto & node : nodes)
+ for (const auto node : nodes)
{
string id = node.attribute("id").value();
size_t line = node.line();
diff -Naur a/CMakeLists.txt b/CMakeLists.txt
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -274,7 +274,7 @@
add_subdirectory(${EXTERNAL_LIBRARIES}/UTF)
add_subdirectory(common/libBuffer)
add_subdirectory(${EXTERNAL_LIBRARIES}/MathMLSolver)
-add_subdirectory(${EXTERNAL_LIBRARIES}/zlib)
+#add_subdirectory(${EXTERNAL_LIBRARIES}/zlib)
# building OpenCOLLADA libs
add_subdirectory(COLLADABaseUtils)
@@ -284,10 +284,10 @@
add_subdirectory(COLLADAStreamWriter)
# building COLLADAValidator app
-add_subdirectory(COLLADAValidator)
+#add_subdirectory(COLLADAValidator)
# DAE validator app
-add_subdirectory(DAEValidator)
+#add_subdirectory(DAEValidator)
# Library export
install(EXPORT LibraryExport DESTINATION ${OPENCOLLADA_INST_CMAKECONFIG} FILE OpenCOLLADATargets.cmake)
diff -Naur OpenCOLLADA-1.6.68/common/libBuffer/include/CommonFWriteBufferFlusher.h external_opencollada/common/libBuffer/include/CommonFWriteBufferFlusher.h
--- OpenCOLLADA-1.6.68/common/libBuffer/include/CommonFWriteBufferFlusher.h 2018-11-26 14:43:10 -0700
+++ external_opencollada/common/libBuffer/include/CommonFWriteBufferFlusher.h 2022-08-19 11:36:04 -0600
@@ -23,7 +23,7 @@
# include <tr1/unordered_map>
#endif
-#ifdef _LIBCPP_VERSION
+#if defined(_LIBCPP_VERSION) || defined(WIN32)
// If we're compiling with libc++, create a namespace alias for tr1 that points to std.
// Not particularly elegant, and largely should be filed under "hack", but it works for OS X with clang for now.
namespace std {

View File

@@ -341,7 +341,13 @@ if(WITH_OPENCOLLADA)
if(WITH_STATIC_LIBS)
# PCRE is bundled with OpenCollada without headers, so can't use
# find_package reliably to detect it.
set(PCRE_LIBRARIES ${LIBDIR}/opencollada/lib/libpcre.a)
# NOTE: newer fork no longer depends on PCRE: see !122270.
if(EXISTS ${LIBDIR}/opencollada/lib/libpcre.a)
set(PCRE_LIBRARIES ${LIBDIR}/opencollada/lib/libpcre.a)
else()
# Quiet warnings.
set(PCRE_LIBRARIES)
endif()
else()
find_package_wrapper(PCRE)
endif()

View File

@@ -512,12 +512,13 @@ if(WITH_OPENCOLLADA)
endif()
list(APPEND OPENCOLLADA_LIBRARIES ${OPENCOLLADA}/lib/opencollada/UTF.lib)
if(EXISTS ${OPENCOLLADA}/lib/opencollada/pcre.lib)
set(PCRE_LIBRARIES
optimized ${OPENCOLLADA}/lib/opencollada/pcre.lib
set(PCRE_LIBRARIES
optimized ${OPENCOLLADA}/lib/opencollada/pcre.lib
debug ${OPENCOLLADA}/lib/opencollada/pcre_d.lib
)
debug ${OPENCOLLADA}/lib/opencollada/pcre_d.lib
)
endif()
endif()
if(WITH_CODEC_FFMPEG)

View File

@@ -0,0 +1,17 @@
if NOT EXIST %PYTHON% (
echo python not found, required for this operation
exit /b 1
)
set FORMAT_PATHS=%BLENDER_DIR%\tools\utils_maintenance\autopep8_format_paths.py
for %%a in (%PYTHON%) do (
set PEP8_LOCATION=%%~dpa\..\lib\site-packages\autopep8.py
)
REM Use -B to avoid writing __pycache__ in lib directory and causing update conflicts.
REM While we run with --no-subprocess, a subprocess is still used to get the version
REM information, so we still have to supply a valid --autopep8-command here.
%PYTHON% -B %FORMAT_PATHS% --autopep8-command "%PEP8_LOCATION%" --no-subprocess %FORMAT_ARGS%
:EOF

View File

@@ -27,4 +27,6 @@ set PATH=%CF_PATH%;%PATH%
REM Use -B to avoid writing __pycache__ in lib directory and causing update conflicts.
%PYTHON% -B %FORMAT_PATHS% %FORMAT_ARGS%
call "%~dp0\autopep8.cmd"
:EOF

View File

@@ -82,11 +82,26 @@ def enum_sampling_pattern(self, context):
5)]
debug_items = [
('SOBOL_BURLEY', "Sobol-Burley", "Use on-the-fly computed Owen-scrambled Sobol for random sampling", 0),
('TABULATED_SOBOL', "Tabulated Sobol", "Use pre-computed tables of Owen-scrambled Sobol for random sampling", 1),
('BLUE_NOISE', "Blue-Noise (pure)", "Blue-Noise (pure)", 2),
('BLUE_NOISE_FIRST', "Blue-Noise (first)", "Blue-Noise (first)", 3),
('BLUE_NOISE_ROUND', "Blue-Noise (round)", "Blue-Noise (round)", 4),
('SOBOL_BURLEY',
"Sobol-Burley",
"Use on-the-fly computed Owen-scrambled Sobol for random sampling",
0),
('TABULATED_SOBOL',
"Tabulated Sobol",
"Use pre-computed tables of Owen-scrambled Sobol for random sampling",
1),
('BLUE_NOISE',
"Blue-Noise (pure)",
"Use a blue-noise pattern, which optimizes the frequency distribution of noise, for random sampling",
2),
('BLUE_NOISE_FIRST',
"Blue-Noise (first)",
"Use a blue-noise pattern for the first sample, then use Tabulated Sobol for the remaining samples, for random sampling",
3),
('BLUE_NOISE_ROUND',
"Blue-Noise (round)",
"Use a blue-noise sequence with a length rounded up to the next power of 2, for random sampling",
4),
]
non_debug_items = [

View File

@@ -89,6 +89,10 @@ unique_ptr<Denoiser> Denoiser::create(Device *denoiser_device,
* or between few GPU and a CPU. */
single_denoiser_device = find_best_device(denoiser_device, params.type);
}
/* Ensure that we have a device to be used later in the code below. */
if (single_denoiser_device == nullptr) {
single_denoiser_device = cpu_fallback_device;
}
bool is_cpu_denoiser_device = single_denoiser_device->info.type == DEVICE_CPU;
if (is_cpu_denoiser_device == false) {

View File

@@ -0,0 +1,7 @@
System Extensions
Extensions extracted into this directory will be available from the
default "System" repository.
This allows extensions to be bundled with Blender outside of
user repositories.

View File

@@ -300,26 +300,6 @@ def extenion_repos_sync(*_):
repo_status_text.from_message("Sync \"{:s}\"".format(active_repo.name), text)
@bpy.app.handlers.persistent
def extenion_repos_upgrade(*_):
# This is called from operators (create or an explicit call to sync)
# so calling a modal operator is "safe".
if (active_repo := repo_active_or_none()) is None:
return
print_debug("UPGRADE:", active_repo.name)
from contextlib import redirect_stdout
import io
stdout = io.StringIO()
with redirect_stdout(stdout):
bpy.ops.extensions.package_upgrade_all('INVOKE_DEFAULT', use_active_only=True)
if text := stdout.getvalue():
repo_status_text.from_message("Upgrade \"{:s}\"".format(active_repo.name), text)
@bpy.app.handlers.persistent
def extenion_repos_files_clear(directory, _):
# Perform a "safe" file deletion by only removing files known to be either
@@ -601,9 +581,6 @@ def register():
handlers = bpy.app.handlers._extension_repos_sync
handlers.append(extenion_repos_sync)
handlers = bpy.app.handlers._extension_repos_upgrade
handlers.append(extenion_repos_upgrade)
handlers = bpy.app.handlers._extension_repos_files_clear
handlers.append(extenion_repos_files_clear)
@@ -653,10 +630,6 @@ def unregister():
if extenion_repos_sync in handlers:
handlers.remove(extenion_repos_sync)
handlers = bpy.app.handlers._extension_repos_upgrade
if extenion_repos_upgrade in handlers:
handlers.remove(extenion_repos_upgrade)
handlers = bpy.app.handlers._extension_repos_files_clear
if extenion_repos_files_clear in handlers:
handlers.remove(extenion_repos_files_clear)

View File

@@ -916,6 +916,39 @@ def _repo_dir_and_index_get(repo_index, directory, report_fn):
return directory
def _extensions_maybe_online_action_poll_impl(cls, repo, action_text):
if repo is not None:
if not repo.enabled:
cls.poll_message_set("Active repository is disabled")
return False
if repo is None:
# This may not be correct but it's a reasonable assumption.
online_access_required = True
else:
# Check the specifics to allow refreshing a single repository from the popover.
online_access_required = repo.use_remote_url and (not repo.remote_url.startswith("file://"))
if online_access_required:
if not bpy.app.online_access:
cls.poll_message_set(
"Online access required to {:s}. {:s}".format(
action_text,
"Launch Blender without --offline-mode" if bpy.app.online_access_override else
"Enable online access in System preferences"
)
)
return False
repos_all = extension_repos_read(use_active_only=False)
if not len(repos_all):
cls.poll_message_set("No repositories available")
return False
return True
# -----------------------------------------------------------------------------
# Public Repository Actions
#
@@ -1079,24 +1112,15 @@ class EXTENSIONS_OT_repo_sync_all(Operator, _ExtCmdMixIn):
)
@classmethod
def poll(cls, _context):
if not bpy.app.online_access:
if bpy.app.online_access_override:
cls.poll_message_set(
"Online access required to check for updates. Launch Blender without --offline-mode"
)
else:
cls.poll_message_set(
"Online access required to check for updates. Enable online access in System preferences"
)
return False
def poll(cls, context):
repo = getattr(context, "extension_repo", None)
return _extensions_maybe_online_action_poll_impl(cls, repo, "check for updates")
repos_all = extension_repos_read(use_active_only=False)
if not len(repos_all):
cls.poll_message_set("No repositories available")
return False
return True
@classmethod
def description(cls, context, props):
if props.use_active_only:
return "Refresh the list of extensions for the active repository"
return "" # Default.
def exec_command_iter(self, is_modal):
use_active_only = self.use_active_only
@@ -1167,7 +1191,7 @@ class EXTENSIONS_OT_repo_sync_all(Operator, _ExtCmdMixIn):
class EXTENSIONS_OT_package_upgrade_all(Operator, _ExtCmdMixIn):
"""Upgrade all the extensions to their latest version for all the remote repositories"""
bl_idname = "extensions.package_upgrade_all"
bl_label = "Ext Package Upgrade All"
bl_label = "Install Available Updates"
__slots__ = (
*_ExtCmdMixIn.cls_slots,
"_repo_directories",
@@ -1179,21 +1203,23 @@ class EXTENSIONS_OT_package_upgrade_all(Operator, _ExtCmdMixIn):
)
@classmethod
def poll(cls, _context):
if not bpy.app.online_access:
if bpy.app.online_access_override:
cls.poll_message_set("Online access required to install updates. Launch Blender without --offline-mode")
else:
cls.poll_message_set(
"Online access required to install updates. Enable online access in System preferences")
return False
def poll(cls, context):
repo = getattr(context, "extension_repo", None)
if repo is not None:
# NOTE: we could simply not show this operator for local repositories as it's
# arguably self evident that a local-only repository has nothing to upgrade from.
# For now tell the user why they can't use this action.
if not repo.use_remote_url:
cls.poll_message_set("Upgrade is not supported for local repositories")
return False
repos_all = extension_repos_read(use_active_only=False)
if not len(repos_all):
cls.poll_message_set("No repositories available")
return False
return _extensions_maybe_online_action_poll_impl(cls, repo, "install updates")
return True
@classmethod
def description(cls, context, props):
if props.use_active_only:
return "Upgrade all the extensions to their latest version for the active repository"
return "" # Default.
def exec_command_iter(self, is_modal):
from . import repo_cache_store

View File

@@ -22,6 +22,7 @@ from bpy.types import (
from bl_ui.space_userpref import (
USERPREF_PT_addons,
USERPREF_MT_extensions_active_repo,
)
from . import repo_status_text
@@ -1195,6 +1196,20 @@ def tags_panel_draw(panel, context):
col.prop(wm.extension_tags, "[\"{:s}\"]".format(escape_identifier(t)))
def extensions_repo_active_draw(self, context):
# Draw icon buttons on the right hand side of the UI-list.
from . import repo_active_or_none
layout = self.layout
# Allow the poll functions to only check against the active repository.
if (repo := repo_active_or_none()) is not None:
layout.context_pointer_set("extension_repo", repo)
layout.operator("extensions.repo_sync_all", text="", icon='FILE_REFRESH').use_active_only = True
layout.operator("extensions.package_upgrade_all", text="", icon='IMPORT').use_active_only = True
classes = (
# Pop-overs.
USERPREF_PT_extensions_filter,
@@ -1206,6 +1221,7 @@ classes = (
def register():
USERPREF_PT_addons.append(extensions_panel_draw)
USERPREF_PT_extensions_tags.append(tags_panel_draw)
USERPREF_MT_extensions_active_repo.append(extensions_repo_active_draw)
for cls in classes:
bpy.utils.register_class(cls)
@@ -1214,6 +1230,7 @@ def register():
def unregister():
USERPREF_PT_addons.remove(extensions_panel_draw)
USERPREF_PT_extensions_tags.remove(tags_panel_draw)
USERPREF_MT_extensions_active_repo.remove(extensions_repo_active_draw)
for cls in reversed(classes):
bpy.utils.unregister_class(cls)

View File

@@ -1361,7 +1361,7 @@ def pkg_manifest_validate_field_permissions(
elif isinstance(value, list):
# Historic beta convention, keep for compatibility.
for i, item in enumerate(value):
if not isinstance(item_key, str):
if not isinstance(item, str):
return "Expected item at index {:d} to be an int not a {:s}".format(i, str(type(item)))
else:
# The caller doesn't allow this.
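The fix above matters because the old loop referenced item_key, a name that does not exist in this scope, so any non-string entry raised a NameError instead of producing the validation message. A minimal standalone sketch of the corrected behaviour (the permissions list is hypothetical and the message is simplified):

value = ["files", "network", 123]  # hypothetical list-form permissions

for i, item in enumerate(value):
    if not isinstance(item, str):
        # With the old `item_key` name this line raised NameError;
        # with `item` the malformed entry is reported as intended.
        print("Expected item at index {:d} to be a string, not {:s}".format(i, str(type(item))))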

View File

@@ -1,3 +1,5 @@
# SPDX-FileCopyrightText: 2024 Blender Foundation
#
# SPDX-License-Identifier: GPL-2.0-or-later
def register():

View File

@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2024 Blender Foundation
#
# SPDX-License-Identifier: GPL-2.0-or-later
# This is a data file that is evaluated directly (not imported).
# NOTE: this can be removed once upgrading from 4.1 is no longer relevant.
{
@@ -30,7 +34,6 @@
"greasepencil_tools": ("grease_pencil_tools", "Grease Pencil Tools"),
"io_anim_camera": ("export_camera_animation", "Export Camera Animation"),
"io_anim_nuke_chan": ("nuke_animation_format_chan", "Nuke Animation Format (.chan)"),
"io_coat3D": ("coat_applink", "3D-Coat Applink"),
"io_export_dxf": ("export_autocad_dxf_format_dxf", "Export Autocad DXF Format (.dxf)"),
"io_export_paper_model": ("export_paper_model", "Export Paper Model"),
"io_export_pc2": ("export_pointcache_formatpc2", "Export Pointcache Format(.pc2)"),
@@ -93,7 +96,7 @@
"vdm_brush_baker": ("vdm_brush_baker", "VDM Brush Baker"),
# These were built-in but not mapped to an extension ("install" won't be shown in the UI).
"coat_applink": ("", "3D-Coat Applink"), # External dependencies.
"io_coat3D": ("", "3D-Coat Applink"), # External dependencies.
"depsgraph_debug": ("", "Dependency Graph Debug"), # External dependencies.
"io_import_images_as_planes": ("", "Import Images as Planes"), # Integrated with Blender.
"io_mesh_stl": ("", "STL Format"), # Was on by default.

View File

@@ -5,7 +5,7 @@
bl_info = {
"name": "FBX format",
"author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
"version": (5, 12, 3),
"version": (5, 12, 4),
"blender": (4, 2, 0),
"location": "File > Import-Export",
"description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",

View File

@@ -2663,6 +2663,8 @@ def fbx_data_from_scene(scene, depsgraph, settings):
# XXX: When exporting with subsurf information temporarily disable
# the last subsurf modifier.
tmp_mods.append((last_subsurf, last_subsurf.show_render, last_subsurf.show_viewport))
last_subsurf.show_render = False
last_subsurf.show_viewport = False
if do_evaluate:
# If modifiers has been altered need to update dependency graph.
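The tuples appended to tmp_mods store each modifier together with its original show_render and show_viewport flags, so the exporter can put them back once evaluation is done. A minimal sketch of that restore step (an assumption; the restore loop itself is not part of this hunk):

def restore_modifier_visibility(tmp_mods):
    # Each entry was appended as (modifier, show_render, show_viewport)
    # before the last subsurf modifier was hidden for export.
    for mod, show_render, show_viewport in tmp_mods:
        mod.show_render = show_render
        mod.show_viewport = show_viewport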

View File

@@ -5,7 +5,7 @@
bl_info = {
'name': 'glTF 2.0 format',
'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
"version": (4, 2, 48),
"version": (4, 3, 5),
'blender': (4, 2, 0),
'location': 'File > Import-Export',
'description': 'Import-Export as glTF 2.0',
@@ -86,6 +86,14 @@ def ensure_filepath_matches_export_format(filepath, export_format):
def on_export_format_changed(self, context):
# Update the filename in collection export settings when the format (.glb/.gltf) changes
if isinstance(self.id_data, bpy.types.Collection):
self.filepath = ensure_filepath_matches_export_format(
self.filepath,
self.export_format,
)
# Update the filename in the file browser when the format (.glb/.gltf)
# changes
sfile = context.space_data
@@ -583,6 +591,13 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
default="",
)
# Not starting with "export_", as this is a collection only option
at_collection_center: BoolProperty(
name="Export at Collection Center",
description="Export at Collection center of mass of root objects of the collection",
default=False,
)
export_extras: BoolProperty(
name='Custom Properties',
description='Export custom properties as glTF extras',
@@ -1102,6 +1117,7 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
export_settings['gltf_active_collection_with_nested'] = False
export_settings['gltf_active_scene'] = self.use_active_scene
export_settings['gltf_collection'] = self.collection
export_settings['gltf_at_collection_center'] = self.at_collection_center
export_settings['gltf_selected'] = self.use_selection
export_settings['gltf_layers'] = True # self.export_layers
@@ -1288,6 +1304,7 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
is_file_browser = context.space_data.type == 'FILE_BROWSER'
export_main(layout, operator, is_file_browser)
export_panel_collection(layout, operator, is_file_browser)
export_panel_include(layout, operator, is_file_browser)
export_panel_transform(layout, operator)
export_panel_data(layout, operator)
@@ -1317,6 +1334,16 @@ def export_main(layout, operator, is_file_browser):
layout.prop(operator, 'will_save_settings')
def export_panel_collection(layout, operator, is_file_browser):
if is_file_browser:
return
header, body = layout.panel("GLTF_export_collection", default_closed=True)
header.label(text="Collection")
if body:
body.prop(operator, 'at_collection_center')
def export_panel_include(layout, operator, is_file_browser):
header, body = layout.panel("GLTF_export_include", default_closed=True)
header.label(text="Include")
@@ -1765,6 +1792,18 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
default="BLENDER",
)
disable_bone_shape: BoolProperty(
name='Disable Bone Shape',
description='Do not create bone shapes',
default=False,
)
bone_shape_scale_factor: FloatProperty(
name='Bone Shape Scale',
description='Scale factor for bone shapes',
default=1.0,
)
guess_original_bind_pose: BoolProperty(
name='Guess Original Bind Pose',
description=(
@@ -1785,6 +1824,7 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
)
def draw(self, context):
operator = self
layout = self.layout
layout.use_property_split = True
@@ -1794,9 +1834,9 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
layout.prop(self, 'merge_vertices')
layout.prop(self, 'import_shading')
layout.prop(self, 'guess_original_bind_pose')
layout.prop(self, 'bone_heuristic')
layout.prop(self, 'export_import_convert_lighting_mode')
layout.prop(self, 'import_webp_texture')
import_bone_panel(layout, operator)
import_panel_user_extension(context, layout)
@@ -1896,6 +1936,16 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
self.loglevel = logging.DEBUG
def import_bone_panel(layout, operator):
header, body = layout.panel("GLTF_import_bone", default_closed=False)
header.label(text="Bones")
if body:
body.prop(operator, 'bone_heuristic')
if operator.bone_heuristic == 'BLENDER':
body.prop(operator, 'disable_bone_shape')
body.prop(operator, 'bone_shape_scale_factor')
def import_panel_user_extension(context, layout):
for draw in importer_extension_layout_draw.values():
draw(context, layout)

View File

@@ -74,6 +74,13 @@ def __gather_scene(blender_scene, export_settings):
if export_settings['gltf_flatten_obj_hierarchy'] is True:
vtree.break_obj_hierarchy()
# Now that the tree is filtered, in case of Collection Export,
# we need to calculate the collection center,
# in order to set the scene center to the collection center.
# Using the barycenter of object centers for now (another option could be to use the bounding box center).
if export_settings['gltf_collection'] and export_settings['gltf_at_collection_center']:
vtree.calculate_collection_center()
vtree.variants_reset_to_original()
export_user_extensions('vtree_after_filter_hook', export_settings, vtree)

View File

@@ -405,8 +405,14 @@ def __gather_name(blender_object, export_settings):
def __gather_trans_rot_scale(vnode, export_settings):
if vnode.parent_uuid is None:
# No parent, so matrix is world matrix
trans, rot, sca = vnode.matrix_world.decompose()
# No parent, so matrix is world matrix, except if we export a collection
if export_settings['gltf_collection'] and export_settings['gltf_at_collection_center']:
# If collection, we need to take into account the collection offset
trans, rot, sca = vnode.matrix_world.decompose()
trans -= export_settings['gltf_collection_center']
else:
# No parent, so matrix is world matrix
trans, rot, sca = vnode.matrix_world.decompose()
else:
# calculate local matrix
if export_settings['vtree'].nodes[vnode.parent_uuid].skin is None:

View File

@@ -5,7 +5,7 @@
import bpy
import uuid
import numpy as np
from mathutils import Quaternion, Matrix
from mathutils import Quaternion, Matrix, Vector
from ...io.exp.gltf2_io_user_extensions import export_user_extensions
from ...io.com import gltf2_io
from ...io.imp.gltf2_io_binary import BinaryData
@@ -907,3 +907,23 @@ class VExportTree:
self.export_settings['log'].warning(
"We can't remove armature object because some armatures have multiple root bones.")
break
def calculate_collection_center(self):
# Because we already filtered the tree, we can use all objects
# to calculate the center of the scene.
# All objects that are direct roots of the exported collection are taken into account.
centers = []
for node in [
n for n in self.nodes.values() if n.parent_uuid is None and n.blender_type in [
VExportNode.OBJECT,
VExportNode.ARMATURE,
VExportNode.LIGHT,
VExportNode.CAMERA]]:
if node.matrix_world is not None:
centers.append(node.matrix_world.translation)
if len(centers) == 0:
    self.export_settings['gltf_collection_center'] = Vector((0.0, 0.0, 0.0))
    return
self.export_settings['gltf_collection_center'] = sum(centers, Vector()) / len(centers)
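For illustration, the same barycenter computation as a standalone snippet, including the empty-list fallback (the translations are made-up values; mathutils is available inside Blender's Python):

from mathutils import Vector

translations = [Vector((1.0, 0.0, 0.0)), Vector((3.0, 2.0, 0.0))]
if not translations:
    center = Vector((0.0, 0.0, 0.0))
else:
    center = sum(translations, Vector()) / len(translations)
print(center)  # Vector((2.0, 1.0, 0.0))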

View File

@@ -525,6 +525,7 @@ def gather_alpha_info(alpha_nav):
# Nodes will look like:
# Alpha -> [Math:Round] -> Alpha Socket
# Alpha -> [Math:X < cutoff] -> [Math:1 - X] -> Alpha Socket
# Alpha -> [X > cutoff] -> Alpha Socket (Wrong, but backwards compatible with legacy)
def detect_alpha_clip(alpha_nav):
nav = alpha_nav.peek_back()
if not nav.moved:
@@ -538,7 +539,7 @@ def detect_alpha_clip(alpha_nav):
# Detect 1 - (X < cutoff)
# (There is no >= node)
elif nav.node.type == 'MATH' and nav.node.operation == 'SUBTRACT':
if nav.node.type == 'MATH' and nav.node.operation == 'SUBTRACT':
if nav.get_constant(0)[0] == 1.0:
nav2 = nav.peek_back(1)
if nav2.moved and nav2.node.type == 'MATH':
@@ -555,6 +556,21 @@ def detect_alpha_clip(alpha_nav):
alpha_nav.assign(nav2)
return in0
# Detect (X > cutoff)
# Wrong when X = cutoff, but backwards compatible with legacy
# Alpha Clip setup
if nav.node.type == 'MATH':
in0 = nav.get_constant(0)[0]
in1 = nav.get_constant(1)[0]
if nav.node.operation == 'GREATER_THAN' and in1 is not None:
nav.select_input_socket(0)
alpha_nav.assign(nav)
return in1
elif nav.node.operation == 'LESS_THAN' and in0 is not None:
nav.select_input_socket(1)
alpha_nav.assign(nav)
return in0
return None
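To make the new branch concrete: it accepts the legacy chain Alpha -> Math(GREATER_THAN, cutoff) -> Alpha Socket. A hypothetical material that produces exactly that chain (node and socket identifiers are standard Blender names; the material name is made up):

import bpy

mat = bpy.data.materials.new("legacy_alpha_clip_example")
mat.use_nodes = True
nt = mat.node_tree

tex = nt.nodes.new("ShaderNodeTexImage")
clip = nt.nodes.new("ShaderNodeMath")
clip.operation = 'GREATER_THAN'
clip.inputs[1].default_value = 0.5  # reported as the alpha cutoff

bsdf = nt.nodes["Principled BSDF"]  # created by use_nodes = True
nt.links.new(tex.outputs["Alpha"], clip.inputs[0])
nt.links.new(clip.outputs["Value"], bsdf.inputs["Alpha"])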

View File

@@ -32,7 +32,7 @@ class BlenderMaterial():
set_extras(mat, pymaterial.extras)
BlenderMaterial.set_double_sided(pymaterial, mat)
BlenderMaterial.set_eevee_blend_method(pymaterial, mat)
BlenderMaterial.set_eevee_surface_render_method(pymaterial, mat)
BlenderMaterial.set_viewport_color(pymaterial, mat, vertex_color)
mat.use_nodes = True
@@ -70,19 +70,12 @@ class BlenderMaterial():
mat.use_backface_culling = (pymaterial.double_sided != True)
@staticmethod
def set_eevee_blend_method(pymaterial, mat):
def set_eevee_surface_render_method(pymaterial, mat):
alpha_mode = pymaterial.alpha_mode or 'OPAQUE'
if alpha_mode == 'OPAQUE':
mat.blend_method = 'OPAQUE'
elif alpha_mode == 'BLEND':
mat.blend_method = 'BLEND'
elif alpha_mode == 'MASK':
# Alpha clipping is done with nodes, NOT with the
# blend_method, since the blend_method only affects Eevee
# legacy. Using the CLIP method here just hints to Eevee
# that the alpha is always 0 or 1.
mat.blend_method = 'CLIP'
mat.alpha_threshold = 0.5
if alpha_mode in ['OPAQUE', 'MASK']:
mat.surface_render_method = 'DITHERED'
else:
mat.surface_render_method = 'BLENDED'
@staticmethod
def set_viewport_color(pymaterial, mat, vertex_color):

View File

@@ -130,6 +130,9 @@ class BlenderNode():
obj.show_in_front = True
obj.data.relation_line_position = "HEAD"
if gltf.import_settings['disable_bone_shape'] is True:
return
# Create a special collection (if not exists already)
# Content of this collection will not be exported
if BLENDER_GLTF_SPECIAL_COLLECTION not in bpy.data.collections:
@@ -233,13 +236,14 @@ class BlenderNode():
pynode = gltf.data.nodes[id]
set_extras(pose_bone, pynode.extras)
if gltf.import_settings['bone_heuristic'] == "BLENDER":
if gltf.import_settings['bone_heuristic'] == "BLENDER" and gltf.import_settings['disable_bone_shape'] is False:
pose_bone.custom_shape = bpy.data.objects[gltf.bone_shape]
armature_min_dim = min([blender_arma.dimensions[0] /
blender_arma.scale[0], blender_arma.dimensions[1] /
blender_arma.scale[1], blender_arma.dimensions[2] /
blender_arma.scale[2]])
pose_bone.custom_shape_scale_xyz = Vector([armature_min_dim * 0.05] * 3)
pose_bone.custom_shape_scale_xyz = Vector(
[armature_min_dim * 0.05] * 3) * gltf.import_settings['bone_shape_scale_factor']
pose_bone.use_custom_shape_bone_size = False
@staticmethod

View File

@@ -378,7 +378,14 @@ def script_paths_pref():
return paths
def script_paths(*, subdir=None, user_pref=True, check_all=False, use_user=True):
def script_paths_system_environment():
"""Returns a list of system script directories from environment variables."""
if env_system_path := _os.environ.get("BLENDER_SYSTEM_SCRIPTS"):
return [_os.path.normpath(env_system_path)]
return []
def script_paths(*, subdir=None, user_pref=True, check_all=False, use_user=True, use_system_environment=True):
"""
Returns a list of valid script paths.
@@ -388,6 +395,10 @@ def script_paths(*, subdir=None, user_pref=True, check_all=False, use_user=True)
:type user_pref: bool
:arg check_all: Include local, user and system paths rather than just the paths Blender uses.
:type check_all: bool
:arg use_user: Include user paths
:type use_user: bool
:arg use_system_environment: Include BLENDER_SYSTEM_SCRIPTS variable path
:type use_system_environment: bool
:return: script paths.
:rtype: list
"""
@@ -419,6 +430,9 @@ def script_paths(*, subdir=None, user_pref=True, check_all=False, use_user=True)
if user_pref:
base_paths.extend(script_paths_pref())
if use_system_environment:
base_paths.extend(script_paths_system_environment())
scripts = []
for path in base_paths:
if not path:
@@ -473,16 +487,22 @@ def app_template_paths(*, path=None):
"""
subdir_args = (path,) if path is not None else ()
# Note: keep in sync with: Blender's 'BKE_appdir_app_template_any'.
# Uses 'BLENDER_USER_SCRIPTS', 'BLENDER_SYSTEM_SCRIPTS'
# ... in this case 'system' accounts for 'local' too.
for resource_fn, module_name in (
(_user_resource, "bl_app_templates_user"),
(system_resource, "bl_app_templates_system"),
):
path_test = resource_fn('SCRIPTS', path=_os.path.join("startup", module_name, *subdir_args))
if path_test and _os.path.isdir(path_test):
# Uses BLENDER_USER_SCRIPTS
path_test = _user_resource('SCRIPTS', path=_os.path.join("startup", "bl_app_templates_user", *subdir_args))
if path_test and _os.path.isdir(path_test):
yield path_test
# Uses BLENDER_SYSTEM_SCRIPTS
for path in script_paths_system_environment():
path_test = _os.path.join(path, "startup", "bl_app_templates_system", *subdir_args)
if _os.path.isdir(path_test):
yield path_test
# Uses default local or system location.
path_test = system_resource('SCRIPTS', path=_os.path.join("startup", "bl_app_templates_system", *subdir_args))
if path_test and _os.path.isdir(path_test):
yield path_test
def preset_paths(subdir):
"""
@@ -494,7 +514,7 @@ def preset_paths(subdir):
:rtype: list
"""
dirs = []
for path in script_paths(subdir="presets", check_all=True):
for path in script_paths(subdir="presets"):
directory = _os.path.join(path, subdir)
if not directory.startswith(path):
raise Exception("invalid subdir given {!r}".format(subdir))
@@ -809,17 +829,16 @@ def register_submodule_factory(module_name, submodule_names):
def register():
nonlocal module
module = __import__(name=module_name, fromlist=submodule_names)
submodules[:] = [getattr(module, name) for name in submodule_names]
for mod in submodules:
submodules[:] = [(getattr(module, mod_name), mod_name) for mod_name in submodule_names]
for mod, _mod_name in submodules:
mod.register()
def unregister():
from sys import modules
for mod in reversed(submodules):
for mod, mod_name in reversed(submodules):
mod.unregister()
name = mod.__name__
delattr(module, name.partition(".")[2])
del modules[name]
delattr(module, mod_name)
del modules[mod.__name__]
submodules.clear()
return register, unregister
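A short hypothetical session showing the new BLENDER_SYSTEM_SCRIPTS handling (the directory is invented; script_paths() keeps only directories that actually exist on disk):

import os
import bpy

# Hypothetical system scripts directory provided by a studio deployment.
os.environ["BLENDER_SYSTEM_SCRIPTS"] = "/opt/studio/blender_scripts"

# The new helper normalizes and returns the single environment path.
print(bpy.utils.script_paths_system_environment())
# script_paths() appends it after the user-preference paths.
print(bpy.utils.script_paths(use_system_environment=True))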

View File

@@ -5,7 +5,8 @@
import bpy
from bpy.types import Operator
from bpy.props import BoolProperty
from bpy_extras.node_utils import connect_sockets
from bpy_extras.node_utils import find_base_socket_type, connect_sockets
from bpy.app.translations import pgettext_data as data_
from .node_editor.node_functions import (
NodeEditorBase,
@@ -15,9 +16,7 @@ from .node_editor.node_functions import (
get_output_location,
get_internal_socket,
is_visible_socket,
is_viewer_socket,
is_viewer_link,
viewer_socket_name,
force_update,
)
@@ -65,7 +64,7 @@ class NODE_OT_connect_to_output(Operator, NodeEditorBase):
output_sockets = self.get_output_sockets(node_tree)
if len(output_sockets):
for i, socket in enumerate(output_sockets):
if is_viewer_socket(socket) and socket.socket_type == socket_type:
if socket.is_inspect_output:
# If viewer output is already used but leads to the same socket we can still use it.
is_used = self.has_socket_other_users(socket)
if is_used:
@@ -82,7 +81,7 @@ class NODE_OT_connect_to_output(Operator, NodeEditorBase):
if viewer_socket is None:
# Create viewer socket.
viewer_socket = node_tree.interface.new_socket(
viewer_socket_name, in_out='OUTPUT', socket_type=socket_type)
data_("(Viewer)"), in_out='OUTPUT', socket_type=socket_type)
viewer_socket.is_inspect_output = True
return viewer_socket
@@ -101,9 +100,9 @@ class NODE_OT_connect_to_output(Operator, NodeEditorBase):
return groupout
@classmethod
def search_sockets(cls, node, r_sockets, index=None):
"""Recursively scan nodes for viewer sockets and store them in a list"""
for i, input_socket in enumerate(node.inputs):
def search_connected_viewer_sockets(cls, output_node, r_sockets, index=None):
"""From an output node, recursively scan node tree for connected viewer sockets"""
for i, input_socket in enumerate(output_node.inputs):
if index and i != index:
continue
if len(input_socket.links):
@@ -112,25 +111,26 @@ class NODE_OT_connect_to_output(Operator, NodeEditorBase):
external_socket = link.from_socket
if hasattr(next_node, "node_tree"):
for socket_index, socket in enumerate(next_node.node_tree.interface.items_tree):
# Find inside socket matching outside one.
if socket.identifier == external_socket.identifier:
break
if is_viewer_socket(socket) and socket not in r_sockets:
if socket.is_inspect_output and socket not in r_sockets:
r_sockets.append(socket)
# continue search inside of node group but restrict socket to where we came from.
# Continue search inside of node group but restrict socket to where we came from.
groupout = get_group_output_node(next_node.node_tree)
cls.search_sockets(groupout, r_sockets, index=socket_index)
cls.search_connected_viewer_sockets(groupout, r_sockets, index=socket_index)
@classmethod
def scan_nodes(cls, tree, sockets):
"""Recursively get all viewer sockets in a material tree"""
def search_viewer_sockets_in_tree(cls, tree, r_sockets):
"""Recursively get all viewer sockets in a node tree"""
for node in tree.nodes:
if hasattr(node, "node_tree"):
if node.node_tree is None:
continue
for socket in cls.get_output_sockets(node.node_tree):
if is_viewer_socket(socket) and (socket not in sockets):
sockets.append(socket)
cls.scan_nodes(node.node_tree, sockets)
if socket.is_inspect_output and (socket not in r_sockets):
r_sockets.append(socket)
cls.search_viewer_sockets_in_tree(node.node_tree, r_sockets)