Refactor: combine insert_keyframe() and insert_key_rna() into a single function #122053

Merged
Nathan Vegdahl merged 49 commits from nathanvegdahl/blender:combine_keying_functions into main 2024-06-11 16:43:08 +02:00
136 changed files with 1475 additions and 741 deletions
Showing only changes of commit 309a278f26

@@ -18,12 +18,16 @@ macro(fftw_build FFTW_POSTFIX)
CMAKE_ARGS
-DCMAKE_INSTALL_PREFIX=${LIBDIR}/fftw3
-DENABLE_THREADS=ON
-DWITH_COMBINED_THREADS=OFF
-DBUILD_SHARED_LIBS=OFF
-DBUILD_TESTS=OFF
${FFTW_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/fftw3
)
else()
set(FFTW_EXTRA_ARGS --enable-static)
set(FFTW_EXTRA_ARGS --enable-static --enable-threads)
set(FFTW_INSTALL install)
ExternalProject_Add(external_fftw3_${FFTW_POSTFIX}
URL file://${PACKAGE_DIR}/${FFTW_FILE}
@@ -57,12 +61,12 @@ if(MSVC)
COMMAND ${CMAKE_COMMAND} -E copy
${LIBDIR}/fftw3/lib/fftw3.lib
${HARVEST_TARGET}/fftw3/lib/fftw3.lib
COMMAND ${CMAKE_COMMAND} -E copy
${LIBDIR}/fftw3/bin/fftw3.dll
${HARVEST_TARGET}/fftw3/lib/fftw3.dll
COMMAND ${CMAKE_COMMAND} -E copy
${LIBDIR}/fftw3/include/fftw3.h
${HARVEST_TARGET}/fftw3/include/fftw3.h
COMMAND ${CMAKE_COMMAND} -E copy
${LIBDIR}/fftw3/lib/fftw3_threads.lib
${HARVEST_TARGET}/fftw3/lib/fftw3_threads.lib
DEPENDEES install
)
ExternalProject_Add_Step(external_fftw3_float after_install
@@ -70,8 +74,8 @@ if(MSVC)
${LIBDIR}/fftw3/lib/fftw3f.lib
${HARVEST_TARGET}/fftw3/lib/fftw3f.lib
COMMAND ${CMAKE_COMMAND} -E copy
${LIBDIR}/fftw3/bin/fftw3f.dll
${HARVEST_TARGET}/fftw3/lib/fftw3f.dll
${LIBDIR}/fftw3/lib/fftw3f_threads.lib
${HARVEST_TARGET}/fftw3/lib/fftw3f_threads.lib
DEPENDEES install
)
endif()

@@ -73,3 +73,13 @@ index ae14ced..a49e131 100644
else
return HIPArch::Unknown;
}
--- a/devices/cpu/cpu_engine.h
+++ b/devices/cpu/cpu_engine.h
@@ -7,5 +7,7 @@
#include "cpu_device.h"
#include <queue>
#include <condition_variable>
+// BLENDER: needed for building on Linux.
+#include <thread>
OIDN_NAMESPACE_BEGIN

@@ -440,12 +440,16 @@ endif()
if(WITH_FFTW3)
set(FFTW3 ${LIBDIR}/fftw3)
if(EXISTS ${FFTW3}/lib/libfftw3-3.lib) # 3.6 libraries
set(FFTW3_LIBRARIES ${FFTW3}/lib/libfftw3-3.lib ${FFTW3}/lib/libfftw3f.lib)
elseif(EXISTS ${FFTW3}/lib/libfftw.lib)
set(FFTW3_LIBRARIES ${FFTW3}/lib/libfftw.lib) # 3.5 Libraries
else()
set(FFTW3_LIBRARIES ${FFTW3}/lib/fftw3.lib ${FFTW3}/lib/fftw3f.lib) # msys2+MSVC Libraries
set(FFTW3_LIBRARIES
${FFTW3}/lib/fftw3.lib
${FFTW3}/lib/fftw3f.lib
)
if(EXISTS ${FFTW3}/lib/fftw3_threads.lib)
list(APPEND FFTW3_LIBRARIES
${FFTW3}/lib/fftw3_threads.lib
${FFTW3}/lib/fftw3f_threads.lib
)
set(WITH_FFTW3_THREADS_SUPPORT ON)
endif()
set(FFTW3_INCLUDE_DIRS ${FFTW3}/include)
set(FFTW3_LIBPATH ${FFTW3}/lib)

@@ -409,7 +409,7 @@ void FFMPEGReader::seek(int position)
{
double pts_time_base = av_q2d(m_formatCtx->streams[m_stream]->time_base);
uint64_t st_time = m_formatCtx->streams[m_stream]->start_time;
int64_t st_time = m_formatCtx->streams[m_stream]->start_time;
uint64_t seek_pos = (uint64_t)(position / (pts_time_base * m_specs.rate));
if(st_time != AV_NOPTS_VALUE)
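
The type change above matters because FFmpeg declares start_time as int64_t and uses AV_NOPTS_VALUE, the most negative 64-bit value, as its "no timestamp" sentinel. A minimal illustration of why storing the sentinel in an unsigned variable is fragile (plain Python, not the Audaspace code):

    # AV_NOPTS_VALUE is INT64_MIN, bit pattern 0x8000000000000000.
    AV_NOPTS_VALUE = -(2 ** 63)

    # Reinterpreted as an unsigned 64-bit value it becomes a huge positive
    # number, so sign-dependent comparisons and offset arithmetic on a
    # uint64_t copy of start_time silently misbehave.
    as_unsigned = AV_NOPTS_VALUE & 0xFFFFFFFFFFFFFFFF
    assert as_unsigned == 2 ** 63  # 9223372036854775808, no longer negative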

@@ -2437,7 +2437,7 @@ class CYCLES_VIEW3D_PT_shading_lighting(Panel):
sub.template_icon_view(shading, "studio_light", scale_popup=3)
col = split.column()
col.operator("preferences.studiolight_show", emboss=False, text="", icon='PREFERENCES')
col.operator("screen.userpref_show", emboss=False, text="", icon='PREFERENCES').section = 'LIGHTS'
split = layout.split(factor=0.9)
col = split.column()

@@ -1112,6 +1112,11 @@ GHOST_TSuccess GHOST_ContextVK::initializeDrawingContext()
return GHOST_kFailure;
}
vulkan_device->users++;
/* Register optional device extensions */
if (vulkan_device->has_extensions({VK_KHR_MAINTENANCE_4_EXTENSION_NAME})) {
extensions_device.push_back(VK_KHR_MAINTENANCE_4_EXTENSION_NAME);
}
#ifdef VK_MVK_MOLTENVK_EXTENSION_NAME
/* According to the Vulkan specs, when `VK_KHR_portability_subset` is available it should be
* enabled. See
@@ -1120,7 +1125,6 @@ GHOST_TSuccess GHOST_ContextVK::initializeDrawingContext()
extensions_device.push_back(VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME);
}
#endif
vulkan_device->users++;
vulkan_device->ensure_device(layers_enabled, extensions_device);
vkGetDeviceQueue(

@@ -377,7 +377,7 @@ class GHOST_XrGraphicsBindingD3D : public GHOST_IXrGraphicsBinding {
strstream << "Minimum DirectX 11 Feature Level " << gpu_requirements.minFeatureLevel
<< std::endl;
*r_requirement_info = std::move(strstream.str());
*r_requirement_info = strstream.str();
}
return m_ghost_d3d_ctx->m_device->GetFeatureLevel() >= gpu_requirements.minFeatureLevel;

@@ -134,6 +134,12 @@ use_repos_to_notify = False
def repos_to_notify():
import os
from .bl_extension_utils import (
scandir_with_demoted_errors,
PKG_MANIFEST_FILENAME_TOML,
)
repos_notify = []
if not bpy.app.background:
# To use notifications on startup requires:
@@ -143,7 +149,6 @@ def repos_to_notify():
# Since it's not all that common to disable the status bar just run notifications
# if any repositories are marked to run notifications.
online_access = bpy.app.online_access
prefs = bpy.context.preferences
extension_repos = prefs.extensions.repos
for repo_item in extension_repos:
@@ -158,14 +163,30 @@
if not remote_url:
continue
if online_access:
# All URLs may be accessed.
pass
else:
# Allow remote file-system repositories even when online access is disabled.
if not remote_url.startswith("file://"):
continue
# WARNING: this could be a more expensive check, use a "reasonable" guess.
# This is technically incorrect because knowing if a repository has any installed
# packages requires reading its meta-data and comparing it with the directory contents.
# Chances are that if the directory contains *any* directories containing a package manifest,
# it has packages installed.
#
# Simply check the repositories directory isn't empty (ignoring dot-files).
# Importantly, this may give false positives but *not* false negatives.
repo_is_empty = True
repo_directory = repo_item.directory
if os.path.isdir(repo_directory):
for entry in scandir_with_demoted_errors(repo_directory):
if not entry.is_dir():
continue
if entry.name.startswith("."):
continue
if not os.path.exists(os.path.join(entry.path, PKG_MANIFEST_FILENAME_TOML)):
continue
repo_is_empty = False
break
if repo_is_empty:
continue
# NOTE: offline checks are handled by the notification (not here).
repos_notify.append(repo_item)
return repos_notify
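
The heuristic in this hunk can be read as a standalone predicate. A sketch under the same assumptions (repo_seems_to_have_packages is a hypothetical name, not part of the patch):

    import os

    def repo_seems_to_have_packages(repo_directory, manifest_name="blender_manifest.toml"):
        # A repository "has packages" if any non-hidden sub-directory
        # contains a package manifest. As the comment above notes, this
        # may give false positives but not false negatives.
        if not os.path.isdir(repo_directory):
            return False
        for entry in os.scandir(repo_directory):
            if not entry.is_dir() or entry.name.startswith("."):
                continue
            if os.path.exists(os.path.join(entry.path, manifest_name)):
                return True
        return False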
@@ -223,7 +244,10 @@ def extenion_repos_files_clear(directory, _):
# has the potential to wipe user data #119481.
import shutil
import os
from .bl_extension_utils import scandir_with_demoted_errors
from .bl_extension_utils import (
scandir_with_demoted_errors,
PKG_MANIFEST_FILENAME_TOML,
)
# Unlikely but possible a new repository is immediately removed before initializing,
# avoid errors in this case.
if not os.path.isdir(directory):
@@ -232,18 +256,18 @@
if os.path.isdir(path := os.path.join(directory, ".blender_ext")):
try:
shutil.rmtree(path)
except BaseException as ex:
except Exception as ex:
print("Failed to remove files", ex)
for entry in scandir_with_demoted_errors(directory):
if not entry.is_dir():
continue
path = entry.path
if not os.path.exists(os.path.join(path, "blender_manifest.toml")):
if not os.path.exists(os.path.join(path, PKG_MANIFEST_FILENAME_TOML)):
continue
try:
shutil.rmtree(path)
except BaseException as ex:
except Exception as ex:
print("Failed to remove files", ex)
@@ -298,11 +322,11 @@ def monkeypatch_extensions_repos_update_pre(*_):
print_debug("PRE:")
try:
monkeypatch_extenions_repos_update_pre_impl()
except BaseException as ex:
except Exception as ex:
print_debug("ERROR", str(ex))
try:
monkeypatch_extensions_repos_update_pre._fn_orig()
except BaseException as ex:
except Exception as ex:
print_debug("ERROR", str(ex))
@@ -311,11 +335,11 @@ def monkeypatch_extenions_repos_update_post(*_):
print_debug("POST:")
try:
monkeypatch_extenions_repos_update_post._fn_orig()
except BaseException as ex:
except Exception as ex:
print_debug("ERROR", str(ex))
try:
monkeypatch_extenions_repos_update_post_impl()
except BaseException as ex:
except Exception as ex:
print_debug("ERROR", str(ex))
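
The BaseException-to-Exception sweep in this file (and the files below) is not cosmetic: BaseException also matches KeyboardInterrupt and SystemExit, so a blanket handler could swallow a user's Ctrl-C or a requested interpreter exit. A minimal demonstration:

    try:
        raise KeyboardInterrupt
    except Exception:
        print("not reached: KeyboardInterrupt does not derive from Exception")
    except BaseException:
        print("reached: only a BaseException handler catches it")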

@@ -94,7 +94,7 @@ class subcmd_utils:
bpy.ops.bl_pkg.repo_sync_all()
if show_done:
sys.stdout.write("Done...\n\n")
except BaseException:
except Exception:
print("Error synchronizing")
import traceback
traceback.print_exc()
@@ -372,7 +372,7 @@ class subcmd_pkg:
)
except RuntimeError:
return False # The error will have been printed.
except BaseException as ex:
except Exception as ex:
sys.stderr.write(str(ex))
sys.stderr.write("\n")

@@ -24,6 +24,9 @@ from . import bl_extension_utils
# only keep this as a reference and in case we can speed up forcing them to exit.
USE_GRACEFUL_EXIT = False
# Special value to signal no packages can be updated because all repositories are blocked by being offline.
STATE_DATA_ALL_OFFLINE = object()
# -----------------------------------------------------------------------------
# Internal Utilities
@@ -140,8 +143,19 @@ def sync_status_generator(repos_notify):
# Setup The Update #
# ################ #
repos_notify_orig = repos_notify
if not bpy.app.online_access:
repos_notify = [repo for repo in repos_notify if repo.remote_url.startswith("file://")]
if not repos_notify:
# Special case, early exit.
yield (STATE_DATA_ALL_OFFLINE, 0, ())
return
yield None
any_offline = len(repos_notify) != len(repos_notify_orig)
del repos_notify_orig
# An extension unique to this session.
unique_ext = "@{:x}".format(os.getpid())
@@ -244,10 +258,15 @@ def sync_status_generator(repos_notify):
# TODO: more elegant way to detect changes.
# Re-calculating the same information each time then checking if it's different isn't great.
if command_result.status_data_changed:
extra_warnings = []
if command_result.all_complete:
any_lock_errors = sync_apply_locked(repos_notify, repos_notify_files, unique_ext)
update_total = sync_status_count_outdated_extensions(repos_notify)
yield (cmd_batch.calc_status_data(), update_total, any_lock_errors)
if any_lock_errors:
extra_warnings.append(" Failed to acquire lock!")
if any_offline:
extra_warnings.append(" Skipping online repositories!")
yield (cmd_batch.calc_status_data(), update_total, extra_warnings)
else:
yield None
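
Taken together with the hunk below, the generator's protocol is: yield None when nothing visible changed, yield a (status_data, update_count, extra_warnings) tuple on change, and yield the STATE_DATA_ALL_OFFLINE sentinel once before returning when every repository needs online access. A toy consumer (illustrative, not the Blender code):

    for state in sync_status_generator(repos_notify):
        if state is None:
            continue  # no visible change this tick
        status_data, update_count, extra_warnings = state
        if status_data is STATE_DATA_ALL_OFFLINE:
            break  # nothing to sync; prompt the user about offline mode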
@@ -294,7 +313,7 @@ class NotifyHandle:
self.state = 0
# We could start the generator separately, this seems OK here for now.
self.sync_generator = iter(sync_status_generator(repos_notify))
# TEXT/ICON_ID/COUNT
# status_data, update_count, extra_warnings.
self.sync_info = None
@@ -354,11 +373,15 @@ def splash_draw_status_fn(self, context):
if _notify.sync_info is None:
self.layout.label(text="Updates starting...")
elif _notify.sync_info[0] is STATE_DATA_ALL_OFFLINE:
# The special case is ugly but showing this operator doesn't fit well with other kinds of status updates.
self.layout.operator("bl_pkg.extensions_show_online_prefs", text="Offline mode", icon='ORPHAN_DATA')
else:
status_data, update_count, any_lock_errors = _notify.sync_info
status_data, update_count, extra_warnings = _notify.sync_info
text, icon = bl_extension_utils.CommandBatch.calc_status_text_icon_from_data(status_data, update_count)
if any_lock_errors:
text = text + " - failed to acquire lock!"
# Not more than 1-2 of these (failed to lock, some repositories offline .. etc).
for warning in extra_warnings:
text = text + warning
row = self.layout.row(align=True)
if update_count > 0:
row.operator("bl_pkg.extensions_show_for_update", text=text, icon=icon)

@@ -456,7 +456,7 @@ def _preferences_ensure_sync():
win.cursor_set('WAIT')
try:
bpy.ops.bl_pkg.repo_sync_all()
except BaseException as ex:
except Exception as ex:
print("Sync failed:", ex)
for wm in bpy.data.window_managers:
@@ -982,7 +982,7 @@ class BlPkgRepoSync(Operator, _BlPkgCmdMixIn):
if not os.path.exists(directory):
try:
os.makedirs(directory)
except BaseException as ex:
except Exception as ex:
self.report({'ERROR'}, str(ex))
return {'CANCELLED'}
@@ -1050,7 +1050,7 @@ class BlPkgRepoSyncAll(Operator, _BlPkgCmdMixIn):
if not os.path.exists(repo_item.directory):
try:
os.makedirs(repo_item.directory)
except BaseException as ex:
except Exception as ex:
self.report({'WARNING'}, str(ex))
return None
@@ -2245,42 +2245,27 @@ class BlPkgShowUpgrade(Operator):
return {'FINISHED'}
class BlPkgOnlineAccess(Operator):
"""Handle online access"""
bl_idname = "bl_pkg.extension_online_access"
# NOTE: this is a wrapper for `SCREEN_OT_userpref_show`.
# It exists *only* to add a poll function which sets a message when offline mode is forced.
class BlPkgShowOnlinePreference(Operator):
"""Show system preferences "Network" panel to allow online access"""
bl_idname = "bl_pkg.extensions_show_online_prefs"
bl_label = ""
bl_options = {'INTERNAL'}
enable: BoolProperty(
name="Enable",
default=False,
)
@classmethod
def poll(cls, context):
if bpy.app.online_access_override:
if not bpy.app.online_access:
cls.poll_message_set("Blender was launched in offline-mode which cannot be changed at runtime")
return False
return True
def execute(self, context):
wm = context.window_manager
prefs = context.preferences
remote_url = "https://extensions.blender.org/api/v1/extensions"
if self.enable:
extension_repos = prefs.extensions.repos
repo_found = None
for repo in extension_repos:
if repo.remote_url == remote_url:
repo_found = repo
break
if repo_found:
repo_found.enabled = True
else:
# While not expected, we want to know if this ever occurs, don't fail silently.
self.report({'WARNING'}, "Repository \"{:s}\" not found!".format(remote_url))
if bpy.app.online_access:
# Run the first check for updates automatically.
# Invoke the modal operator so users can cancel by pressing "Escape".
assert bpy.ops.bl_pkg.repo_sync_all.poll()
bpy.ops.bl_pkg.repo_sync_all('INVOKE_DEFAULT')
prefs.extensions.use_online_access_handled = True
bpy.ops.screen.userpref_show('INVOKE_DEFAULT', section='SYSTEM')
return {'FINISHED'}
@@ -2334,7 +2319,7 @@ classes = (
BlPkgRepoUnlock,
BlPkgShowUpgrade,
BlPkgOnlineAccess,
BlPkgShowOnlinePreference,
# Dummy, just shows a message.
BlPkgEnableNotInstalled,

@@ -296,16 +296,21 @@ def extensions_panel_draw_online_extensions_request_impl(
if layout_panel is not None:
# Text wrapping isn't supported, manually wrap.
for line in (
"Welcome! Access community-made add-ons and themes from the",
"Welcome! Access community-made add-ons and themes from the ",
"extensions.blender.org repository.",
"",
"This also requires internet access which must be enabled in \"System\" preferences.",
"This requires online access which must be enabled in \"System\" preferences.",
):
layout_panel.label(text=line)
row = layout.row()
row.operator("bl_pkg.extension_online_access", text="Dismiss", icon='X').enable = False
row.operator("bl_pkg.extension_online_access", text="Enable Repository", icon='CHECKMARK').enable = True
props = row.operator("wm.context_set_boolean", text="Dismiss", icon='X')
props.data_path = "preferences.extensions.use_online_access_handled"
props.value = True
# The only reason to prefer this over `screen.userpref_show`
# is it will be disabled when `--offline-mode` is forced with a useful error for why.
row.operator("bl_pkg.extensions_show_online_prefs", text="Go to System")
def extensions_panel_draw_impl(
@@ -833,7 +838,16 @@ def extensions_panel_draw(panel, context):
if repo_status_text.running:
return
if not prefs.extensions.use_online_access_handled:
# Check if the extensions "Welcome" panel should be displayed.
# Even though it can be dismissed it's quite "in-your-face" so only show when it's needed.
if (
# The user didn't dismiss.
(not prefs.extensions.use_online_access_handled) and
# Running offline.
(not bpy.app.online_access) and
# There is one or more repositories that require remote access.
any(repo for repo in prefs.extensions.repos if repo.enabled and repo.use_remote_url)
):
extensions_panel_draw_online_extensions_request_impl(panel, context)
extensions_panel_draw_impl(

@@ -136,7 +136,7 @@ if sys.platform == "win32":
if res == 0:
print(WinError())
def file_handle_non_blocking_is_error_blocking(ex: BaseException) -> bool:
def file_handle_non_blocking_is_error_blocking(ex: Exception) -> bool:
if not isinstance(ex, OSError):
return False
from ctypes import GetLastError
@@ -152,7 +152,7 @@ else:
flags = fcntl.fcntl(file_handle.fileno(), fcntl.F_GETFL)
fcntl.fcntl(file_handle, fcntl.F_SETFL, flags | os.O_NONBLOCK)
def file_handle_non_blocking_is_error_blocking(ex: BaseException) -> bool:
def file_handle_non_blocking_is_error_blocking(ex: Exception) -> bool:
if not isinstance(ex, BlockingIOError):
return False
return True
@@ -170,7 +170,7 @@ def scandir_with_demoted_errors(path: str) -> Generator[os.DirEntry[str], None,
try:
for entry in os.scandir(path):
yield entry
except BaseException as ex:
except Exception as ex:
print("Error: scandir", ex)
@@ -203,7 +203,7 @@ def command_output_from_json_0(
# It's possible this is multiple chunks.
try:
chunk = stdout.read()
except BaseException as ex:
except Exception as ex:
if not file_handle_non_blocking_is_error_blocking(ex):
raise ex
chunk = b''
@@ -512,7 +512,7 @@ def pkg_repo_cache_clear(local_dir: str) -> None:
# Should never fail unless the file-system has permissions issues or corruption.
try:
os.unlink(entry.path)
except BaseException as ex:
except Exception as ex:
print("Error: unlink", ex)
@@ -829,7 +829,7 @@ class _RepoCacheEntry:
def _json_data_ensure(
self,
*,
error_fn: Callable[[BaseException], None],
error_fn: Callable[[Exception], None],
check_files: bool = False,
ignore_missing: bool = False,
) -> Any:
@@ -842,7 +842,7 @@
try:
self._pkg_manifest_remote = json_from_filepath(filepath_json)
except BaseException as ex:
except Exception as ex:
self._pkg_manifest_remote = None
error_fn(ex)
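
The signature change from Callable[[BaseException], None] to Callable[[Exception], None] repeats through the rest of this module. The callback pattern itself is simple: the caller decides whether errors accumulate, print, or raise. A hedged sketch:

    from typing import List

    errors: List[Exception] = []

    def error_fn(ex: Exception) -> None:
        # Collect errors instead of aborting; a caller could equally
        # print or re-raise here.
        errors.append(ex)

    # e.g. repo_cache_entry._json_data_ensure(error_fn=error_fn)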
@@ -867,7 +867,7 @@
def _json_data_refresh_from_toml(
self,
*,
error_fn: Callable[[BaseException], None],
error_fn: Callable[[Exception], None],
force: bool = False,
) -> None:
assert self.remote_url == ""
@@ -885,7 +885,7 @@
# A symbolic-link that's followed (good), if it exists and is a file an error is raised here and returned.
if not os.path.isdir(directory):
os.makedirs(directory, exist_ok=True)
except BaseException as ex:
except Exception as ex:
error_fn(ex)
return
del directory
@@ -911,7 +911,7 @@
def _json_data_refresh(
self,
*,
error_fn: Callable[[BaseException], None],
error_fn: Callable[[Exception], None],
force: bool = False,
) -> None:
if force or (self._pkg_manifest_remote is None) or (self._pkg_manifest_remote_mtime == 0):
@@ -934,7 +934,7 @@
try:
self._pkg_manifest_remote = json_from_filepath(filepath_json)
except BaseException as ex:
except Exception as ex:
self._pkg_manifest_remote = None
error_fn(ex)
@@ -947,7 +947,7 @@
def pkg_manifest_from_local_ensure(
self,
*,
error_fn: Callable[[BaseException], None],
error_fn: Callable[[Exception], None],
ignore_missing: bool = False,
) -> Optional[Dict[str, Dict[str, Any]]]:
# Important for local-only repositories (where the directory name defines the ID).
@@ -961,7 +961,7 @@
pkg_manifest_local = {}
try:
dir_entries = os.scandir(self.directory)
except BaseException as ex:
except Exception as ex:
dir_entries = None
error_fn(ex)
@@ -986,7 +986,7 @@
filepath_toml = os.path.join(self.directory, filename, PKG_MANIFEST_FILENAME_TOML)
try:
item_local = toml_from_filepath(filepath_toml)
except BaseException as ex:
except Exception as ex:
item_local = None
error_fn(ex)
@@ -1017,7 +1017,7 @@
def pkg_manifest_from_remote_ensure(
self,
*,
error_fn: Callable[[BaseException], None],
error_fn: Callable[[Exception], None],
ignore_missing: bool = False,
) -> Optional[Dict[str, Dict[str, Any]]]:
if self._pkg_manifest_remote is None:
@@ -1069,7 +1069,7 @@ class RepoCacheStore:
self,
directory: str,
*,
error_fn: Callable[[BaseException], None],
error_fn: Callable[[Exception], None],
force: bool = False,
) -> None:
for repo_entry in self._repos:
@@ -1082,7 +1082,7 @@
self,
directory: str,
*,
error_fn: Callable[[BaseException], None],
error_fn: Callable[[Exception], None],
ignore_missing: bool = False,
directory_subset: Optional[Set[str]] = None,
) -> Optional[Dict[str, Dict[str, Any]]]:
@@ -1099,7 +1099,7 @@
def pkg_manifest_from_remote_ensure(
self,
*,
error_fn: Callable[[BaseException], None],
error_fn: Callable[[Exception], None],
check_files: bool = False,
ignore_missing: bool = False,
directory_subset: Optional[Set[str]] = None,
@@ -1137,7 +1137,7 @@
def pkg_manifest_from_local_ensure(
self,
*,
error_fn: Callable[[BaseException], None],
error_fn: Callable[[Exception], None],
check_files: bool = False,
directory_subset: Optional[Set[str]] = None,
) -> Generator[Optional[Dict[str, Dict[str, Any]]], None, None]:
@@ -1198,7 +1198,7 @@ class RepoLock:
try:
with open(local_lock_file, "r", encoding="utf8") as fh:
data = fh.read()
except BaseException as ex:
except Exception as ex:
return "lock file could not be read: {:s}".format(str(ex))
# The lock is held.
@@ -1210,7 +1210,7 @@
# The lock is held (but stale), remove it.
try:
os.remove(local_lock_file)
except BaseException as ex:
except Exception as ex:
return "lock file could not be removed: {:s}".format(str(ex))
return None
@@ -1241,12 +1241,12 @@
try:
with open(local_lock_file, "w", encoding="utf8") as fh:
fh.write(self._cookie)
except BaseException as ex:
except Exception as ex:
result[directory] = "Lock could not be created: {:s}".format(str(ex))
# Remove if it was created (but failed to write)... disk-full?
try:
os.remove(local_lock_file)
except BaseException:
except Exception:
pass
continue
@@ -1268,7 +1268,7 @@
try:
with open(local_lock_file, "r", encoding="utf8") as fh:
data = fh.read()
except BaseException as ex:
except Exception as ex:
result[directory] = "release(): lock file could not be read: {:s}".format(str(ex))
continue
# Owned by another application, this shouldn't happen.
@@ -1279,7 +1279,7 @@
# This is our lock file, we're allowed to remove it!
try:
os.remove(local_lock_file)
except BaseException as ex:
except Exception as ex:
result[directory] = "release(): failed to remove file {!r}".format(ex)
self._held = False
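
RepoLock's scheme, reduced to its essentials: each process writes a unique cookie into the lock file and only removes the file if the cookie still matches, so one process can never release another's lock. A condensed sketch (error handling and stale-lock detection trimmed):

    import os

    def lock_acquire(local_lock_file, cookie):
        if os.path.exists(local_lock_file):
            return False  # already held (possibly stale; see the hunks above)
        with open(local_lock_file, "w", encoding="utf8") as fh:
            fh.write(cookie)
        return True

    def lock_release(local_lock_file, cookie):
        with open(local_lock_file, "r", encoding="utf8") as fh:
            if fh.read() != cookie:
                return False  # owned by another process, leave it alone
        os.remove(local_lock_file)
        return True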

@@ -651,7 +651,7 @@ def pkg_manifest_from_archive_and_validate(
) -> Union[PkgManifest, str]:
try:
zip_fh_context = zipfile.ZipFile(filepath, mode="r")
except BaseException as ex:
except Exception as ex:
return "Error extracting archive \"{:s}\"".format(str(ex))
with contextlib.closing(zip_fh_context) as zip_fh:
@@ -1517,7 +1517,7 @@ def repo_json_is_valid_or_error(filepath: str) -> Optional[str]:
try:
with open(filepath, "r", encoding="utf-8") as fh:
result = json.load(fh)
except BaseException as ex:
except Exception as ex:
return str(ex)
if not isinstance(result, dict):
@@ -1567,7 +1567,7 @@ def pkg_manifest_toml_is_valid_or_error(filepath: str, strict: bool) -> Tuple[Op
try:
with open(filepath, "rb") as fh:
result = tomllib.load(fh)
except BaseException as ex:
except Exception as ex:
return str(ex), {}
error = pkg_manifest_is_valid_or_error(result, from_repo=False, strict=strict)
@@ -2155,7 +2155,7 @@ class subcmd_client:
with CleanupPathsContext(files=(), directories=directories_to_clean):
try:
zip_fh_context = zipfile.ZipFile(filepath_archive, mode="r")
except BaseException as ex:
except Exception as ex:
message_warn(
msg_fn,
"Error extracting archive: {:s}".format(str(ex)),
@@ -2223,7 +2223,7 @@
try:
for member in zip_fh.infolist():
zip_fh.extract(member, filepath_local_pkg_temp)
except BaseException as ex:
except Exception as ex:
message_warn(
msg_fn,
"Failed to extract files for \"{:s}\": {:s}".format(manifest.id, str(ex)),
@@ -2485,7 +2485,7 @@
filepath_local_pkg = os.path.join(local_dir, pkg_idname)
try:
shutil.rmtree(filepath_local_pkg)
except BaseException as ex:
except Exception as ex:
message_error(msg_fn, "Failure to remove \"{:s}\" with error ({:s})".format(pkg_idname, str(ex)))
continue
@@ -2613,7 +2613,7 @@ class subcmd_author:
with CleanupPathsContext(files=(outfile_temp,), directories=()):
try:
zip_fh_context = zipfile.ZipFile(outfile_temp, 'w', zipfile.ZIP_LZMA)
except BaseException as ex:
except Exception as ex:
message_status(msg_fn, "Error creating archive \"{:s}\"".format(str(ex)))
return False
@@ -2640,7 +2640,7 @@
compress_type = zipfile.ZIP_STORED if filepath_skip_compress(filepath_abs) else None
try:
zip_fh.write(filepath_abs, filepath_rel, compress_type=compress_type)
except BaseException as ex:
except Exception as ex:
message_status(msg_fn, "Error adding to archive \"{:s}\"".format(str(ex)))
return False
@@ -2713,7 +2713,7 @@
try:
zip_fh_context = zipfile.ZipFile(pkg_source_archive, mode="r")
except BaseException as ex:
except Exception as ex:
message_status(msg_fn, "Error extracting archive \"{:s}\"".format(str(ex)))
return False
@@ -2799,7 +2799,7 @@ class subcmd_dummy:
if not os.path.exists(repo_dir):
try:
os.makedirs(repo_dir)
except BaseException as ex:
except Exception as ex:
message_error(msg_fn, "Failed to create \"{:s}\" with error: {!r}".format(repo_dir, ex))
return False

@@ -5,7 +5,7 @@
bl_info = {
'name': 'glTF 2.0 format',
'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
"version": (4, 2, 28),
"version": (4, 2, 39),
'blender': (4, 2, 0),
'location': 'File > Import-Export',
'description': 'Import-Export as glTF 2.0',
@@ -484,9 +484,32 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
),
default=False)
export_colors: BoolProperty(
name='Dummy',
description='Keep for compatibility only',
export_vertex_color: EnumProperty(
name='Use Vertex Color',
items=(
('MATERIAL', 'Material',
'Export vertex color when used by material'),
('ACTIVE', 'Active',
'Export active vertex color'),
('NONE', 'None',
'Do not export vertex color')),
description='How to export vertex color',
default='MATERIAL'
)
export_all_vertex_colors: BoolProperty(
name='Export all vertex colors',
description=(
'Export all vertex colors, even if not used by any material. '
'If no Vertex Color is used in the mesh materials, a fake COLOR_0 will be created, '
'in order to keep material unchanged'
),
default=True
)
export_active_vertex_color_when_no_material: BoolProperty(
name='Export active vertex color when no material',
description='When there is no material on object, export active vertex color',
default=True
)
@@ -1058,6 +1081,14 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
export_settings['gltf_attributes'] = self.export_attributes
export_settings['gltf_cameras'] = self.export_cameras
export_settings['gltf_vertex_color'] = self.export_vertex_color
if self.export_vertex_color == 'NONE':
export_settings['gltf_all_vertex_colors'] = False
export_settings['gltf_active_vertex_color_when_no_material'] = False
else:
export_settings['gltf_all_vertex_colors'] = self.export_all_vertex_colors
export_settings['gltf_active_vertex_color_when_no_material'] = self.export_active_vertex_color_when_no_material
export_settings['gltf_unused_textures'] = self.export_unused_textures
export_settings['gltf_unused_images'] = self.export_unused_images
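
The collapse above means the two companion flags are only meaningful when export_vertex_color is not 'NONE'. A worked summary of the effective settings (mirrors the logic above; the helper name is illustrative, not addon code):

    def effective_vertex_color_settings(mode, all_colors=True, active_when_no_material=True):
        if mode == 'NONE':
            return (False, False)
        return (all_colors, active_when_no_material)

    assert effective_vertex_color_settings('NONE') == (False, False)
    assert effective_vertex_color_settings('MATERIAL', all_colors=False) == (False, True)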
@@ -1190,6 +1221,8 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
path_to_uri(os.path.splitext(os.path.basename(self.filepath))[0] + '.bin')
)
export_settings['warning_joint_weight_exceed_already_displayed'] = False
user_extensions = []
pre_export_callbacks = []
post_export_callbacks = []
@@ -1354,6 +1387,25 @@ def export_panel_data_mesh(layout, operator):
col = body.column()
col.prop(operator, 'export_shared_accessors')
header, sub_body = body.panel("GLTF_export_data_material_vertex_color", default_closed=True)
header.label(text="Vertex Colors")
if sub_body:
row = sub_body.row()
row.prop(operator, 'export_vertex_color')
if operator.export_vertex_color == "ACTIVE":
row = sub_body.row()
row.label(
text="Note that fully compliant glTF 2.0 engine/viewer will use it as multiplicative factor for base color.",
icon='ERROR')
row = sub_body.row()
row.label(text="If you want to use VC for any other purpose than vertex color, you should use custom attributes.")
row = sub_body.row()
row.active = operator.export_vertex_color != "NONE"
row.prop(operator, 'export_all_vertex_colors')
row = sub_body.row()
row.active = operator.export_vertex_color != "NONE"
row.prop(operator, 'export_active_vertex_color_when_no_material')
def export_panel_data_material(layout, operator):
header, body = layout.panel("GLTF_export_data_material", default_closed=True)

@@ -95,7 +95,8 @@ def get_component_type(attribute_component_type):
"FLOAT_VECTOR_4": gltf2_io_constants.ComponentType.Float,
"INT": gltf2_io_constants.ComponentType.Float, # No signed Int in glTF accessor
"FLOAT": gltf2_io_constants.ComponentType.Float,
"BOOLEAN": gltf2_io_constants.ComponentType.Float
"BOOLEAN": gltf2_io_constants.ComponentType.Float,
"UNSIGNED_BYTE": gltf2_io_constants.ComponentType.UnsignedByte
}.get(attribute_component_type)
@@ -145,7 +146,8 @@ def get_numpy_type(attribute_component_type):
"FLOAT_VECTOR_4": np.float32,
"INT": np.float32, # signed integer are not supported by glTF
"FLOAT": np.float32,
"BOOLEAN": np.float32
"BOOLEAN": np.float32,
"UNSIGNED_BYTE": np.uint8,
}.get(attribute_component_type)
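
Both tables now route UNSIGNED_BYTE attributes through uint8 rather than widening to float. For colors this relies on glTF's normalized accessors, where a stored byte v decodes as v / 255. A small numpy illustration (not addon code):

    import numpy as np

    rgba = np.array([[0.25, 0.5, 0.75, 1.0]], dtype=np.float32)
    encoded = np.round(rgba * 255).astype(np.uint8)   # quantize to bytes
    decoded = encoded.astype(np.float32) / 255.0      # what a viewer reconstructs
    assert np.allclose(rgba, decoded, atol=1 / 255)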

@@ -554,7 +554,9 @@ def __get_blender_actions(obj_uuid: str,
# Multi-strip tracks do not export correctly yet (they need to be baked),
# so skip them for now and only write single-strip tracks.
non_muted_strips = [strip for strip in track.strips if strip.action is not None and strip.mute is False]
if track.strips is None or len(non_muted_strips) != 1:
if track.strips is None or len(non_muted_strips) > 1:
# Warn if multiple strips are found, then ignore this track.
# Ignore without warning if there are no strips.
export_settings['log'].warning(
"NLA track '{}' has {} strips, but only single-strip tracks are supported in 'actions' mode.".format(
track.name, len(non_muted_strips)))

@@ -31,10 +31,10 @@ def gather_sk_sampled_keyframes(obj_uuid,
# Using this option, we miss the drivers :(
# No solution exists for now. In the future, we should be able to copy a driver
if action_name in bpy.data.actions:
channel_group, _ = get_channel_groups(
channel_group, _, _ = get_channel_groups(
obj_uuid, bpy.data.actions[action_name], export_settings, no_sample_option=True)
elif blender_obj.data.shape_keys.animation_data and blender_obj.data.shape_keys.animation_data.action:
channel_group, _ = get_channel_groups(
channel_group, _, _ = get_channel_groups(
obj_uuid, blender_obj.data.shape_keys.animation_data.action, export_settings, no_sample_option=True)
else:
channel_group = {}
@@ -47,7 +47,7 @@
channels = chan['properties']['value']
break
non_keyed_values = gather_non_keyed_values(obj_uuid, channels, None, export_settings)
non_keyed_values = gather_non_keyed_values(obj_uuid, channels, None, False, export_settings)
while frame <= end_frame:
key = Keyframe(channels, frame, None)

@@ -78,11 +78,13 @@ def __gather_skins(blender_primitive, export_settings):
# Warning for the case where we are in the same group, will be done later
# (for example, 3 weights needed, but 2 wanted by user)
if max_bone_set_index > wanted_max_bone_set_index:
export_settings['log'].warning(
"There are more than {} joint vertex influences."
"The {} with highest weight will be used (and normalized).".format(
export_settings['gltf_vertex_influences_nb'],
export_settings['gltf_vertex_influences_nb']))
if export_settings['warning_joint_weight_exceed_already_displayed'] is False:
export_settings['log'].warning(
"There are more than {} joint vertex influences."
"The {} with highest weight will be used (and normalized).".format(
export_settings['gltf_vertex_influences_nb'],
export_settings['gltf_vertex_influences_nb']))
export_settings['warning_joint_weight_exceed_already_displayed'] = True
# Take into account only the first set of 4 weights
max_bone_set_index = wanted_max_bone_set_index
@@ -107,11 +109,13 @@
idx = 4 - 1 - i
if not all(weight[:, idx]):
if warning_done is False:
export_settings['log'].warning(
"There are more than {} joint vertex influences."
"The {} with highest weight will be used (and normalized).".format(
export_settings['gltf_vertex_influences_nb'],
export_settings['gltf_vertex_influences_nb']))
if export_settings['warning_joint_weight_exceed_already_displayed'] is False:
export_settings['log'].warning(
"There are more than {} joint vertex influences."
"The {} with highest weight will be used (and normalized).".format(
export_settings['gltf_vertex_influences_nb'],
export_settings['gltf_vertex_influences_nb']))
export_settings['warning_joint_weight_exceed_already_displayed'] = True
warning_done = True
weight[:, idx] = 0.0
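
The change in both hunks is the same warn-once pattern: a flag stored in the shared export_settings dict ensures the joint-influence warning prints once per export instead of once per primitive. Reduced to its core (sketch, not addon code):

    def warn_once(export_settings, log, message,
                  key='warning_joint_weight_exceed_already_displayed'):
        # The flag is initialized to False at export start (see the
        # ExportGLTF2_Base hunk above) and flipped on first use.
        if not export_settings.get(key):
            log.warning(message)
            export_settings[key] = True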
@@ -203,6 +207,31 @@ def __gather_attribute(blender_primitive, attribute, export_settings):
type=data['data_type'],
)}
elif attribute.startswith("COLOR_") and blender_primitive["attributes"][attribute]['component_type'] == gltf2_io_constants.ComponentType.UnsignedByte:
# We are in special case where we fake a COLOR_0 attribute with UNSIGNED_BYTE
# We need to normalize it
export_user_extensions('gather_attribute_change', export_settings, attribute, data, True)
return {
attribute: gltf2_io.Accessor(
buffer_view=gltf2_io_binary_data.BinaryData(
data['data'].tobytes(),
gltf2_io_constants.BufferViewTarget.ARRAY_BUFFER),
byte_offset=None,
component_type=data['component_type'],
count=len(
data['data']),
extensions=None,
extras=None,
max=None,
min=None,
name=None,
normalized=True,