Refactor: combine insert_keyframe() and insert_key_rna() into a single function #122053

Merged
Nathan Vegdahl merged 49 commits from nathanvegdahl/blender:combine_keying_functions into main 2024-06-11 16:43:08 +02:00
51 changed files with 995 additions and 469 deletions
Showing only changes of commit 77008b1ef0

View File

@@ -18,14 +18,23 @@ bl_info = {
if "bpy" in locals():
import importlib
from . import (
bl_extension_cli,
bl_extension_local,
bl_extension_notify,
bl_extension_ops,
bl_extension_ui,
bl_extension_utils,
)
importlib.reload(bl_extension_cli)
importlib.reload(bl_extension_local)
importlib.reload(bl_extension_notify)
importlib.reload(bl_extension_ops)
importlib.reload(bl_extension_ui)
importlib.reload(bl_extension_utils)
del (
bl_extension_cli,
bl_extension_local,
bl_extension_notify,
bl_extension_ops,
bl_extension_ui,
bl_extension_utils,

View File

@@ -1088,13 +1088,8 @@ class EXTENSIONS_OT_repo_sync(Operator, _ExtCmdMixIn):
# Needed to refresh.
self.repo_directory = directory
# Lock repositories.
self.repo_lock = bl_extension_utils.RepoLock(
repo_directories=[directory],
cookie=cookie_from_session(),
)
if lock_result_any_failed_with_report(self, self.repo_lock.acquire()):
return None
# See comment for `EXTENSIONS_OT_repo_sync_all`.
repos_lock = []
cmd_batch = []
if repo_item.remote_url:
@@ -1109,6 +1104,15 @@ class EXTENSIONS_OT_repo_sync(Operator, _ExtCmdMixIn):
use_idle=is_modal,
)
)
repos_lock.append(repo_item.directory)
# Lock repositories.
self.repo_lock = bl_extension_utils.RepoLock(
repo_directories=repos_lock,
cookie=cookie_from_session(),
)
if lock_result_any_failed_with_report(self, self.repo_lock.acquire()):
return None
return bl_extension_utils.CommandBatch(
title="Sync",
@@ -1176,6 +1180,10 @@ class EXTENSIONS_OT_repo_sync_all(Operator, _ExtCmdMixIn):
self.report({'WARNING'}, str(ex))
return None
# Only remote repositories need to be locked; local repositories can refresh without being modified.
# This is essential for system repositories, which may be read-only.
repos_lock = []
cmd_batch = []
for repo_item in repos_all:
# Local only repositories should still refresh, but not run the sync.
@@ -1189,8 +1197,7 @@ class EXTENSIONS_OT_repo_sync_all(Operator, _ExtCmdMixIn):
access_token=repo_item.access_token,
use_idle=is_modal,
))
repos_lock = [repo_item.directory for repo_item in repos_all]
repos_lock.append(repo_item.directory)
# Lock repositories.
self.repo_lock = bl_extension_utils.RepoLock(

View File

@@ -47,6 +47,7 @@ __all__ = (
"RepoLockContext",
)
import abc
import json
import os
import sys
@@ -56,7 +57,6 @@ import subprocess
import time
import tomllib
from typing import (
Any,
Callable,
@@ -169,6 +169,21 @@ def file_mtime_or_none(filepath: str) -> Optional[int]:
return None
def file_mtime_or_none_with_error_fn(
filepath: str,
*,
error_fn: Callable[[Exception], None],
) -> Optional[int]:
try:
# For some reason `mypy` thinks this is a float.
return int(os.stat(filepath)[stat.ST_MTIME])
except FileNotFoundError:
pass
except Exception as ex:
error_fn(ex)
return None
def scandir_with_demoted_errors(path: str) -> Generator[os.DirEntry[str], None, None]:
try:
yield from os.scandir(path)
@@ -263,6 +278,38 @@ def repositories_validate_or_errors(repos: Sequence[str]) -> Optional[InfoItemSe
return None
def repository_iter_package_dirs(
directory: str,
*,
error_fn: Callable[[Exception], None],
) -> Generator[os.DirEntry[str], None, None]:
try:
dir_entries = os.scandir(directory)
except Exception as ex:
dir_entries = None
error_fn(ex)
for entry in (dir_entries if dir_entries is not None else ()):
# Only check directories.
if not entry.is_dir(follow_symlinks=True):
continue
dirname = entry.name
# Simply ignore these paths without any warnings (accounts for `.git`, `__pycache__`, etc).
if dirname.startswith((".", "_")):
continue
# Report any paths that cannot be used.
if not dirname.isidentifier():
error_fn(Exception("\"{:s}\" is not a supported module name, skipping".format(
os.path.join(directory, dirname)
)))
continue
yield entry
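For orientation, a minimal sketch of driving this generator (illustration only, not part of the commit; `collect_manifest_paths` and `repo_dir` are hypothetical names, and the call mirrors the call sites added further down in this file):

def collect_manifest_paths(repo_dir: str) -> list[str]:
    # Gather the manifest path of every usable package directory,
    # printing any problem the generator reports via `error_fn`.
    return [
        os.path.join(repo_dir, entry.name, PKG_MANIFEST_FILENAME_TOML)
        for entry in repository_iter_package_dirs(repo_dir, error_fn=lambda ex: print("Warning:", ex))
    ]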
# -----------------------------------------------------------------------------
# Public Stand-Alone Utilities
#
@@ -947,9 +994,288 @@ class CommandBatch:
# -----------------------------------------------------------------------------
# Public Repo Cache (non-command-line wrapper)
# Internal Repo Data Source
#
class _RepoDataSouce_ABC(metaclass=abc.ABCMeta):
"""
The purpose of this class is to be a source for the repository data.
Assumptions made by the implementation:
- Data is stored externally (such as a file-system).
- Data can be loaded in a single (blocking) operation.
- Data is small enough to fit in memory.
- It's faster to detect invalid cache than it is to load the data.
"""
__slots__ = (
)
@abc.abstractmethod
def exists(self) -> bool:
raise Exception("Caller must define")
@abc.abstractmethod
def cache_is_valid(
self,
*,
error_fn: Callable[[Exception], None],
) -> bool:
raise Exception("Caller must define")
@abc.abstractmethod
def cache_clear(self) -> None:
raise Exception("Caller must define")
@abc.abstractmethod
def cache_data(self) -> Optional[Dict[str, Dict[str, Any]]]:
raise Exception("Caller must define")
# Should not be called directly; use `data(..)`, which supports caching.
@abc.abstractmethod
def _data_load(
self,
*,
error_fn: Callable[[Exception], None],
) -> Optional[Dict[str, Dict[str, Any]]]:
raise Exception("Caller must define")
def data(
self,
*,
cache_validate: bool,
force: bool,
error_fn: Callable[[Exception], None],
) -> Optional[Dict[str, Dict[str, Any]]]:
if not self.exists():
self.cache_clear()
return None
if force:
self.cache_clear()
elif cache_validate:
if not self.cache_is_valid(error_fn=error_fn):
self.cache_clear()
if (data := self.cache_data()) is None:
data = self._data_load(error_fn=error_fn)
return data
class _RepoDataSouce_JSON(_RepoDataSouce_ABC):
__slots__ = (
"_data",
"_filepath",
"_mtime",
)
def __init__(self, directory: str):
filepath = os.path.join(directory, REPO_LOCAL_JSON)
self._filepath: str = filepath
self._mtime: int = 0
self._data: Optional[Dict[str, Dict[str, Any]]] = None
def exists(self) -> bool:
try:
return os.path.exists(self._filepath)
except Exception:
return False
def cache_is_valid(
self,
*,
error_fn: Callable[[Exception], None],
) -> bool:
if self._mtime == 0:
return False
if not self.exists():
return False
return self._mtime == file_mtime_or_none_with_error_fn(self._filepath, error_fn=error_fn)
def cache_clear(self) -> None:
self._data = None
self._mtime = 0
def cache_data(self) -> Optional[Dict[str, Dict[str, Any]]]:
return self._data
def _data_load(
self,
*,
error_fn: Callable[[Exception], None],
) -> Optional[Dict[str, Dict[str, Any]]]:
assert self.exists()
data = None
mtime = file_mtime_or_none_with_error_fn(self._filepath, error_fn=error_fn) or 0
if mtime != 0:
try:
data = json_from_filepath(self._filepath)
except Exception as ex:
error_fn(ex)
self._data = data
self._mtime = mtime
return data
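For orientation, a minimal usage sketch (illustration only; `directory` is assumed to be a repository path containing the local JSON index, and the call pattern mirrors how `_RepoCacheEntry` drives the data source later in this change):

source = _RepoDataSouce_JSON(directory)
# The first call loads and caches the JSON; later calls return the cached data
# until the file's mtime changes (checked when cache_validate=True) or force=True.
data = source.data(cache_validate=True, force=False, error_fn=lambda ex: print("Error:", ex))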
class _RepoDataSouce_TOML_FILES(_RepoDataSouce_ABC):
__slots__ = (
"_data",
"_directory",
"_mtime_for_each_package",
)
def __init__(self, directory: str):
self._directory: str = directory
self._mtime_for_each_package: Optional[Dict[str, int]] = None
self._data: Optional[Dict[str, Dict[str, Any]]] = None
def exists(self) -> bool:
try:
return os.path.isdir(self._directory)
except Exception:
return False
def cache_is_valid(
self,
*,
error_fn: Callable[[Exception], None],
) -> bool:
if self._mtime_for_each_package is None:
return False
if not self.exists():
return False
if self._mtime_for_each_package_changed(
directory=self._directory,
mtime_for_each_package=self._mtime_for_each_package,
error_fn=error_fn,
):
return False
return True
def cache_clear(self) -> None:
self._data = None
self._mtime_for_each_package = None
def cache_data(self) -> Optional[Dict[str, Dict[str, Any]]]:
return self._data
def _data_load(
self,
*,
error_fn: Callable[[Exception], None],
) -> Optional[Dict[str, Dict[str, Any]]]:
assert self.exists()
mtime_for_each_package = self._mtime_for_each_package_create(
directory=self._directory,
error_fn=error_fn,
)
data: Dict[str, Any] = {}
for dirname in mtime_for_each_package.keys():
filepath_toml = os.path.join(self._directory, dirname, PKG_MANIFEST_FILENAME_TOML)
try:
item_local = toml_from_filepath(filepath_toml)
except Exception as ex:
item_local = None
error_fn(ex)
if item_local is None:
continue
data[dirname] = item_local
# Begin: transform to list with ID's in item.
# TODO: this transform can probably be removed and the internal format can change
# to use the same structure as the actual JSON.
data = {
"version": "v1",
"blocklist": [],
"data": [
{"id": pkg_idname, **value}
for pkg_idname, value in data.items()
],
}
# End: compatibility change.
self._data = data
self._mtime_for_each_package = mtime_for_each_package
return data
@classmethod
def _mtime_for_each_package_create(
cls,
*,
directory: str,
error_fn: Callable[[Exception], None],
) -> Dict[str, int]:
# Caller must check `self.exists()`.
assert os.path.isdir(directory)
mtime_for_each_package: Dict[str, int] = {}
for entry in repository_iter_package_dirs(directory, error_fn=error_fn):
dirname = entry.name
filepath_toml = os.path.join(directory, dirname, PKG_MANIFEST_FILENAME_TOML)
try:
mtime = int(entry.stat().st_mtime)
except Exception as ex:
error_fn(ex)
mtime = 0
mtime_for_each_package[dirname] = mtime
return mtime_for_each_package
@classmethod
def _mtime_for_each_package_changed(
cls,
*,
directory: str,
mtime_for_each_package: Dict[str, int],
error_fn: Callable[[Exception], None],
) -> bool:
"""
Detect a change and return as early as possible.
Ideally this would not have to scan many files, since this could become *expensive*
with very large repositories; however, as each package has its own TOML,
there is no viable alternative.
"""
# Caller must check `self.exists()`.
assert os.path.isdir(directory)
package_count = 0
for entry in repository_iter_package_dirs(directory, error_fn=error_fn):
filename = entry.name
mtime_ref = mtime_for_each_package.get(filename)
if mtime_ref is None:
return True
filepath_toml = os.path.join(directory, filename, PKG_MANIFEST_FILENAME_TOML)
mtime_new = (file_mtime_or_none_with_error_fn(filepath_toml, error_fn=error_fn) or 0)
if mtime_new != mtime_ref:
return True
package_count += 1
if package_count != len(mtime_for_each_package):
return True
return False
# -----------------------------------------------------------------------------
# Public Repo Cache (non-command-line wrapper)
class _RepoCacheEntry:
__slots__ = (
"directory",
@@ -957,8 +1283,9 @@ class _RepoCacheEntry:
"_pkg_manifest_local",
"_pkg_manifest_remote",
"_pkg_manifest_remote_mtime",
"_pkg_manifest_remote_has_warning"
"_pkg_manifest_remote_data_source",
"_pkg_manifest_remote_has_warning",
)
def __init__(self, directory: str, remote_url: str) -> None:
@@ -968,7 +1295,10 @@ class _RepoCacheEntry:
# Manifest data per package loaded from the packages local JSON.
self._pkg_manifest_local: Optional[Dict[str, Dict[str, Any]]] = None
self._pkg_manifest_remote: Optional[Dict[str, Dict[str, Any]]] = None
self._pkg_manifest_remote_mtime = 0
self._pkg_manifest_remote_data_source: _RepoDataSouce_ABC = (
_RepoDataSouce_JSON(directory) if remote_url else
_RepoDataSouce_TOML_FILES(directory)
)
# Avoid many noisy prints.
self._pkg_manifest_remote_has_warning = False
@@ -979,116 +1309,36 @@ class _RepoCacheEntry:
check_files: bool = False,
ignore_missing: bool = False,
) -> Any:
if self._pkg_manifest_remote is not None:
if check_files:
self._json_data_refresh(error_fn=error_fn)
return self._pkg_manifest_remote
data = self._pkg_manifest_remote_data_source.data(
cache_validate=check_files,
force=False,
error_fn=error_fn,
)
if data is not self._pkg_manifest_remote:
self._pkg_manifest_remote = data
filepath_json = os.path.join(self.directory, REPO_LOCAL_JSON)
try:
self._pkg_manifest_remote = json_from_filepath(filepath_json)
except Exception as ex:
self._pkg_manifest_remote = None
error_fn(ex)
self._pkg_manifest_local = None
if self._pkg_manifest_remote is not None:
json_mtime = file_mtime_or_none(filepath_json)
assert json_mtime is not None
self._pkg_manifest_remote_mtime = json_mtime
self._pkg_manifest_local = None
self._pkg_manifest_remote_has_warning = False
else:
if data is None:
if not ignore_missing:
# NOTE: this warning will occur when setting up a new repository.
# It could be removed but it's also useful to know when the JSON is missing.
if self.remote_url:
if not self._pkg_manifest_remote_has_warning:
print("Repository file:", filepath_json, "not found, sync required!")
print("Repository data:", self.directory, "not found, sync required!")
self._pkg_manifest_remote_has_warning = True
return self._pkg_manifest_remote
def _json_data_refresh_from_toml(
self,
*,
error_fn: Callable[[Exception], None],
force: bool = False,
) -> None:
assert self.remote_url == ""
# Since there is no remote repo the ID name is defined by the directory name only.
local_json_data = self.pkg_manifest_from_local_ensure(error_fn=error_fn)
if local_json_data is None:
return
filepath_json = os.path.join(self.directory, REPO_LOCAL_JSON)
# We might want to adjust where this happens, create the directory here
# because this could be a fresh repo might not have been initialized until now.
directory = os.path.dirname(filepath_json)
try:
# A symbolic-link that's followed (good), if it exists and is a file an error is raised here and returned.
if not os.path.isdir(directory):
os.makedirs(directory, exist_ok=True)
except Exception as ex:
error_fn(ex)
return
del directory
with open(filepath_json, "w", encoding="utf-8") as fh:
# Indent because it can be useful to check this file if there are any issues.
# Begin: transform to list with ID's in item.
# TODO: this transform can probably be removed and the internal format can change
# to use the same structure as the actual JSON.
local_json_data_compat = {
"version": "v1",
"blocklist": [],
"data": [
{"id": pkg_idname, **value}
for pkg_idname, value in local_json_data.items()
],
}
# End: compatibility change.
fh.write(json.dumps(local_json_data_compat, indent=2))
def _json_data_refresh(
self,
*,
error_fn: Callable[[Exception], None],
force: bool = False,
) -> None:
if force or (self._pkg_manifest_remote is None) or (self._pkg_manifest_remote_mtime == 0):
self._pkg_manifest_remote = None
self._pkg_manifest_remote_mtime = 0
self._pkg_manifest_local = None
# Detect a local-only repository, there is no server to sync with
# so generate the JSON from the TOML files.
# While redundant this avoids having support multiple code-paths for local-only/remote repos.
if self.remote_url == "":
self._json_data_refresh_from_toml(error_fn=error_fn, force=force)
filepath_json = os.path.join(self.directory, REPO_LOCAL_JSON)
mtime_test = file_mtime_or_none(filepath_json)
if self._pkg_manifest_remote is not None:
# TODO: check the time of every installed package.
if mtime_test == self._pkg_manifest_remote_mtime:
return
try:
self._pkg_manifest_remote = json_from_filepath(filepath_json)
except Exception as ex:
self._pkg_manifest_remote = None
error_fn(ex)
self._pkg_manifest_local = None
if self._pkg_manifest_remote is not None:
json_mtime = file_mtime_or_none(filepath_json)
assert json_mtime is not None
self._pkg_manifest_remote_mtime = json_mtime
self._pkg_manifest_remote_data_source.data(
cache_validate=True,
force=force,
error_fn=error_fn,
)
def pkg_manifest_from_local_ensure(
self,
@@ -1105,31 +1355,10 @@ class _RepoCacheEntry:
error_fn=error_fn,
)
pkg_manifest_local = {}
try:
dir_entries = os.scandir(self.directory)
except Exception as ex:
dir_entries = None
error_fn(ex)
for entry in (dir_entries if dir_entries is not None else ()):
# Only check directories.
if not entry.is_dir(follow_symlinks=True):
continue
filename = entry.name
# Simply ignore these paths without any warnings (accounts for `.git`, `__pycache__`, etc).
if filename.startswith((".", "_")):
continue
# Report any paths that cannot be used.
if not filename.isidentifier():
error_fn(Exception("\"{:s}\" is not a supported module name, skipping".format(
os.path.join(self.directory, filename)
)))
continue
filepath_toml = os.path.join(self.directory, filename, PKG_MANIFEST_FILENAME_TOML)
for entry in repository_iter_package_dirs(self.directory, error_fn=error_fn):
dirname = entry.name
filepath_toml = os.path.join(self.directory, dirname, PKG_MANIFEST_FILENAME_TOML)
try:
item_local = toml_from_filepath(filepath_toml)
except Exception as ex:
@@ -1142,14 +1371,14 @@ class _RepoCacheEntry:
pkg_idname = item_local["id"]
if has_remote:
# This should never happen, the user may have manually renamed a directory.
if pkg_idname != filename:
if pkg_idname != dirname:
print("Skipping package with inconsistent name: \"{:s}\" mismatch \"{:s}\"".format(
filename,
dirname,
pkg_idname,
))
continue
else:
pkg_idname = filename
pkg_idname = dirname
# Validate so local-only packages with invalid manifests aren't used.
if (error_str := pkg_manifest_dict_is_valid_or_error(item_local, from_repo=False, strict=False)):

View File

@@ -94,12 +94,5 @@
"system_demo_mode": ("demo_mode", "Demo Mode"),
"system_property_chart": ("property_chart", "Property Chart"),
"vdm_brush_baker": ("vdm_brush_baker", "VDM Brush Baker"),
# These were built-in but not mapped to an extension ("install" won't be shown in the UI).
"io_coat3D": ("", "3D-Coat Applink"), # External dependencies.
"depsgraph_debug": ("", "Dependency Graph Debug"), # External dependencies.
"io_import_images_as_planes": ("", "Import Images as Planes"), # Integrated with Blender.
"io_mesh_stl": ("", "STL Format"), # Was on by default.
"io_scene_x3d": ("", "Web3D X3D/VRML2 format"), # Was on by default.
},
}

View File

@@ -11,3 +11,12 @@ options.use_denoise = True
options.denoise_spatial = True
options.denoise_temporal = True
options.denoise_bilateral = True
eevee.fast_gi_method = 'GLOBAL_ILLUMINATION'
eevee.fast_gi_resolution = '2'
eevee.fast_gi_ray_count = 2
eevee.fast_gi_step_count = 8
eevee.fast_gi_quality = 0.25
eevee.fast_gi_distance = 0.0
eevee.fast_gi_thickness_near = 0.25
eevee.fast_gi_thickness_far = 0.7853981852531433
eevee.fast_gi_bias = 0.05000000074505806

View File

@@ -552,6 +552,15 @@ class AddPresetEEVEERaytracing(AddPresetBase, Operator):
"options.denoise_spatial",
"options.denoise_temporal",
"options.denoise_bilateral",
"eevee.fast_gi_method",
"eevee.fast_gi_resolution",
"eevee.fast_gi_ray_count",
"eevee.fast_gi_step_count",
"eevee.fast_gi_quality",
"eevee.fast_gi_distance",
"eevee.fast_gi_thickness_near",
"eevee.fast_gi_thickness_far",
"eevee.fast_gi_bias",
]
preset_subdir = "eevee/raytracing"

View File

@@ -596,6 +596,14 @@ static_assert(sizeof(ChannelBag) == sizeof(::ActionChannelBag),
*/
bool assign_animation(Action &anim, ID &animated_id);
/**
* Return whether the given Action can be assigned to the ID.
*
* This always returns `true` for layered Actions. For legacy Actions it
* returns `true` if the Action's `idroot` matches the ID.
*/
bool is_action_assignable_to(const bAction *dna_action, ID_Type id_code);
/**
* Ensure that this ID is no longer animated.
*/

View File

@@ -17,7 +17,10 @@
#include "BLI_bit_span.hh"
#include "BLI_string_ref.hh"
#include "BLI_vector.hh"
#include "DNA_anim_types.h"
#include "DNA_windowmanager_types.h"
#include "RNA_path.hh"
#include "RNA_types.hh"
@@ -85,7 +88,7 @@ class CombinedKeyingResult {
bool has_errors() const;
void generate_reports(ReportList *reports);
void generate_reports(ReportList *reports, eReportType report_level = RPT_ERROR);
};
/**

View File

@@ -616,12 +616,38 @@ bool Binding::has_idtype() const
bool assign_animation(Action &anim, ID &animated_id)
{
BLI_assert(anim.is_action_layered());
unassign_animation(animated_id);
Binding *binding = anim.find_suitable_binding_for(animated_id);
return anim.assign_id(binding, animated_id);
}
bool is_action_assignable_to(const bAction *dna_action, const ID_Type id_code)
{
if (!dna_action) {
/* Clearing the Action is always possible. */
return true;
}
if (dna_action->idroot == 0) {
/* This is either a never-assigned legacy action, or a layered action. In
* any case, it can be assigned to any ID. */
return true;
}
const animrig::Action &action = dna_action->wrap();
if (!action.is_action_layered()) {
/* Legacy Actions can only be assigned if their idroot matches. Empty
* Actions are considered both 'layered' and 'legacy' at the same time,
* hence this condition checks for 'not layered' rather than 'legacy'. */
return action.idroot == id_code;
}
return true;
}
void unassign_animation(ID &animated_id)
{
Action *anim = get_animation(animated_id);

View File

@@ -505,4 +505,46 @@ TEST_F(ActionLayersTest, KeyframeStrip__keyframe_insert)
EXPECT_EQ(1, channels->fcurves()[1]->totvert);
}
TEST_F(ActionLayersTest, is_action_assignable_to)
{
EXPECT_TRUE(is_action_assignable_to(nullptr, ID_OB))
<< "nullptr Actions should be assignable to any type.";
EXPECT_TRUE(is_action_assignable_to(nullptr, ID_CA))
<< "nullptr Actions should be assignable to any type.";
EXPECT_TRUE(is_action_assignable_to(anim, ID_OB))
<< "Empty Actions should be assignable to any type.";
EXPECT_TRUE(is_action_assignable_to(anim, ID_CA))
<< "Empty Actions should be assignable to any type.";
/* Make the Action a legacy one. */
FCurve fake_fcurve;
BLI_addtail(&anim->curves, &fake_fcurve);
ASSERT_FALSE(anim->is_empty());
ASSERT_TRUE(anim->is_action_legacy());
ASSERT_EQ(0, anim->idroot);
EXPECT_TRUE(is_action_assignable_to(anim, ID_OB))
<< "Legacy Actions with idroot=0 should be assignable to any type.";
EXPECT_TRUE(is_action_assignable_to(anim, ID_CA))
<< "Legacy Actions with idroot=0 should be assignable to any type.";
/* Set the legacy idroot. */
anim->idroot = ID_CA;
EXPECT_FALSE(is_action_assignable_to(anim, ID_OB))
<< "Legacy Actions with idroot=ID_CA should NOT be assignable to ID_OB.";
EXPECT_TRUE(is_action_assignable_to(anim, ID_CA))
<< "Legacy Actions with idroot=CA should be assignable to ID_CA.";
/* Make the Action a layered one. */
BLI_poptail(&anim->curves);
anim->layer_add("layer");
ASSERT_EQ(0, anim->idroot) << "Adding a layer should clear the idroot.";
EXPECT_TRUE(is_action_assignable_to(anim, ID_OB))
<< "Layered Actions should be assignable to any type.";
EXPECT_TRUE(is_action_assignable_to(anim, ID_CA))
<< "Layered Actions should be assignable to any type.";
}
} // namespace blender::animrig::tests

View File

@@ -82,7 +82,7 @@ bool CombinedKeyingResult::has_errors() const
return false;
}
void CombinedKeyingResult::generate_reports(ReportList *reports)
void CombinedKeyingResult::generate_reports(ReportList *reports, const eReportType report_level)
{
if (!this->has_errors() && this->get_count(SingleKeyingResult::SUCCESS) == 0) {
BKE_reportf(
@@ -173,7 +173,7 @@ void CombinedKeyingResult::generate_reports(ReportList *reports)
}
if (errors.size() == 1) {
BKE_report(reports, RPT_ERROR, errors[0].c_str());
BKE_report(reports, report_level, errors[0].c_str());
return;
}
@@ -181,7 +181,7 @@ void CombinedKeyingResult::generate_reports(ReportList *reports)
for (const std::string &error : errors) {
error_message.append(fmt::format("\n- {}", error));
}
BKE_report(reports, RPT_ERROR, error_message.c_str());
BKE_report(reports, report_level, error_message.c_str());
}
const std::optional<StringRefNull> default_channel_group_for_path(
@@ -968,11 +968,9 @@ CombinedKeyingResult insert_keyframes(Main *bmain,
BLI_assert(action != nullptr);
if (USER_EXPERIMENTAL_TEST(&U, use_animation_baklava) && action->wrap().is_action_layered()) {
/* TODO: Don't hardcode key settings. */
KeyframeSettings key_settings;
KeyframeSettings key_settings = get_keyframe_settings(
(insert_key_flags & INSERTKEY_NO_USERPREF) == 0);
key_settings.keyframe_type = key_type;
key_settings.handle = HD_AUTO_ANIM;
key_settings.interpolation = BEZT_IPO_BEZ;
return insert_key_layered_action(action->wrap(),
adt->binding_handle,
struct_pointer,

View File

@@ -532,7 +532,7 @@ TEST_F(KeyframingTest, insert_keyframes__layered_action__only_available)
EXPECT_EQ(0, result_1.get_count(SingleKeyingResult::SUCCESS));
/* It's unclear why AnimData and an Action should be created if keying fails
* here. It may even be undesireable. These checks are just here to ensure no
* here. It may even be undesirable. These checks are just here to ensure no
* *unintentional* changes in behavior. */
ASSERT_NE(nullptr, object->adt);
ASSERT_NE(nullptr, object->adt->action);

View File

@@ -300,7 +300,7 @@ void BLF_draw_buffer(int fontid, const char *str, size_t str_len, ResultBLF *r_i
*
* \note called from a thread, so it bypasses the normal BLF_* api (which isn't thread-safe).
*/
bool BLF_thumb_preview(const char *filename, unsigned char *buf, int w, int h, int channels)
bool BLF_thumb_preview(const char *filepath, unsigned char *buf, int w, int h, int channels)
ATTR_NONNULL();
/* `blf_default.cc` */

View File

@@ -305,7 +305,7 @@ static const char32_t *blf_get_sample_text(FT_Face face)
return sample;
}
bool BLF_thumb_preview(const char *filename, uchar *buf, int w, int h, int /*channels*/)
bool BLF_thumb_preview(const char *filepath, uchar *buf, int w, int h, int /*channels*/)
{
/* Use own FT_Library and direct FreeType calls as this is called from multiple threads. */
FT_Library ft_lib = nullptr;
@@ -314,7 +314,7 @@ bool BLF_thumb_preview(const char *filename, uchar *buf, int w, int h, int /*cha
}
FT_Face face;
if (FT_New_Face(ft_lib, filename, 0, &face) != FT_Err_Ok) {
if (FT_New_Face(ft_lib, filepath, 0, &face) != FT_Err_Ok) {
FT_Done_FreeType(ft_lib);
return false;
}

View File

@@ -40,7 +40,6 @@ typedef enum AttrDomainMask {
ENUM_OPERATORS(AttrDomainMask, ATTR_DOMAIN_MASK_ALL);
enum class AttributeOwnerType {
None = 0,
Mesh,
PointCloud,
Curves,
@@ -48,7 +47,7 @@ enum class AttributeOwnerType {
};
class AttributeOwner {
AttributeOwnerType type_ = AttributeOwnerType::None;
AttributeOwnerType type_;
void *ptr_ = nullptr;
public:

View File

@@ -1344,6 +1344,7 @@ void BKE_pose_channel_copy_data(bPoseChannel *pchan, const bPoseChannel *pchan_f
copy_v3_v3(pchan->custom_scale_xyz, pchan_from->custom_scale_xyz);
copy_v3_v3(pchan->custom_translation, pchan_from->custom_translation);
copy_v3_v3(pchan->custom_rotation_euler, pchan_from->custom_rotation_euler);
pchan->custom_shape_wire_width = pchan_from->custom_shape_wire_width;
pchan->drawflag = pchan_from->drawflag;
}

View File

@@ -67,33 +67,33 @@ AttributeOwnerType AttributeOwner::type() const
bool AttributeOwner::is_valid() const
{
return ptr_ != nullptr && type_ != AttributeOwnerType::None;
return ptr_ != nullptr;
}
Mesh *AttributeOwner::get_mesh() const
{
BLI_assert(ptr_ != nullptr);
BLI_assert(this->is_valid());
BLI_assert(type_ == AttributeOwnerType::Mesh);
return reinterpret_cast<Mesh *>(ptr_);
}
PointCloud *AttributeOwner::get_pointcloud() const
{
BLI_assert(ptr_ != nullptr);
BLI_assert(this->is_valid());
BLI_assert(type_ == AttributeOwnerType::PointCloud);
return reinterpret_cast<PointCloud *>(ptr_);
}
Curves *AttributeOwner::get_curves() const
{
BLI_assert(ptr_ != nullptr);
BLI_assert(this->is_valid());
BLI_assert(type_ == AttributeOwnerType::Curves);
return reinterpret_cast<Curves *>(ptr_);
}
GreasePencil *AttributeOwner::get_grease_pencil() const
{
BLI_assert(ptr_ != nullptr);
BLI_assert(this->is_valid());
BLI_assert(type_ == AttributeOwnerType::GreasePencil);
return reinterpret_cast<GreasePencil *>(ptr_);
}
@@ -153,9 +153,6 @@ static std::array<DomainInfo, ATTR_DOMAIN_NUM> get_domains(const AttributeOwner
info[int(AttrDomain::Layer)].length = grease_pencil->layers().size();
break;
}
case AttributeOwnerType::None: {
break;
}
}
return info;
@@ -186,9 +183,6 @@ static std::optional<blender::bke::MutableAttributeAccessor> get_attribute_acces
GreasePencil &grease_pencil = *owner.get_grease_pencil();
return grease_pencil.attributes_for_write();
}
case AttributeOwnerType::None: {
break;
}
}
return {};
}
@@ -770,8 +764,6 @@ bool BKE_attribute_required(const AttributeOwner &owner, const char *name)
return BKE_mesh_attribute_required(name);
case AttributeOwnerType::GreasePencil:
return false;
case AttributeOwnerType::None:
break;
}
return false;
}
@@ -834,9 +826,6 @@ int *BKE_attributes_active_index_p(AttributeOwner &owner)
case AttributeOwnerType::GreasePencil: {
return &(owner.get_grease_pencil())->attributes_active_index;
}
case AttributeOwnerType::None: {
break;
}
}
return nullptr;
}

View File

@@ -344,6 +344,60 @@ static void versioning_eevee_shadow_settings(Object *object)
SET_FLAG_FROM_TEST(object->visibility_flag, hide_shadows, OB_HIDE_SHADOW);
}
static void versioning_eevee_material_shadow_none(Material *material)
{
if (!material->use_nodes || material->nodetree == nullptr) {
return;
}
bNodeTree *ntree = material->nodetree;
bNode *output_node = version_eevee_output_node_get(ntree, SH_NODE_OUTPUT_MATERIAL);
if (output_node == nullptr) {
return;
}
bNodeSocket *out_sock = blender::bke::nodeFindSocket(output_node, SOCK_IN, "Surface");
bNode *mix_node = blender::bke::nodeAddNode(nullptr, ntree, "ShaderNodeMixShader");
STRNCPY(mix_node->label, "Disable Shadow");
mix_node->flag |= NODE_HIDDEN;
mix_node->parent = output_node->parent;
mix_node->locx = output_node->locx;
mix_node->locy = output_node->locy - output_node->height - 120;
bNodeSocket *mix_fac = static_cast<bNodeSocket *>(BLI_findlink(&mix_node->inputs, 0));
bNodeSocket *mix_in_1 = static_cast<bNodeSocket *>(BLI_findlink(&mix_node->inputs, 1));
bNodeSocket *mix_in_2 = static_cast<bNodeSocket *>(BLI_findlink(&mix_node->inputs, 2));
bNodeSocket *mix_out = static_cast<bNodeSocket *>(BLI_findlink(&mix_node->outputs, 0));
if (out_sock->link != nullptr) {
blender::bke::nodeAddLink(
ntree, out_sock->link->fromnode, out_sock->link->fromsock, mix_node, mix_in_1);
blender::bke::nodeRemLink(ntree, out_sock->link);
}
blender::bke::nodeAddLink(ntree, mix_node, mix_out, output_node, out_sock);
bNode *lp_node = blender::bke::nodeAddNode(nullptr, ntree, "ShaderNodeLightPath");
lp_node->flag |= NODE_HIDDEN;
lp_node->parent = output_node->parent;
lp_node->locx = output_node->locx;
lp_node->locy = mix_node->locy + 35;
bNodeSocket *is_shadow = blender::bke::nodeFindSocket(lp_node, SOCK_OUT, "Is Shadow Ray");
blender::bke::nodeAddLink(ntree, lp_node, is_shadow, mix_node, mix_fac);
/* Hide unconnected sockets for a cleaner look. */
LISTBASE_FOREACH (bNodeSocket *, sock, &lp_node->outputs) {
if (sock != is_shadow) {
sock->flag |= SOCK_HIDDEN;
}
}
bNode *bsdf_node = blender::bke::nodeAddNode(nullptr, ntree, "ShaderNodeBsdfTransparent");
bsdf_node->flag |= NODE_HIDDEN;
bsdf_node->parent = output_node->parent;
bsdf_node->locx = output_node->locx;
bsdf_node->locy = mix_node->locy - 35;
bNodeSocket *bsdf_out = blender::bke::nodeFindSocket(bsdf_node, SOCK_OUT, "BSDF");
blender::bke::nodeAddLink(ntree, bsdf_node, bsdf_out, mix_node, mix_in_2);
}
/**
* Represents a source of transparency inside the closure part of a material node-tree.
* Sources can be combined together down the tree to figure out where the source of the alpha is.
@@ -846,18 +900,15 @@ void do_versions_after_linking_400(FileData *fd, Main *bmain)
Scene *scene = static_cast<Scene *>(bmain->scenes.first);
bool scene_uses_eevee_legacy = scene && STREQ(scene->r.engine, RE_engine_id_BLENDER_EEVEE);
if (scene_uses_eevee_legacy) {
LISTBASE_FOREACH (Material *, material, &bmain->materials) {
LISTBASE_FOREACH (Material *, material, &bmain->materials) {
if (scene_uses_eevee_legacy) {
if (!material->use_nodes || material->nodetree == nullptr) {
continue;
/* Nothing to version. */
}
if (ELEM(material->blend_method, MA_BM_HASHED, MA_BM_BLEND)) {
else if (ELEM(material->blend_method, MA_BM_HASHED, MA_BM_BLEND)) {
/* Compatible modes. Nothing to change. */
continue;
}
if (material->blend_shadow == MA_BS_NONE) {
else if (material->blend_shadow == MA_BS_NONE) {
/* No need to match the surface since shadows are disabled. */
}
else if (material->blend_shadow == MA_BS_SOLID) {
@@ -866,26 +917,36 @@ void do_versions_after_linking_400(FileData *fd, Main *bmain)
else if ((material->blend_shadow == MA_BS_CLIP && material->blend_method != MA_BM_CLIP) ||
(material->blend_shadow == MA_BS_HASHED))
{
BLO_reportf_wrap(fd->reports,
RPT_WARNING,
RPT_("Couldn't convert material %s because of different Blend Mode "
"and Shadow Mode\n"),
material->id.name + 2);
continue;
}
/* TODO(fclem): Check if threshold is driven or has animation. Bail out if needed? */
float threshold = (material->blend_method == MA_BM_CLIP) ? material->alpha_threshold :
2.0f;
if (!versioning_eevee_material_blend_mode_settings(material->nodetree, threshold)) {
BLO_reportf_wrap(
fd->reports,
RPT_WARNING,
RPT_("Couldn't convert material %s because of non-trivial alpha blending\n"),
RPT_("Material %s could not be converted because of different Blend Mode "
"and Shadow Mode (need manual adjustment)\n"),
material->id.name + 2);
}
else {
/* TODO(fclem): Check if threshold is driven or has animation. Bail out if needed? */
float threshold = (material->blend_method == MA_BM_CLIP) ? material->alpha_threshold :
2.0f;
if (!versioning_eevee_material_blend_mode_settings(material->nodetree, threshold)) {
BLO_reportf_wrap(fd->reports,
RPT_WARNING,
RPT_("Material %s could not be converted because of non-trivial "
"alpha blending (need manual adjustment)\n"),
material->id.name + 2);
}
}
if (material->blend_shadow == MA_BS_NONE) {
versioning_eevee_material_shadow_none(material);
}
/* Set blend_mode & blend_shadow for forward compatibility. */
material->blend_method = (material->blend_method != MA_BM_BLEND) ? MA_BM_HASHED :
MA_BM_BLEND;
material->blend_shadow = (material->blend_shadow == MA_BS_SOLID) ? MA_BS_SOLID :
MA_BS_HASHED;
}
}
}
@@ -3845,9 +3906,7 @@ void blo_do_versions_400(FileData *fd, Library * /*lib*/, Main *bmain)
if (!MAIN_VERSION_FILE_ATLEAST(bmain, 402, 31)) {
bool only_uses_eevee_legacy_or_workbench = true;
LISTBASE_FOREACH (Scene *, scene, &bmain->scenes) {
if (!(STREQ(scene->r.engine, RE_engine_id_BLENDER_EEVEE) ||
STREQ(scene->r.engine, RE_engine_id_BLENDER_WORKBENCH)))
{
if (!STR_ELEM(scene->r.engine, RE_engine_id_BLENDER_EEVEE, RE_engine_id_BLENDER_WORKBENCH)) {
only_uses_eevee_legacy_or_workbench = false;
}
}

View File

@@ -982,6 +982,21 @@ void blo_do_versions_userdef(UserDef *userdef)
BKE_preferences_extension_repo_add_default_system(userdef);
}
if (!USER_VERSION_ATLEAST(402, 58)) {
/* Remove add-ons which are no longer bundled by default
* and have no upgrade path to extensions in the UI. */
const char *addon_modules[] = {
"depsgraph_debug",
"io_coat3D",
"io_import_images_as_planes",
"io_mesh_stl",
"io_scene_x3d",
};
for (int i = 0; i < ARRAY_SIZE(addon_modules); i++) {
BKE_addon_remove_safe(&userdef->addons, addon_modules[i]);
}
}
/**
* Always bump subversion in BKE_blender_version.h when adding versioning
* code here, and wrap it inside a USER_VERSION_ATLEAST check.
