Alembic/USD: use geometry sets to import data #115623

Merged
Jesse Yurkovich merged 14 commits from kevindietrich/blender:abc_usd_geometry_sets_review into main 2024-02-28 03:02:48 +01:00
339 changed files with 6912 additions and 3759 deletions
Showing only changes of commit 2c87cafca1


@@ -279,5 +279,6 @@ StatementMacros:
MacroBlockBegin: "^OSL_CLOSURE_STRUCT_BEGIN$"
MacroBlockEnd: "^OSL_CLOSURE_STRUCT_END$"
# Ensure new line at the end of source files.
# Ensure single new line at the end of source files.
InsertNewlineAtEOF: True
KeepEmptyLinesAtEOF: False

.gitignore vendored

@@ -56,6 +56,7 @@ waveletNoiseTile.bin
# External repositories.
/scripts/addons/
/scripts/addons_contrib/
/tests/benchmarks/
# Ignore old submodules directories.
# Eventually need to get rid of those, but for the first time of transition

.gitmodules vendored Normal file

@@ -0,0 +1,29 @@
[submodule "lib/linux_x64"]
update = none
path = lib/linux_x64
url = https://projects.blender.org/blender/lib-linux_x64.git
branch = main
[submodule "lib/macos_arm64"]
update = none
path = lib/macos_arm64
url = https://projects.blender.org/blender/lib-macos_arm64.git
branch = main
[submodule "lib/macos_x64"]
update = none
path = lib/macos_x64
url = https://projects.blender.org/blender/lib-macos_x64.git
branch = main
[submodule "lib/windows_x64"]
update = none
path = lib/windows_x64
url = https://projects.blender.org/blender/lib-windows_x64.git
branch = main
[submodule "release/datafiles/assets"]
path = release/datafiles/assets
url = https://projects.blender.org/blender/blender-assets.git
branch = main
[submodule "tests/data"]
update = none
path = tests/data
url = https://projects.blender.org/blender/blender-test-data.git
branch = main
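These library and test-data submodules are registered with `update = none`, so a plain `git submodule update` leaves them untouched; the updated tooling first flips the submodule's update mode to `checkout`, then checks it out with LFS smudging disabled, and finally pulls the LFS content (see make_update.py and make_utils.py below). A minimal Python sketch of that flow, using `lib/linux_x64` as an example submodule:

import os
import subprocess

submodule = "lib/linux_x64"  # example; any submodule path from above works

# Step 1: enable the submodule and check it out without LFS smudging.
subprocess.check_call(
    ["git", "config", "--local", f"submodule.{submodule}.update", "checkout"])
subprocess.check_call(
    ["git", "submodule", "update", "--init", "--progress", submodule],
    env={**os.environ, "GIT_LFS_SKIP_SMUDGE": "1"})

# Step 2: fetch the actual LFS file contents with visible progress.
subprocess.check_call(["git", "-C", submodule, "lfs", "pull"])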


@@ -259,6 +259,28 @@ else()
set(WITH_UNITY_BUILD OFF)
endif()
if(COMMAND target_precompile_headers)
# Disabling is needed for `./tools/utils_maintenance/code_clean.py` to function.
option(WITH_COMPILER_PRECOMPILED_HEADERS "\
Use pre-compiled headers to speed up compilation."
ON
)
mark_as_advanced(WITH_COMPILER_PRECOMPILED_HEADERS)
if(WITH_CLANG_TIDY AND CMAKE_COMPILER_IS_GNUCC)
if(WITH_COMPILER_PRECOMPILED_HEADERS)
message(STATUS
"Clang-Tidy and GCC precompiled headers are incompatible, disabling precompiled headers"
)
set(WITH_COMPILER_PRECOMPILED_HEADERS OFF)
endif()
endif()
if(NOT WITH_COMPILER_PRECOMPILED_HEADERS)
set(CMAKE_DISABLE_PRECOMPILE_HEADERS ON)
endif()
endif()
option(WITH_IK_ITASC "\
Enable ITASC IK solver (only disable for development & for incompatible C++ compilers)"
ON
@@ -759,8 +781,10 @@ if(WIN32)
option(WITH_TBB_MALLOC_PROXY "Enable the TBB malloc replacement" ON)
endif()
option(WITH_EXPERIMENTAL_FEATURES "Enable experimental features" ON)
# This should be turned off when Blender enters beta/rc/release
if("${BLENDER_VERSION_CYCLE}" STREQUAL "alpha")
if("${BLENDER_VERSION_CYCLE}" STREQUAL "alpha" AND WITH_EXPERIMENTAL_FEATURES)
set(WITH_EXPERIMENTAL_FEATURES ON)
else()
set(WITH_EXPERIMENTAL_FEATURES OFF)


@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: GPL-2.0-or-later
# This Makefile does an out-of-source CMake build in ../build_`OS`_`CPU`
# This Makefile does an out-of-source CMake build in ../build_`OS`
# eg:
# ../build_linux_i386
# This is for users who like to configure & build blender with a single command.
@@ -35,7 +35,7 @@ Other Convenience Targets
* deps: Build library dependencies (intended only for platform maintainers).
The existence of locally built dependencies overrides the pre-built dependencies from subversion.
These must be manually removed from '../lib/' to go back to using the pre-compiled libraries.
These must be manually removed from 'lib/' to go back to using the pre-compiled libraries.
Project Files
Generate project files for development environments.
@@ -165,6 +165,16 @@ OS:=$(shell uname -s)
OS_NCASE:=$(shell uname -s | tr '[A-Z]' '[a-z]')
CPU:=$(shell uname -m)
# Use our OS and CPU architecture naming conventions.
ifeq ($(CPU),x86_64)
CPU:=x64
endif
ifeq ($(OS_NCASE),darwin)
OS_LIBDIR:=macos
else
OS_LIBDIR:=$(OS_NCASE)
endif
# Source and Build DIR's
BLENDER_DIR:=$(shell pwd -P)
@@ -186,26 +196,13 @@ ifndef DEPS_BUILD_DIR
endif
ifndef DEPS_INSTALL_DIR
DEPS_INSTALL_DIR:=$(shell dirname "$(BLENDER_DIR)")/lib/$(OS_NCASE)
# Add processor type to directory name, except for darwin x86_64
# which by convention does not have it.
ifeq ($(OS_NCASE),darwin)
ifneq ($(CPU),x86_64)
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
endif
else
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
endif
DEPS_INSTALL_DIR:=$(shell dirname "$(BLENDER_DIR)")/lib/$(OS_LIBDIR)_$(CPU)
endif
# Set the LIBDIR, an empty string when not found.
LIBDIR:=$(wildcard ../lib/${OS_NCASE}_${CPU})
LIBDIR:=$(wildcard $(BLENDER_DIR)/lib/${OS_LIBDIR}_${CPU})
ifeq (, $(LIBDIR))
LIBDIR:=$(wildcard ../lib/${OS_NCASE}_${CPU}_glibc_228)
endif
ifeq (, $(LIBDIR))
LIBDIR:=$(wildcard ../lib/${OS_NCASE})
LIBDIR:=$(wildcard $(BLENDER_DIR)/lib/${OS_LIBDIR})
endif
# Find the newest Python version bundled in `LIBDIR`.


@@ -133,10 +133,19 @@ BUILD_MANDATORY_SUBPACKAGES = (
DISTRO_ID_ARCH: "base-devel",
},
),
Package(name="Git",
Package(name="Git", is_group=True,
sub_packages=(
Package(name="Git LFS",
distro_package_names={DISTRO_ID_DEBIAN: "git-lfs",
DISTRO_ID_FEDORA: "git-lfs",
DISTRO_ID_SUSE: "git-lfs",
DISTRO_ID_ARCH: "git-lfs",
},
),
),
distro_package_names={DISTRO_ID_DEBIAN: "git",
DISTRO_ID_FEDORA: "git",
DISTRO_ID_SUSE: None,
DISTRO_ID_SUSE: "git",
DISTRO_ID_ARCH: "git",
},
),


@@ -49,18 +49,17 @@ endif()
if(NOT DEFINED LIBDIR)
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin)
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/macos_x64)
else()
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin_${CMAKE_OSX_ARCHITECTURES})
endif()
else()
if(FIRST_RUN)
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/macos_${CMAKE_OSX_ARCHITECTURES})
endif()
endif()
if(NOT EXISTS "${LIBDIR}/")
if(NOT EXISTS "${LIBDIR}/.git")
message(FATAL_ERROR "Mac OSX requires pre-compiled libs at: '${LIBDIR}'")
endif()
if(FIRST_RUN)
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
endif()
# Avoid searching for headers since this would otherwise override our lib
# directory as well as PYTHON_ROOT_DIR.


@@ -16,13 +16,13 @@ else()
set(LIBDIR_NATIVE_ABI ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_NAME})
# Path to precompiled libraries with known glibc 2.28 ABI.
set(LIBDIR_GLIBC228_ABI ${CMAKE_SOURCE_DIR}/../lib/linux_x86_64_glibc_228)
set(LIBDIR_GLIBC228_ABI ${CMAKE_SOURCE_DIR}/lib/linux_x64)
# Choose the best suitable libraries.
if(EXISTS ${LIBDIR_NATIVE_ABI})
set(LIBDIR ${LIBDIR_NATIVE_ABI})
set(WITH_LIBC_MALLOC_HOOK_WORKAROUND TRUE)
elseif(EXISTS ${LIBDIR_GLIBC228_ABI})
elseif(EXISTS "${LIBDIR_GLIBC228_ABI}/.git")
set(LIBDIR ${LIBDIR_GLIBC228_ABI})
if(WITH_MEM_JEMALLOC)
# jemalloc provides malloc hooks.


@@ -266,23 +266,23 @@ if(NOT DEFINED LIBDIR)
# Setup 64bit and 64bit windows systems
if(CMAKE_CL_64)
message(STATUS "64 bit compiler detected.")
set(LIBDIR_BASE "win64")
set(LIBDIR_BASE "windows_x64")
else()
message(FATAL_ERROR "32 bit compiler detected, blender no longer provides pre-build libraries for 32 bit windows, please set the LIBDIR cmake variable to your own library folder")
endif()
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 19.30.30423)
message(STATUS "Visual Studio 2022 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/${LIBDIR_BASE})
elseif(MSVC_VERSION GREATER 1919)
message(STATUS "Visual Studio 2019 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/${LIBDIR_BASE})
endif()
else()
if(FIRST_RUN)
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
endif()
endif()
if(NOT EXISTS "${LIBDIR}/")
if(NOT EXISTS "${LIBDIR}/.git")
message(FATAL_ERROR "\n\nWindows requires pre-compiled libs at: '${LIBDIR}'. Please run `make update` in the blender source folder to obtain them.")
endif()


@@ -163,14 +163,14 @@ function(blender_add_ctests)
TEST_PREFIX ${ARGS_SUITE_NAME}
WORKING_DIRECTORY "${TEST_INSTALL_DIR}"
EXTRA_ARGS
--test-assets-dir "${CMAKE_SOURCE_DIR}/../lib/tests"
--test-assets-dir "${CMAKE_SOURCE_DIR}/tests/data"
--test-release-dir "${_test_release_dir}"
)
else()
add_test(
NAME ${ARGS_SUITE_NAME}
COMMAND ${ARGS_TARGET}
--test-assets-dir "${CMAKE_SOURCE_DIR}/../lib/tests"
--test-assets-dir "${CMAKE_SOURCE_DIR}/tests/data"
--test-release-dir "${_test_release_dir}"
WORKING_DIRECTORY ${TEST_INSTALL_DIR}
)


@@ -13,6 +13,7 @@ import sys
import make_utils
from make_utils import call
from pathlib import Path
# Parse arguments.
@@ -21,7 +22,6 @@ def parse_arguments() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument("--ctest-command", default="ctest")
parser.add_argument("--cmake-command", default="cmake")
parser.add_argument("--svn-command", default="svn")
parser.add_argument("--git-command", default="git")
parser.add_argument("--config", default="")
parser.add_argument("build_directory")
@@ -30,7 +30,6 @@ def parse_arguments() -> argparse.Namespace:
args = parse_arguments()
git_command = args.git_command
svn_command = args.svn_command
ctest_command = args.ctest_command
cmake_command = args.cmake_command
config = args.config
@@ -45,24 +44,18 @@ if make_utils.command_missing(git_command):
sys.exit(1)
# Test if we are building a specific release version.
branch = make_utils.git_branch(git_command)
tag = make_utils.git_tag(git_command)
release_version = make_utils.git_branch_release_version(branch, tag)
lib_tests_dirpath = os.path.join('..', 'lib', "tests")
lib_tests_dirpath = Path("tests") / "data"
if not os.path.exists(lib_tests_dirpath):
if not (lib_tests_dirpath / ".git").exists():
print("Tests files not found, downloading...")
if make_utils.command_missing(svn_command):
sys.stderr.write("svn not found, can't checkout test files\n")
sys.exit(1)
if make_utils.command_missing(cmake_command):
sys.stderr.write("cmake not found, can't checkout test files\n")
sys.exit(1)
svn_url = make_utils.svn_libraries_base_url(release_version) + "/tests"
call([svn_command, "checkout", svn_url, lib_tests_dirpath])
# Ensure the test data files sub-module is configured and present.
make_utils.git_enable_submodule(git_command, "tests/data")
make_utils.git_update_submodule(args.git_command, lib_tests_dirpath)
# Run cmake again to detect tests files.
os.chdir(build_dir)


@@ -4,11 +4,11 @@
# SPDX-License-Identifier: GPL-2.0-or-later
"""
"make update" for all platforms, updating svn libraries and tests and Blender
git repository and sub-modules.
"make update" for all platforms, updating Git LFS submodules for libraries and
tests, and Blender git repository.
For release branches, this will check out the appropriate branches of
sub-modules and libraries.
submodules and libraries.
"""
import argparse
@@ -20,168 +20,176 @@ import sys
import make_utils
from pathlib import Path
from make_utils import call, check_output
from urllib.parse import urljoin
from urllib.parse import urljoin, urlsplit
from typing import (
Optional,
)
class Submodule:
path: str
branch: str
branch_fallback: str
def __init__(self, path: str, branch: str, branch_fallback: str) -> None:
self.path = path
self.branch = branch
self.branch_fallback = branch_fallback
from typing import Optional
def print_stage(text: str) -> None:
print("")
print(text)
print("=" * len(text))
print("")
# Parse arguments
def parse_arguments() -> argparse.Namespace:
"""
Parse command-line arguments.
Returns a parsed object from which the command-line arguments can be accessed
as properties. The names of the properties match the command-line arguments,
but with the leading dashes omitted and all remaining dashes replaced with
underscores.
"""
parser = argparse.ArgumentParser()
parser.add_argument("--no-libraries", action="store_true")
parser.add_argument("--no-blender", action="store_true")
parser.add_argument("--no-submodules", action="store_true")
parser.add_argument("--use-tests", action="store_true")
parser.add_argument("--svn-command", default="svn")
parser.add_argument("--svn-branch", default=None)
parser.add_argument("--git-command", default="git")
parser.add_argument("--use-linux-libraries", action="store_true")
parser.add_argument("--architecture", type=str, choices=("x86_64", "amd64", "arm64",))
parser.add_argument("--architecture", type=str,
choices=("x86_64", "amd64", "arm64",))
return parser.parse_args()
def get_blender_git_root() -> str:
return check_output([args.git_command, "rev-parse", "--show-toplevel"])
def get_blender_git_root() -> Path:
"""
Get root directory of the current Git directory.
"""
return Path(
check_output([args.git_command, "rev-parse", "--show-toplevel"]))
# Setup for precompiled libraries and tests from svn.
def get_effective_platform(args: argparse.Namespace) -> str:
"""
Get platform of the host.
The result string is normalized to the name used by Blender releases and
library repository name prefixes: linux, macos, windows.
"""
if sys.platform == "darwin":
platform = "macos"
elif sys.platform == "win32":
platform = "windows"
else:
platform = sys.platform
assert (platform in ("linux", "macos", "windows"))
return platform
def get_effective_architecture(args: argparse.Namespace) -> str:
"""
Get architecture of the host.
The result string is normalized to the architecture name used by the Blender
releases and library repository name suffixes: x64, arm64.
NOTE: When cross-compiling the architecture is coming from the command line
argument.
"""
architecture = args.architecture
if architecture:
assert isinstance(architecture, str)
return architecture
# Check platform.version to detect arm64 with x86_64 python binary.
if "ARM64" in platform.version():
return "arm64"
return platform.machine().lower()
def svn_update(args: argparse.Namespace, release_version: Optional[str]) -> None:
svn_non_interactive = [args.svn_command, '--non-interactive']
lib_dirpath = os.path.join(get_blender_git_root(), '..', 'lib')
svn_url = make_utils.svn_libraries_base_url(release_version, args.svn_branch)
# Checkout precompiled libraries
architecture = get_effective_architecture(args)
if sys.platform == 'darwin':
if architecture == 'arm64':
lib_platform = "darwin_arm64"
elif architecture == 'x86_64':
lib_platform = "darwin"
else:
lib_platform = None
elif sys.platform == 'win32':
# Windows checkout is usually handled by bat scripts since python3 to run
# this script is bundled as part of the precompiled libraries. However it
# is used by the buildbot.
lib_platform = "win64_vc15"
elif args.use_linux_libraries:
lib_platform = "linux_x86_64_glibc_228"
elif "ARM64" in platform.version():
# Check platform.version to detect arm64 with x86_64 python binary.
architecture = "arm64"
else:
# No precompiled libraries for Linux.
lib_platform = None
architecture = platform.machine().lower()
if lib_platform:
lib_platform_dirpath = os.path.join(lib_dirpath, lib_platform)
# Normalize the architecture name.
if architecture in ("x86_64", "amd64"):
architecture = "x64"
if not os.path.exists(lib_platform_dirpath):
print_stage("Checking out Precompiled Libraries")
assert (architecture in ("x64", "arm64"))
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't checkout libraries\n")
sys.exit(1)
svn_url_platform = svn_url + lib_platform
call(svn_non_interactive + ["checkout", svn_url_platform, lib_platform_dirpath])
if args.use_tests:
lib_tests = "tests"
lib_tests_dirpath = os.path.join(lib_dirpath, lib_tests)
if not os.path.exists(lib_tests_dirpath):
print_stage("Checking out Tests")
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't checkout tests\n")
sys.exit(1)
svn_url_tests = svn_url + lib_tests
call(svn_non_interactive + ["checkout", svn_url_tests, lib_tests_dirpath])
lib_assets = "assets"
lib_assets_dirpath = os.path.join(lib_dirpath, lib_assets)
if not os.path.exists(lib_assets_dirpath):
print_stage("Checking out Assets")
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't checkout assets\n")
sys.exit(1)
svn_url_assets = svn_url + lib_assets
call(svn_non_interactive + ["checkout", svn_url_assets, lib_assets_dirpath])
# Update precompiled libraries, assets and tests
if not os.path.isdir(lib_dirpath):
print("Library path: %r, not found, skipping" % lib_dirpath)
else:
paths_local_and_remote = []
if os.path.exists(os.path.join(lib_dirpath, ".svn")):
print_stage("Updating Precompiled Libraries, Assets and Tests (one repository)")
paths_local_and_remote.append((lib_dirpath, svn_url))
else:
print_stage("Updating Precompiled Libraries, Assets and Tests (multiple repositories)")
# Separate paths checked out.
for dirname in os.listdir(lib_dirpath):
if dirname.startswith("."):
# Temporary paths such as ".mypy_cache" will report a warning, skip hidden directories.
continue
dirpath = os.path.join(lib_dirpath, dirname)
if not (os.path.isdir(dirpath) and os.path.exists(os.path.join(dirpath, ".svn"))):
continue
paths_local_and_remote.append((dirpath, svn_url + dirname))
if paths_local_and_remote:
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't update libraries\n")
sys.exit(1)
for dirpath, svn_url_full in paths_local_and_remote:
call(svn_non_interactive + ["cleanup", dirpath])
# Switch to appropriate branch and update.
call(svn_non_interactive + ["switch", svn_url_full, dirpath], exit_on_error=False)
call(svn_non_interactive + ["update", dirpath])
return architecture
def get_submodule_directories(args: argparse.Namespace):
"""
Get list of all configured submodule directories.
"""
blender_git_root = get_blender_git_root()
dot_modules = blender_git_root / ".gitmodules"
if not dot_modules.exists():
return ()
submodule_directories_output = check_output(
[args.git_command, "config", "--file", dot_modules, "--get-regexp", "path"])
return (Path(line.split(' ', 1)[1]) for line in submodule_directories_output.strip().splitlines())
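For reference, `git config --file .gitmodules --get-regexp path` prints one `submodule.<path>.path <path>` pair per line, which the comprehension above splits on the first space. A small illustration, with output mimicking the .gitmodules added in this commit:

from pathlib import Path

output = """submodule.lib/linux_x64.path lib/linux_x64
submodule.tests/data.path tests/data"""

paths = [Path(line.split(' ', 1)[1]) for line in output.strip().splitlines()]
assert paths == [Path("lib/linux_x64"), Path("tests/data")]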
def ensure_git_lfs(args: argparse.Namespace) -> None:
# Use `--skip-repo` to avoid creating git hooks.
# This is called from the `blender.git` checkout, so we don't need to install hooks there.
call((args.git_command, "lfs", "install", "--skip-repo"), exit_on_error=True)
def update_precompiled_libraries(args: argparse.Namespace) -> str:
"""
Configure and update submodule for precompiled libraries
This function detects the current host architecture, enables the
corresponding submodule, and updates it.
NOTE: When cross-compiling the architecture is coming from the command line
argument.
"""
print_stage("Configuring Precompiled Libraries")
platform = get_effective_platform(args)
arch = get_effective_architecture(args)
print(f"Detected platform : {platform}")
print(f"Detected architecture : {arch}")
print()
if sys.platform == "linux" and not args.use_linux_libraries:
print("Skipping Linux libraries configuration")
return ""
submodule_dir = f"lib/{platform}_{arch}"
submodule_directories = get_submodule_directories(args)
if Path(submodule_dir) not in submodule_directories:
return "Skipping libraries update: no configured submodule\n"
make_utils.git_enable_submodule(args.git_command, submodule_dir)
if not make_utils.git_update_submodule(args.git_command, submodule_dir):
return "Error updating precompiled libraries\n"
return ""
def update_tests_data_files(args: argparse.Namespace) -> str:
"""
Configure and update submodule with files used by regression tests
"""
print_stage("Configuring Tests Data Files")
submodule_dir = "tests/data"
make_utils.git_enable_submodule(args.git_command, submodule_dir)
if not make_utils.git_update_submodule(args.git_command, submodule_dir):
return "Error updating test data\n"
return ""
# Test if git repo can be updated.
def git_update_skip(args: argparse.Namespace, check_remote_exists: bool = True) -> str:
"""Test if git repo can be updated."""
if make_utils.command_missing(args.git_command):
sys.stderr.write("git not found, can't update code\n")
sys.exit(1)
@@ -274,23 +282,22 @@ def resolve_external_url(blender_url: str, repo_name: str) -> str:
return urljoin(blender_url + "/", "../" + repo_name)
def external_script_copy_old_submodule_over(args: argparse.Namespace, directory_name: str) -> None:
blender_git_root = Path(get_blender_git_root())
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
def external_script_copy_old_submodule_over(
args: argparse.Namespace,
directory: Path,
old_submodules_dir: Path) -> None:
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
old_submodule_relative_dir = Path("release") / "scripts" / directory_name
print(f"Moving {old_submodule_relative_dir} to scripts/{directory_name} ...")
old_submodule_dir = blender_git_root / old_submodule_relative_dir
shutil.move(old_submodule_dir, external_dir)
print(f"Moving {old_submodules_dir} to {directory} ...")
shutil.move(blender_git_root / old_submodules_dir, external_dir)
# Remove old ".git" which is a file with path to a submodule bare repo inside of main
# repo .git/modules directory.
(external_dir / ".git").unlink()
bare_repo_relative_dir = Path(".git") / "modules" / "release" / "scripts" / directory_name
print(f"Copying {bare_repo_relative_dir} to scripts/{directory_name}/.git ...")
bare_repo_relative_dir = Path(".git") / "modules" / old_submodules_dir
print(f"Copying {bare_repo_relative_dir} to {directory}/.git ...")
bare_repo_dir = blender_git_root / bare_repo_relative_dir
shutil.copytree(bare_repo_dir, external_dir / ".git")
@@ -298,25 +305,26 @@ def external_script_copy_old_submodule_over(args: argparse.Namespace, directory_
call((args.git_command, "config", "--file", str(git_config), "--unset", "core.worktree"))
def external_script_initialize_if_needed(args: argparse.Namespace,
repo_name: str,
directory_name: str) -> None:
"""Initialize checkout of an external repository scripts directory"""
def floating_checkout_initialize_if_needed(args: argparse.Namespace,
repo_name: str,
directory: Path,
old_submodules_dir: Path = None) -> None:
"""Initialize checkout of an external repository"""
blender_git_root = Path(get_blender_git_root())
blender_git_root = get_blender_git_root()
blender_dot_git = blender_git_root / ".git"
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
external_dir = blender_git_root / directory
if external_dir.exists():
return
print(f"Initializing scripts/{directory_name} ...")
print(f"Initializing {directory} ...")
old_submodule_dot_git = blender_git_root / "release" / "scripts" / directory_name / ".git"
if old_submodule_dot_git.exists() and blender_dot_git.is_dir():
external_script_copy_old_submodule_over(args, directory_name)
return
if old_submodules_dir is not None:
old_submodule_dot_git = blender_git_root / old_submodules_dir / ".git"
if old_submodule_dot_git.exists() and blender_dot_git.is_dir():
external_script_copy_old_submodule_over(args, directory, old_submodules_dir)
return
origin_name = "upstream" if use_upstream_workflow(args) else "origin"
blender_url = make_utils.git_get_remote_url(args.git_command, origin_name)
@@ -330,9 +338,9 @@ def external_script_initialize_if_needed(args: argparse.Namespace,
call((args.git_command, "clone", "--origin", origin_name, external_url, str(external_dir)))
def external_script_add_origin_if_needed(args: argparse.Namespace,
repo_name: str,
directory_name: str) -> None:
def floating_checkout_add_origin_if_needed(args: argparse.Namespace,
repo_name: str,
directory: Path) -> None:
"""
Add remote called 'origin' if there is a fork of the external repository available
@@ -344,9 +352,8 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
cwd = os.getcwd()
blender_git_root = Path(get_blender_git_root())
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
origin_blender_url = make_utils.git_get_remote_url(args.git_command, "origin")
origin_external_url = resolve_external_url(origin_blender_url, repo_name)
@@ -361,7 +368,7 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
if not make_utils.git_is_remote_repository(args.git_command, origin_external_url):
return
print(f"Adding origin remote to {directory_name} pointing to fork ...")
print(f"Adding origin remote to {directory} pointing to fork ...")
# Non-obvious tricks to introduce the new remote called "origin" to the existing
# submodule configuration.
@@ -390,23 +397,30 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
return
def external_scripts_update(args: argparse.Namespace,
repo_name: str,
directory_name: str,
branch: Optional[str]) -> str:
def floating_checkout_update(args: argparse.Namespace,
repo_name: str,
directory: Path,
branch: Optional[str],
old_submodules_dir: Path = None,
only_update=False) -> str:
"""Update a single external checkout with the given name in the scripts folder"""
external_script_initialize_if_needed(args, repo_name, directory_name)
external_script_add_origin_if_needed(args, repo_name, directory_name)
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
print(f"Updating scripts/{directory_name} ...")
if only_update and not external_dir.exists():
return ""
floating_checkout_initialize_if_needed(args, repo_name, directory, old_submodules_dir)
floating_checkout_add_origin_if_needed(args, repo_name, directory)
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
print(f"* Updating {directory} ...")
cwd = os.getcwd()
blender_git_root = Path(get_blender_git_root())
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
# Update externals to appropriate given branch, falling back to main if none is given and/or
# found in a sub-repository.
branch_fallback = "main"
@@ -419,7 +433,7 @@ def external_scripts_update(args: argparse.Namespace,
os.chdir(external_dir)
msg = git_update_skip(args, check_remote_exists=False)
if msg:
skip_msg += directory_name + " skipped: " + msg + "\n"
skip_msg += str(directory) + " skipped: " + msg + "\n"
else:
# Find a matching branch that exists.
for remote in ("origin", "upstream"):
@@ -465,6 +479,17 @@ def external_scripts_update(args: argparse.Namespace,
return skip_msg
def external_scripts_update(args: argparse.Namespace,
repo_name: str,
directory_name: str,
branch: Optional[str]) -> str:
return floating_checkout_update(args,
repo_name,
Path("scripts") / directory_name,
branch,
old_submodules_dir=Path("release") / "scripts" / directory_name)
def scripts_submodules_update(args: argparse.Namespace, branch: Optional[str]) -> str:
"""Update working trees of addons and addons_contrib within the scripts/ directory"""
msg = ""
@@ -475,18 +500,91 @@ def scripts_submodules_update(args: argparse.Namespace, branch: Optional[str]) -
return msg
def floating_libraries_update(args: argparse.Namespace, branch: Optional[str]) -> str:
"""Update libraries checkouts which are floating (not attached as Git submodules)"""
msg = ""
msg += floating_checkout_update(args,
"benchmarks",
Path("tests") / "benchmarks",
branch,
only_update=True)
return msg
def add_submodule_push_url(args: argparse.Namespace):
"""
Add pushURL configuration for all locally activated submodules, pointing to SSH protocol.
"""
blender_git_root = get_blender_git_root()
modules = blender_git_root / ".git" / "modules"
submodule_directories = get_submodule_directories(args)
for submodule_path in submodule_directories:
module_path = modules / submodule_path
config = module_path / "config"
if not config.exists():
# Ignore modules which are not initialized
continue
push_url = check_output((args.git_command, "config", "--file", str(config),
"--get", "remote.origin.pushURL"), exit_on_error=False)
if push_url and push_url != "git@projects.blender.org:blender/lib-darwin_arm64.git":
# Ignore modules which have pushURL configured.
# Keep a special exception, as some debug code sneaked into production for a
# short while.
continue
url = make_utils.git_get_config(args.git_command, "remote.origin.url", str(config))
if not url.startswith("https:"):
# Ignore non-HTTPS URLs.
continue
url_parts = urlsplit(url)
push_url = f"git@{url_parts.netloc}:{url_parts.path[1:]}"
print(f"Setting pushURL to {push_url} for {submodule_path}")
make_utils.git_set_config(args.git_command, "remote.origin.pushURL", push_url, str(config))
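Worked example of the rewrite above, using one of the HTTPS submodule URLs from the new .gitmodules:

from urllib.parse import urlsplit

url = "https://projects.blender.org/blender/lib-linux_x64.git"
parts = urlsplit(url)  # netloc="projects.blender.org", path="/blender/lib-linux_x64.git"
push_url = f"git@{parts.netloc}:{parts.path[1:]}"
assert push_url == "git@projects.blender.org:blender/lib-linux_x64.git"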
def submodules_update(args: argparse.Namespace, branch: Optional[str]) -> str:
"""Update submodules or other externally tracked source trees"""
print_stage("Updating Submodules")
msg = ""
msg += scripts_submodules_update(args, branch)
msg += floating_libraries_update(args, branch)
print("* Updating Git submodules")
submodule_directories = get_submodule_directories(args)
for submodule_path in submodule_directories:
if submodule_path.parts[0] == "lib" and args.no_libraries:
print(f"Skipping library submodule {submodule_path}")
continue
if submodule_path.parts[0] == "tests" and not args.use_tests:
print(f"Skipping tests submodule {submodule_path}")
continue
if not make_utils.git_update_submodule(args.git_command, submodule_path):
msg += f"Error updating Git submodule {submodule_path}\n"
add_submodule_push_url(args)
return msg
if __name__ == "__main__":
args = parse_arguments()
blender_skip_msg = ""
libraries_skip_msg = ""
submodules_skip_msg = ""
blender_version = make_utils.parse_blender_version()
@@ -494,26 +592,32 @@ if __name__ == "__main__":
major = blender_version.version // 100
minor = blender_version.version % 100
branch = f"blender-v{major}.{minor}-release"
release_version: Optional[str] = f"{major}.{minor}"
else:
branch = 'main'
release_version = None
if not args.no_libraries:
svn_update(args, release_version)
# Submodules and precompiled libraries require Git LFS.
ensure_git_lfs(args)
if not args.no_blender:
blender_skip_msg = git_update_skip(args)
if not blender_skip_msg:
blender_skip_msg = blender_update(args)
if blender_skip_msg:
blender_skip_msg = "Blender repository skipped: " + blender_skip_msg + "\n"
if not args.no_libraries:
libraries_skip_msg += update_precompiled_libraries(args)
if args.use_tests:
libraries_skip_msg += update_tests_data_files(args)
if not args.no_submodules:
submodules_skip_msg = submodules_update(args, branch)
# Report any skipped repositories at the end, so it's not as easy to miss.
skip_msg = blender_skip_msg + submodules_skip_msg
skip_msg = blender_skip_msg + libraries_skip_msg + submodules_skip_msg
if skip_msg:
print_stage(skip_msg.strip())
print_stage("Update finished with the following messages")
print(skip_msg.strip())
# For failed submodule update we throw an error, since not having correct
# submodules can make Blender throw errors.


@@ -8,12 +8,11 @@ Utility functions for make update and make tests.
"""
import re
import os
import shutil
import subprocess
import sys
import os
from pathlib import Path
from urllib.parse import urljoin
from typing import (
Sequence,
@@ -21,18 +20,30 @@ from typing import (
)
def call(cmd: Sequence[str], exit_on_error: bool = True, silent: bool = False) -> int:
def call(cmd: Sequence[str], exit_on_error: bool = True, silent: bool = False, env=None) -> int:
if not silent:
print(" ".join([str(x) for x in cmd]))
cmd_str = ""
if env:
cmd_str += " ".join([f"{item[0]}={item[1]}" for item in env.items()])
cmd_str += " "
cmd_str += " ".join([str(x) for x in cmd])
print(cmd_str)
env_full = None
if env:
env_full = os.environ.copy()
for key, value in env.items():
env_full[key] = value
# Flush to ensure correct order output on Windows.
sys.stdout.flush()
sys.stderr.flush()
if silent:
retcode = subprocess.call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
retcode = subprocess.call(
cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, env=env_full)
else:
retcode = subprocess.call(cmd)
retcode = subprocess.call(cmd, env=env_full)
if exit_on_error and retcode != 0:
sys.exit(retcode)
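With the new `env` parameter, the extra variables are echoed as a prefix and merged into a copy of the current environment before the command runs, for example:

# Prints "GIT_LFS_SKIP_SMUDGE=1 git lfs pull" and runs the command with the
# variable added on top of os.environ:
call(["git", "lfs", "pull"], env={"GIT_LFS_SKIP_SMUDGE": "1"})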
@@ -48,7 +59,7 @@ def check_output(cmd: Sequence[str], exit_on_error: bool = True) -> str:
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True)
except subprocess.CalledProcessError as e:
if exit_on_error:
sys.stderr.write(" ".join(cmd))
sys.stderr.write(" ".join(cmd) + "\n")
sys.stderr.write(e.output + "\n")
sys.exit(e.returncode)
output = ""
@@ -87,25 +98,6 @@ def git_remote_exist(git_command: str, remote_name: str) -> bool:
return remote_url != remote_name
def git_get_resolved_submodule_url(git_command: str, blender_url: str, submodule_path: str) -> str:
git_root = check_output([git_command, "rev-parse", "--show-toplevel"])
dot_gitmodules = os.path.join(git_root, ".gitmodules")
submodule_key_prefix = f"submodule.{submodule_path}"
submodule_key_url = f"{submodule_key_prefix}.url"
gitmodule_url = git_get_config(
git_command, submodule_key_url, file=dot_gitmodules)
# A bit of a trickery to construct final URL.
# Only works for the relative submodule URLs.
#
# Note that unless the LHS URL ends up with a slash urljoin treats the last component as a
# file.
assert gitmodule_url.startswith('..')
return urljoin(blender_url + "/", gitmodule_url)
def git_is_remote_repository(git_command: str, repo: str) -> bool:
"""Returns true if the given repository is a valid/clonable git repo"""
exit_code = call((git_command, "ls-remote", repo, "HEAD"), exit_on_error=False, silent=True)
@@ -113,7 +105,8 @@ def git_is_remote_repository(git_command: str, repo: str) -> bool:
def git_branch(git_command: str) -> str:
# Get current branch name.
"""Get current branch name."""
try:
branch = subprocess.check_output([git_command, "rev-parse", "--abbrev-ref", "HEAD"])
except subprocess.CalledProcessError as e:
@@ -137,44 +130,60 @@ def git_set_config(git_command: str, key: str, value: str, file: Optional[str] =
return check_output([git_command, "config", key, value])
def git_tag(git_command: str) -> Optional[str]:
# Get current tag name.
try:
tag = subprocess.check_output([git_command, "describe", "--exact-match"], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
return None
def git_enable_submodule(git_command: str, submodule_dir: str):
"""Enable submodule denoted by its directory within the repository"""
return tag.strip().decode('utf8')
command = (git_command,
"config",
"--local",
f"submodule.{submodule_dir}.update", "checkout")
call(command, exit_on_error=True, silent=False)
def git_branch_release_version(branch: str, tag: Optional[str]) -> Optional[str]:
re_match = re.search("^blender-v(.*)-release$", branch)
release_version = None
if re_match:
release_version = re_match.group(1)
elif tag:
re_match = re.search(r"^v([0-9]*\.[0-9]*).*", tag)
if re_match:
release_version = re_match.group(1)
return release_version
def git_update_submodule(git_command: str, submodule_dir: str) -> bool:
"""
Update the given submodule.
The submodule is denoted by its path within the repository.
This function will initialize the submodule if it has not been initialized.
def svn_libraries_base_url(release_version: Optional[str], branch: Optional[str] = None) -> str:
if release_version:
svn_branch = "tags/blender-" + release_version + "-release"
elif branch:
svn_branch = "branches/" + branch
else:
svn_branch = "trunk"
return "https://svn.blender.org/svnroot/bf-blender/" + svn_branch + "/lib/"
Returns true if the update succeeded
"""
# Use the two stage update process:
# - Step 1: checkout the submodule to the desired (by the parent repository) hash, but
# skip the LFS smudging.
# - Step 2: Fetch LFS files, if needed.
#
# This allows showing download progress, potentially resuming an interrupted
# download, and even recovering from a partial/corrupted checkout of submodules.
#
# This bypasses a limitation of submodules configured as "update=checkout":
# with a regular `git submodule update`, depending on the Git version, no
# progress is reported at all. This is because submodule--helper.c configures
# the Git checkout process with the "quiet" flag, so no detached-head
# information is printed after the submodule update, and since Git 2.33 the
# LFS message "Filtering contents..." is suppressed by
#
# https://github.com/git/git/commit/7a132c628e57b9bceeb88832ea051395c0637b16
#
# Doing "git lfs pull" after checkout with GIT_LFS_SKIP_SMUDGE=true seems to be the
# valid process. For example, https://www.mankier.com/7/git-lfs-faq
env = {"GIT_LFS_SKIP_SMUDGE": "1"}
if call((git_command, "submodule", "update", "--init", "--progress", submodule_dir),
exit_on_error=False, env=env) != 0:
return False
return call((git_command, "-C", submodule_dir, "lfs", "pull"),
exit_on_error=False) == 0
def command_missing(command: str) -> bool:
# Support running with Python 2 for macOS
if sys.version_info >= (3, 0):
return shutil.which(command) is None
else:
return False
return False
class BlenderVersion:


@@ -1,49 +1,55 @@
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15
set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"
set BUILD_VS_LIBDIR=lib/windows_x64
if NOT "%verbose%" == "" (
echo Library Directory = "%BUILD_VS_LIBDIR%"
)
if NOT EXIST %BUILD_VS_LIBDIR% (
rem libs not found, but svn is on the system
if not "%SVN%"=="" (
if NOT EXIST "%BUILD_VS_LIBDIR%\.git" (
rem libs not found, but git is on the system
if not "%GIT%"=="" (
echo.
echo The required external libraries in %BUILD_VS_LIBDIR% are missing
echo.
set /p GetLibs= "Would you like to download them? (y/n)"
if /I "!GetLibs!"=="Y" (
echo.
echo Downloading %BUILD_VS_SVNDIR% libraries, please wait.
echo Downloading %BUILD_VS_LIBDIR% libraries, please wait.
echo.
:RETRY
"%SVN%" checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/%BUILD_VS_SVNDIR% %BUILD_VS_LIBDIR%
echo *********************************************************
echo * *
echo * Note: Once the initial download finishes and you see *
echo * "Resolving deltas: 100%% (nnn/nnn) done" *
echo * a second, much larger, update will occur with *
echo * no visible updates. Please do not interrupt *
echo * this process. It may take over an hour to *
echo * complete depending on your internet connection. *
echo * *
echo *********************************************************
:RETRY
"%GIT%" -C "%BLENDER_DIR%\" config --local "submodule.%BUILD_VS_LIBDIR%.update" "checkout"
"%GIT%" -C "%BLENDER_DIR%\" submodule update --progress --init "%BUILD_VS_LIBDIR%"
if errorlevel 1 (
set /p LibRetry= "Error during download, retry? y/n"
if /I "!LibRetry!"=="Y" (
cd %BUILD_VS_LIBDIR%
"%SVN%" cleanup
cd %BLENDER_DIR%
goto RETRY
)
echo.
echo Error: Download of external libraries failed.
echo This is needed for building, please manually run 'svn cleanup' and 'svn update' in
echo %BUILD_VS_LIBDIR% , until this is resolved you CANNOT make a successful blender build
echo Until this is resolved you CANNOT make a successful blender build.
echo.
exit /b 1
)
)
) else (
echo Not downloading libraries, until this is resolved you CANNOT make a successful blender build.
exit /b 1
)
)
) else (
if NOT EXIST %PYTHON% (
if not "%SVN%"=="" (
if not "%GIT%"=="" (
echo.
echo Python not found in external libraries, updating to latest version
echo.
"%SVN%" update %BUILD_VS_LIBDIR%
"%GIT%" -C "%BLENDER_DIR%" submodule update "%BUILD_VS_LIBDIR%"
)
)
)
@@ -53,8 +59,8 @@ if NOT EXIST %BUILD_VS_LIBDIR% (
echo Error: Required libraries not found at "%BUILD_VS_LIBDIR%"
echo This is needed for building, aborting!
echo.
if "%SVN%"=="" (
echo This is most likely caused by svn.exe not being available.
if "%GIT%"=="" (
echo This is most likely caused by git.exe not being available.
)
exit /b 1
)


@@ -1,5 +1,4 @@
REM find all dependencies and set the corresponding environment variables.
for %%X in (svn.exe) do (set SVN=%%~$PATH:X)
for %%X in (cmake.exe) do (set CMAKE=%%~$PATH:X)
for %%X in (ctest.exe) do (set CTEST=%%~$PATH:X)
for %%X in (git.exe) do (set GIT=%%~$PATH:X)
@@ -7,31 +6,32 @@ REM For python, default on 310 but if that does not exist also check
REM the 311, 312 and finally 39 folders to see if those are there, it checks
REM this far ahead to ensure good lib folder compatibility in the future
REM it falls back to 3.9 just in case it is a very old lib folder.
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\310\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\310\bin\python.exe
if EXIST %PYTHON% (
goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\311\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\311\bin\python.exe
if EXIST %PYTHON% (
goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\312\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\312\bin\python.exe
if EXIST %PYTHON% (
goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\39\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\39\bin\python.exe
if EXIST %PYTHON% (
goto detect_python_done
)
if NOT EXIST %PYTHON% (
echo Warning: Python not found, there is likely an issue with the library folder
if EXIST %BLENDER_DIR%\lib\windows_x64\ (
echo Warning: Python not found, there is likely an issue with the library folder
)
set PYTHON=""
)
:detect_python_done
if NOT "%verbose%" == "" (
echo svn : "%SVN%"
echo cmake : "%CMAKE%"
echo ctest : "%CTEST%"
echo git : "%GIT%"


@@ -1,5 +1,5 @@
if EXIST %BLENDER_DIR%\..\lib\win64_vc15\llvm\bin\clang-format.exe (
set CF_PATH=..\lib\win64_vc15\llvm\bin
if EXIST %BLENDER_DIR%\lib\windows_x64\llvm\bin\clang-format.exe (
set CF_PATH=lib\windows_x64\llvm\bin
goto detect_done
)


@@ -0,0 +1,19 @@
set BUILD_VS_LIBDIR=lib/windows_x64
:RETRY
"%GIT%" -C "%BLENDER_DIR%\" config --local "submodule.%BUILD_VS_LIBDIR%.update" "checkout"
"%GIT%" -C "%BLENDER_DIR%\" submodule update --progress --init "%BUILD_VS_LIBDIR%"
if errorlevel 1 (
set /p LibRetry= "Error during update, retry? y/n"
if /I "!LibRetry!"=="Y" (
goto RETRY
)
echo.
echo Error: Download of external libraries failed.
echo Until this is resolved you CANNOT make a successful blender build.
echo.
exit /b 1
)
REM re-detect the dependencies after updating the libraries so any python version
REM changes are accounted for.
call "%~dp0\find_dependencies.cmd"


@@ -110,9 +110,6 @@ if NOT "%1" == "" (
) else if "%1" == "doc_py" (
set DOC_PY=1
goto EOF
) else if "%1" == "svnfix" (
set SVN_FIX=1
goto EOF
) else (
echo Command "%1" unknown, aborting!
goto ERR


@@ -4,9 +4,7 @@ set BUILD_CMAKE_ARGS=
set BUILD_ARCH=
set BUILD_VS_VER=
set BUILD_VS_YEAR=
set BUILD_VS_LIBDIRPOST=
set BUILD_VS_LIBDIR=
set BUILD_VS_SVNDIR=
set KEY_NAME=
set MSBUILD_PLATFORM=
set MUST_CLEAN=


@@ -8,13 +8,10 @@ for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Branch_hash=%%i
cd "%BLENDER_DIR%/scripts/addons"
for /f "delims=" %%i in ('"%GIT%" rev-parse --abbrev-ref HEAD') do echo Addons_Branch_name=%%i
for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Addons_Branch_hash=%%i
if "%SVN%" == "" (
echo SVN not found, cannot library information.
goto EOF
)
set BUILD_VS_LIBDIR=%BLENDER_DIR%..\lib\win64_vc15
for /f "delims=" %%i in ('"%SVN%" info --show-item=url --no-newline %BUILD_VS_LIBDIR% ') do echo Libs_URL=%%i
for /f "delims=" %%i in ('"%SVN%" info --show-item=revision --no-newline %BUILD_VS_LIBDIR% ') do echo Libs_Revision=%%i
for /f "delims=" %%i in ('"%SVN%" info --show-item=last-changed-date --no-newline %BUILD_VS_LIBDIR% ') do echo Libs_LastChange=%%i
cd "%BLENDER_DIR%/lib/windows_x64"
for /f "delims=" %%i in ('"%GIT%" rev-parse --abbrev-ref HEAD') do echo Libs_Branch_name=%%i
for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Libs_Branch_hash=%%i
cd "%BLENDER_DIR%"
:EOF


@@ -1,25 +0,0 @@
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15
set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"
echo Starting cleanup in %BUILD_VS_LIBDIR%.
cd %BUILD_VS_LIBDIR%
:RETRY
"%SVN%" cleanup
"%SVN%" update
if errorlevel 1 (
set /p LibRetry= "Error during update, retry? y/n"
if /I "!LibRetry!"=="Y" (
goto RETRY
)
echo.
echo Error: Download of external libraries failed.
echo This is needed for building, please manually run 'svn cleanup' and 'svn update' in
echo %BUILD_VS_LIBDIR% , until this is resolved you CANNOT make a successful blender build
echo.
exit /b 1
)
echo Cleanup complete


@@ -1,24 +0,0 @@
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15
set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"
cd %BUILD_VS_LIBDIR%
:RETRY
"%SVN%" update
if errorlevel 1 (
set /p LibRetry= "Error during update, retry? y/n"
if /I "!LibRetry!"=="Y" (
"%SVN%" cleanup
goto RETRY
)
echo.
echo Error: Download of external libraries failed.
echo This is needed for building, please manually run 'svn cleanup' and 'svn update' in
echo %BUILD_VS_LIBDIR% , until this is resolved you CANNOT make a successful blender build
echo.
exit /b 1
)
cd %BLENDER_DIR%


@@ -8,6 +8,6 @@ exit /b 1
:detect_python_done
REM Use -B to avoid writing __pycache__ in lib directory and causing update conflicts.
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_test.py --git-command "%GIT%" --svn-command "%SVN%" --cmake-command="%CMAKE%" --ctest-command="%CTEST%" --config="%BUILD_TYPE%" %BUILD_DIR%
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_test.py --git-command "%GIT%" --cmake-command="%CMAKE%" --ctest-command="%CTEST%" --config="%BUILD_TYPE%" %BUILD_DIR%
:EOF


@@ -5,6 +5,6 @@ if NOT EXIST %PYTHON% (
:detect_python_done
REM Use -B to avoid writing __pycache__ in lib directory and causing update conflicts.
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_update.py --git-command "%GIT%" --svn-command "%SVN%" %BUILD_UPDATE_ARGS%
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_update.py --git-command "%GIT%" %BUILD_UPDATE_ARGS%
:EOF


@@ -29,6 +29,12 @@ typedef struct HuangHairExtra {
/* Squared Eccentricity. */
float e2;
/* The projected width of half a pixel at `sd->P` in `h` space. */
float pixel_coverage;
/* Valid integration interval. */
float gamma_m_min, gamma_m_max;
} HuangHairExtra;
typedef struct HuangHairBSDF {
@@ -135,6 +141,14 @@ ccl_device_inline float to_gamma(float phi, float b)
return atan2f(sin_phi, b * cos_phi);
}
/* Intersecting `wi` with the ellipse defined by `x = sin_gamma, y = b * cos_gamma` results in solving
* for `gamma` in equation `-cos_phi_i * sin_gamma + b * sin_phi_i * cos_gamma = h`.
* Also, make use of `r = sqrt(sqr(cos_phi_i) + sqr(b * sin_phi_i))` to pre-map `h` to [-1, 1]. */
ccl_device_inline float h_to_gamma(const float h_div_r, const float b, const float3 wi)
{
return (b == 1.0f) ? -asinf(h_div_r) : atan2f(wi.z, -b * wi.x) - acosf(-h_div_r);
}
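For reference, `h_to_gamma()` is the closed form of that intersection via the harmonic-addition identity; a sketch, where the exact atan2 arguments and branch sign in the code depend on the local-frame convention for `phi_i`:

-\cos\phi_i \sin\gamma + b\sin\phi_i \cos\gamma = r\cos\bigl(\gamma - \operatorname{atan2}(-\cos\phi_i,\ b\sin\phi_i)\bigr), \qquad r = \sqrt{\cos^2\phi_i + (b\sin\phi_i)^2},

so setting the left-hand side equal to `h` is solved by

\gamma = \operatorname{atan2}(-\cos\phi_i,\ b\sin\phi_i) \pm \arccos(h/r).

In the circular case (`b == 1`, `phi_i == 0`) this reduces to \gamma = -\pi/2 + \arccos(h) = -\arcsin(h), matching the `-asinf(h_div_r)` branch.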
/* Compute the coordinate on the ellipse, given `gamma` and the aspect ratio between the minor axis
* and the major axis. */
ccl_device_inline float2 to_point(float gamma, float b)
@@ -170,6 +184,11 @@ ccl_device_inline float arc_length(float e2, float gamma)
return e2 == 0 ? 1.0f : sqrtf(1.0f - e2 * sqr(sinf(gamma)));
}
ccl_device_inline bool is_nearfield(ccl_private const HuangHairBSDF *bsdf)
{
return bsdf->extra->radius > bsdf->extra->pixel_coverage;
}
/** \} */
#ifdef __HAIR__
@@ -188,8 +207,8 @@ ccl_device int bsdf_hair_huang_setup(ccl_private ShaderData *sd,
/* Compute local frame. The Y axis is aligned with the curve tangent; the X axis is perpendicular
to the ray direction for circular cross-sections, or aligned with the major axis for elliptical
cross-sections. */
const float3 Y = safe_normalize(sd->dPdu);
const float3 X = safe_normalize(cross(Y, sd->wi));
bsdf->extra->Y = safe_normalize(sd->dPdu);
const float3 X = safe_normalize(cross(sd->dPdu, sd->wi));
/* h from -1..0..1 means the ray goes from grazing the hair, to hitting it at the center, to
* grazing the other edge. This is the cosine of the angle between `sd->N` and `X`. */
@@ -199,6 +218,8 @@ ccl_device int bsdf_hair_huang_setup(ccl_private ShaderData *sd,
kernel_assert(isfinite_safe(bsdf->h));
if (bsdf->aspect_ratio != 1.0f && (sd->type & PRIMITIVE_CURVE)) {
/* Adjust `bsdf->N` to be orthogonal to `sd->dPdu`. */
bsdf->N = safe_normalize(cross(sd->dPdu, safe_normalize(cross(bsdf->N, sd->dPdu))));
/* Align local frame with the curve normal. */
if (bsdf->aspect_ratio > 1.0f) {
/* Switch major and minor axis. */
@@ -214,14 +235,12 @@ ccl_device int bsdf_hair_huang_setup(ccl_private ShaderData *sd,
/* Fill extra closure. */
if (is_zero(bsdf->N) || !isfinite_safe(bsdf->N)) {
bsdf->extra->Y = Y;
/* Construct arbitrary local coordinate system. The implementation should ensure smooth
* transition along the hair shaft. */
make_orthonormals(Y, &bsdf->extra->Z, &bsdf->N);
make_orthonormals(bsdf->extra->Y, &bsdf->extra->Z, &bsdf->N);
}
else {
bsdf->extra->Z = safe_normalize(cross(bsdf->N, sd->dPdu));
bsdf->extra->Y = safe_normalize(cross(bsdf->extra->Z, bsdf->N));
}
const float3 I = make_float3(
@@ -311,50 +330,6 @@ ccl_device Spectrum bsdf_hair_huang_eval_r(KernelGlobals kg,
/* Get minor axis, assuming major axis is 1. */
const float b = bsdf->aspect_ratio;
const bool is_circular = (b == 1.0f);
const float phi_i = is_circular ? 0.0f : dir_phi(wi);
const float phi_o = dir_phi(wo);
/* Compute visible azimuthal range from incoming and outgoing directions. */
/* `dot(wi, wmi) > 0` */
const float tan_tilt = tanf(bsdf->tilt);
float phi_m_max1 = acosf(fmaxf(-tan_tilt * tan_theta(wi), 0.0f)) + phi_i;
if (isnan_safe(phi_m_max1)) {
return zero_spectrum();
}
float phi_m_min1 = -phi_m_max1 + 2.0f * phi_i;
/* `dot(wo, wmi) > 0` */
float phi_m_max2 = acosf(fmaxf(-tan_tilt * tan_theta(wo), 0.0f)) + phi_o;
if (isnan_safe(phi_m_max2)) {
return zero_spectrum();
}
float phi_m_min2 = -phi_m_max2 + 2.0f * phi_o;
if (!is_circular) {
/* Try to wrap range. */
if ((phi_m_max2 - phi_m_min1) > M_2PI_F) {
phi_m_min2 -= M_2PI_F;
phi_m_max2 -= M_2PI_F;
}
if ((phi_m_max1 - phi_m_min2) > M_2PI_F) {
phi_m_min1 -= M_2PI_F;
phi_m_max1 -= M_2PI_F;
}
}
const float phi_m_min = fmaxf(phi_m_min1, phi_m_min2) + 1e-3f;
const float phi_m_max = fminf(phi_m_max1, phi_m_max2) - 1e-3f;
if (phi_m_min > phi_m_max) {
return zero_spectrum();
}
const float gamma_m_min = to_gamma(phi_m_min, b);
float gamma_m_max = to_gamma(phi_m_max, b);
if (gamma_m_max < gamma_m_min) {
gamma_m_max += M_2PI_F;
}
const float3 wh = normalize(wi + wo);
@@ -363,16 +338,19 @@ ccl_device Spectrum bsdf_hair_huang_eval_r(KernelGlobals kg,
/* Maximal sample resolution. */
float res = roughness * 0.7f;
const float gamma_m_range = bsdf->extra->gamma_m_max - bsdf->extra->gamma_m_min;
/* Number of intervals should be even. */
const size_t intervals = 2 * (size_t)ceilf((gamma_m_max - gamma_m_min) / res * 0.5f);
const size_t intervals = 2 * (size_t)ceilf(gamma_m_range / res * 0.5f);
/* Modified resolution based on numbers of intervals. */
res = (gamma_m_max - gamma_m_min) / float(intervals);
res = gamma_m_range / float(intervals);
/* Integrate using Composite Simpson's 1/3 rule. */
float integral = 0.0f;
for (size_t i = 0; i <= intervals; i++) {
const float gamma_m = gamma_m_min + i * res;
const float gamma_m = bsdf->extra->gamma_m_min + i * res;
const float3 wm = sphg_dir(bsdf->tilt, gamma_m, b);
if (microfacet_visible(wi, wo, make_float3(wm.x, 0.0f, wm.z), wh)) {
@@ -385,11 +363,12 @@ ccl_device Spectrum bsdf_hair_huang_eval_r(KernelGlobals kg,
}
}
/* Simpson coefficient */
integral *= (2.0f / 3.0f * res);
const float F = fresnel_dielectric_cos(dot(wi, wh), bsdf->eta);
return make_spectrum(bsdf->extra->R * 0.125f * F * integral / bsdf->extra->radius);
return make_spectrum(bsdf->extra->R * 0.25f * F * integral);
}
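Both the R lobe above and the residual below integrate over `gamma_m` with Composite Simpson's 1/3 rule on an even number of intervals; the kernel applies its per-sample weights in lines not shown in this hunk and the `2.0f / 3.0f * res` factor at the end. A minimal Python sketch of the rule itself:

def composite_simpson(f, a, b, intervals):
    # Composite Simpson's 1/3 rule; `intervals` must be even and positive.
    assert intervals % 2 == 0 and intervals > 0
    h = (b - a) / intervals
    total = f(a) + f(b)
    for i in range(1, intervals):
        total += (4.0 if i % 2 else 2.0) * f(a + i * h)
    return total * h / 3.0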
/* Approximate components beyond TRT (starting TRRT) by summing up a geometric series. Attenuations
@@ -420,46 +399,25 @@ ccl_device Spectrum bsdf_hair_huang_eval_residual(KernelGlobals kg,
const float b = bsdf->aspect_ratio;
const bool is_circular = (b == 1.0f);
const float phi_i = is_circular ? 0.0f : dir_phi(wi);
/* Compute visible azimuthal range from the incoming direction. */
const float tan_tilt = tanf(bsdf->tilt);
const float phi_m_max = acosf(fmaxf(-tan_tilt * tan_theta(wi), 0.0f)) + phi_i;
if (isnan_safe(phi_m_max)) {
/* Early detection of `dot(wi, wmi) < 0`. */
return zero_spectrum();
}
const float phi_m_min = -phi_m_max + 2.0f * phi_i;
if (tan_tilt * tan_theta(wo) < -1.0f) {
/* Early detection of `dot(wo, wmo) < 0`. */
return zero_spectrum();
}
const Spectrum mu_a = bsdf->sigma;
const float eta = bsdf->eta;
const float inv_eta = 1.0f / eta;
const float gamma_m_min = to_gamma(phi_m_min, b) + 1e-3f;
float gamma_m_max = to_gamma(phi_m_max, b) - 1e-3f;
if (gamma_m_max < gamma_m_min) {
gamma_m_max += M_2PI_F;
}
const float roughness = bsdf->roughness;
const float roughness2 = sqr(roughness);
const float sqrt_roughness = sqrtf(roughness);
float res = roughness * 0.8f;
const size_t intervals = 2 * (size_t)ceilf((gamma_m_max - gamma_m_min) / res * 0.5f);
res = (gamma_m_max - gamma_m_min) / intervals;
const float gamma_m_range = bsdf->extra->gamma_m_max - bsdf->extra->gamma_m_min;
const size_t intervals = 2 * (size_t)ceilf(gamma_m_range / res * 0.5f);
res = gamma_m_range / intervals;
Spectrum S_tt = zero_spectrum();
Spectrum S_trt = zero_spectrum();
Spectrum S_trrt = zero_spectrum();
for (size_t i = 0; i <= intervals; i++) {
const float gamma_mi = gamma_m_min + i * res;
const float gamma_mi = bsdf->extra->gamma_m_min + i * res;
const float3 wmi = sphg_dir(bsdf->tilt, gamma_mi, b);
const float3 wmi_ = sphg_dir(0.0f, gamma_mi, b);
@@ -470,7 +428,7 @@ ccl_device Spectrum bsdf_hair_huang_eval_residual(KernelGlobals kg,
const float3 wh1 = sample_wh(kg, roughness, wi, wmi, sample1);
const float cos_hi1 = dot(wi, wh1);
if (!(cos_hi1 > 0)) {
if (!(cos_hi1 > 0.0f)) {
continue;
}
@@ -535,7 +493,7 @@ ccl_device Spectrum bsdf_hair_huang_eval_residual(KernelGlobals kg,
lcg_step_float(&rng_quadrature));
const float3 wh2 = sample_wh(kg, roughness, -wt, wmt, sample2);
const float cos_hi2 = dot(-wt, wh2);
if (!(cos_hi2 > 0)) {
if (!(cos_hi2 > 0.0f)) {
continue;
}
const float R2 = fresnel_dielectric_cos(cos_hi2, inv_eta);
@@ -603,8 +561,9 @@ ccl_device Spectrum bsdf_hair_huang_eval_residual(KernelGlobals kg,
sin_theta(wi), cos_theta(wi), sin_theta(wo), cos_theta(wo), 4.0f * bsdf->roughness);
const float N = M_1_2PI_F;
return ((S_tt + S_trt) * sqr(inv_eta) / bsdf->extra->radius + S_trrt * M * N * M_2_PI_F) * res /
3.0f;
const float simpson_coeff = 2.0f / 3.0f * res;
return ((S_tt + S_trt) * sqr(inv_eta) + S_trrt * M * N * M_2_PI_F) * simpson_coeff;
}
ccl_device int bsdf_hair_huang_sample(const KernelGlobals kg,
@@ -635,20 +594,14 @@ ccl_device int bsdf_hair_huang_sample(const KernelGlobals kg,
/* Get `wi` in local coordinate. */
const float3 wi = bsdf->extra->wi;
const float2 sincos_phi_i = sincos_phi(wi);
const float sin_phi_i = sincos_phi_i.x;
const float cos_phi_i = sincos_phi_i.y;
/* Get minor axis, assuming major axis is 1. */
const float b = bsdf->aspect_ratio;
const bool is_circular = (b == 1.0f);
const float h = sample_h * 2.0f - 1.0f;
const float gamma_mi = is_circular ?
asinf(h) :
atan2f(cos_phi_i, -b * sin_phi_i) -
acosf(h * bsdf->extra->radius *
inversesqrtf(sqr(cos_phi_i) + sqr(b * sin_phi_i)));
/* Sample `h` for farfield model, as the computed intersection might have numerical issues. */
const float h_div_r = is_nearfield(bsdf) ? bsdf->h / bsdf->extra->radius :
(sample_h * 2.0f - 1.0f);
const float gamma_mi = h_to_gamma(h_div_r, b, wi);
/* Macronormal. */
const float3 wmi_ = sphg_dir(0, gamma_mi, b);
@@ -717,7 +670,7 @@ ccl_device int bsdf_hair_huang_sample(const KernelGlobals kg,
wtt = refract_angle(-wt, wh2, cos_theta_t2, bsdf->eta);
if (dot(wmt, -wtt) > 0.0f && cos_theta_t2 != 0.0f && microfacet_visible(-wtt, wmt_, wh2)) {
if (dot(wmt, -wtt) > 0.0f && T2 > 0.0f && microfacet_visible(-wtt, wmt_, wh2)) {
TT = bsdf->extra->TT * T1 * A_t * T2 * scale2 * bsdf_Go(roughness2, cos_mi2, dot(wmt, -wtt));
}
@@ -746,9 +699,7 @@ ccl_device int bsdf_hair_huang_sample(const KernelGlobals kg,
const float T3 = 1.0f - R3;
if (cos_theta_t3 != 0.0f &&
microfacet_visible(wtr, -wtrt, make_float3(wmtr.x, 0.0f, wmtr.z), wh3))
{
if (T3 > 0.0f && microfacet_visible(wtr, -wtrt, make_float3(wmtr.x, 0.0f, wmtr.z), wh3)) {
TRT = bsdf->extra->TRT * TR * make_spectrum(T3) *
bsdf_Go(roughness2, cos_mi3, dot(wmtr, -wtrt));
}
@@ -760,8 +711,8 @@ ccl_device int bsdf_hair_huang_sample(const KernelGlobals kg,
/* Sample `theta_o`. */
const float rand_theta = max(lcg_step_float(&sd->lcg_state), 1e-5f);
const float fac = 1.0f +
bsdf->roughness *
logf(rand_theta + (1.0f - rand_theta) * expf(-2.0f / bsdf->roughness));
4.0f * bsdf->roughness *
logf(rand_theta + (1.0f - rand_theta) * expf(-0.5f / bsdf->roughness));
const float sin_theta_o = -fac * sin_theta(wi) +
cos_from_sin(fac) *
cosf(M_2PI_F * lcg_step_float(&sd->lcg_state)) * cos_theta(wi);
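A hedged reading of the updated `fac` term: with beta = 4 * roughness it is the closed-form inverse-CDF sample of a density proportional to expf((x - 1) / beta) on [-1, 1]; the old expression corresponds to beta = roughness. In isolation:

/* Inverse-CDF sampling of p(x) proportional to expf((x - 1) / beta) on [-1, 1].
 * With beta = 4 * roughness, expf(-2.0f / beta) == expf(-0.5f / roughness),
 * recovering the patch's expression. Sanity checks: rand = 1 gives x = 1,
 * rand -> 0 gives x -> -1. */
ccl_device_inline float sample_trimmed_exp(const float beta, const float rand)
{
  return 1.0f + beta * logf(rand + (1.0f - rand) * expf(-2.0f / beta));
}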
@ -855,9 +806,74 @@ ccl_device Spectrum bsdf_hair_huang_eval(KernelGlobals kg,
/* TODO: better estimation of the pdf */
*pdf = 1.0f;
/* Early detection of `dot(wo, wmo) < 0`. */
const float tan_tilt = tanf(bsdf->tilt);
if (tan_tilt * tan_theta(local_O) < -1.0f) {
return zero_spectrum();
}
/* Compute visible azimuthal range from the incoming direction. */
const float half_span = acosf(fmaxf(-tan_tilt * tan_theta(local_I), 0.0f));
if (isnan_safe(half_span)) {
/* Early detection of `dot(wi, wmi) < 0`. */
return zero_spectrum();
}
const float r = bsdf->extra->radius;
const float b = bsdf->aspect_ratio;
const float phi_i = (b == 1.0f) ? 0.0f : dir_phi(local_I);
float gamma_m_min = to_gamma(phi_i - half_span, b);
float gamma_m_max = to_gamma(phi_i + half_span, b);
if (gamma_m_max < gamma_m_min) {
gamma_m_max += M_2PI_F;
}
/* Prevent numerical issues at the boundary. */
gamma_m_min += 1e-3f;
gamma_m_max -= 1e-3f;
/* Length of the integral interval. */
float dh = 2.0f * r;
if (is_nearfield(bsdf)) {
/* Reduce the integration interval to the subset that's visible to the current pixel.
* Inspired by [An Efficient and Practical Near and Far Field Fur Reflectance Model]
* (https://sites.cs.ucsb.edu/~lingqi/publications/paper_fur2.pdf) by Ling-Qi Yan, Henrik Wann
* Jensen and Ravi Ramamoorthi. */
const float h_max = min(bsdf->h + bsdf->extra->pixel_coverage, r);
const float h_min = max(bsdf->h - bsdf->extra->pixel_coverage, -r);
/* At the boundaries the hair might not cover the whole pixel. */
dh = h_max - h_min;
float nearfield_gamma_min = h_to_gamma(h_max / r, bsdf->aspect_ratio, local_I);
float nearfield_gamma_max = h_to_gamma(h_min / r, bsdf->aspect_ratio, local_I);
if (nearfield_gamma_max < nearfield_gamma_min) {
nearfield_gamma_max += M_2PI_F;
}
/* Wrap range to compute the intersection. */
if ((gamma_m_max - nearfield_gamma_min) > M_2PI_F) {
gamma_m_min -= M_2PI_F;
gamma_m_max -= M_2PI_F;
}
else if ((nearfield_gamma_max - gamma_m_min) > M_2PI_F) {
nearfield_gamma_min -= M_2PI_F;
nearfield_gamma_max -= M_2PI_F;
}
gamma_m_min = fmaxf(gamma_m_min, nearfield_gamma_min);
gamma_m_max = fminf(gamma_m_max, nearfield_gamma_max);
}
bsdf->extra->gamma_m_min = gamma_m_min;
bsdf->extra->gamma_m_max = gamma_m_max;
const float projected_area = cos_theta(local_I) * dh;
return (bsdf_hair_huang_eval_r(kg, sc, local_I, local_O) +
bsdf_hair_huang_eval_residual(kg, sc, local_I, local_O, sd->lcg_state)) /
cos_theta(local_I);
projected_area;
}
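The wrap handling above is the subtle part: both angular ranges are stored as (min, max) with max possibly pushed past min by a +2*pi adjustment, so one range may need shifting by a full turn before a plain min/max clamp yields the intersection. The same idea in isolation (illustrative helper, not kernel code):

/* Intersect two angular ranges [a_min, a_max] and [b_min, b_max], each already
 * normalized so that min <= max. */
ccl_device_inline void intersect_wrapped_ranges(
    float a_min, float a_max, float b_min, float b_max, float *r_min, float *r_max)
{
  if (a_max - b_min > M_2PI_F) {
    a_min -= M_2PI_F;
    a_max -= M_2PI_F;
  }
  else if (b_max - a_min > M_2PI_F) {
    b_min -= M_2PI_F;
    b_max -= M_2PI_F;
  }
  *r_min = fmaxf(a_min, b_min);
  *r_max = fminf(a_max, b_max);
}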
/* Implements Filter Glossy by capping the effective roughness. */

View File

@ -135,18 +135,26 @@ ccl_device_forceinline bool triangle_light_sample(KernelGlobals kg,
const float3 e1 = V[2] - V[0];
const float3 e2 = V[2] - V[1];
const float longest_edge_squared = max(len_squared(e0), max(len_squared(e1), len_squared(e2)));
const float3 N0 = cross(e0, e1);
float3 N0 = cross(e0, e1);
/* Flip normal if necessary. */
const int object_flag = kernel_data_fetch(object_flag, object);
if (object_flag & SD_OBJECT_NEGATIVE_SCALE) {
N0 = -N0;
}
/* Do not draw samples from the side without MIS. */
ls->shader = kernel_data_fetch(tri_shader, prim);
const float distance_to_plane = dot(N0, V[0] - P) / dot(N0, N0);
const int ls_shader_flag = kernel_data_fetch(shaders, ls->shader & SHADER_MASK).flags;
if (!(ls_shader_flag & (distance_to_plane > 0 ? SD_MIS_BACK : SD_MIS_FRONT))) {
return false;
}
float Nl = 0.0f;
ls->Ng = safe_normalize_len(N0, &Nl);
const float area = 0.5f * Nl;
/* flip normal if necessary */
const int object_flag = kernel_data_fetch(object_flag, object);
if (object_flag & SD_OBJECT_NEGATIVE_SCALE) {
ls->Ng = -ls->Ng;
}
ls->eval_fac = 1.0f;
ls->shader = kernel_data_fetch(tri_shader, prim);
ls->object = object;
ls->prim = prim;
ls->lamp = LAMP_NONE;
@ -154,8 +162,6 @@ ccl_device_forceinline bool triangle_light_sample(KernelGlobals kg,
ls->type = LIGHT_TRIANGLE;
ls->group = object_lightgroup(kg, object);
float distance_to_plane = fabsf(dot(N0, V[0] - P) / dot(N0, N0));
if (!in_volume_segment && (longest_edge_squared > distance_to_plane * distance_to_plane)) {
/* A modified version of James Arvo, "Stratified Sampling of Spherical Triangles"
* http://www.graphics.cornell.edu/pubs/1995/Arv95c.pdf */

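For reference, the sign convention in the MIS early-out added above: `dot(N0, V[0] - P)` is positive exactly when the shading point P lies on the side the geometric normal points away from, so a positive `distance_to_plane` means only the back face is visible and `SD_MIS_BACK` is the flag that must be set. Only the sign matters, which is why dividing by `dot(N0, N0)` rather than the length is sufficient. A hedged restatement:

/* True when P sees the back face of the triangle's plane (illustrative). */
ccl_device_inline bool point_sees_back_face(const float3 N0, const float3 V0, const float3 P)
{
  return dot(N0, V0 - P) > 0.0f;
}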
View File

@ -988,6 +988,21 @@ ccl_device void osl_closure_hair_huang_setup(KernelGlobals kg,
bsdf->extra->TT = closure->tt_lobe;
bsdf->extra->TRT = closure->trt_lobe;
bsdf->extra->pixel_coverage = 1.0f;
/* For camera rays, check if the hair covers more than one pixel, in which case a nearfield model
* is needed to prevent a ribbon-like appearance. */
if ((path_flag & PATH_RAY_CAMERA) && (sd->type & PRIMITIVE_CURVE)) {
/* Interpolate radius between curve keys. */
const KernelCurve kcurve = kernel_data_fetch(curves, sd->prim);
const int k0 = kcurve.first_key + PRIMITIVE_UNPACK_SEGMENT(sd->type);
const int k1 = k0 + 1;
const float radius = mix(
kernel_data_fetch(curve_keys, k0).w, kernel_data_fetch(curve_keys, k1).w, sd->u);
bsdf->extra->pixel_coverage = 0.5f * sd->dP / radius;
}
sd->flag |= bsdf_hair_huang_setup(sd, bsdf, path_flag);
#endif
}
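`pixel_coverage` compares the camera ray footprint `sd->dP` against the hair diameter: a value near or above one means a single pixel spans the full hair width and the farfield average suffices, while a small value means the pixel sees only a slice and the nearfield window is needed. A hedged sketch of the window derived from it in `bsdf_hair_huang_eval` above (illustrative helper; unit conventions assumed to follow the patch):

/* Visible offset window on the hair cross-section for one pixel, clamped to
 * the physical range [-r, r]. `coverage` is the patch's `pixel_coverage`. */
ccl_device_inline void nearfield_h_window(
    const float h, const float r, const float coverage, float *r_h_min, float *r_h_max)
{
  *r_h_max = fminf(h + coverage, r);
  *r_h_min = fmaxf(h - coverage, -r);
}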

View File

@ -790,6 +790,21 @@ ccl_device
bsdf->extra->TT = fmaxf(0.0f, TT);
bsdf->extra->TRT = fmaxf(0.0f, TRT);
bsdf->extra->pixel_coverage = 1.0f;
/* For camera rays, check if the hair covers more than one pixel, in which case a
* nearfield model is needed to prevent a ribbon-like appearance. */
if ((path_flag & PATH_RAY_CAMERA) && (sd->type & PRIMITIVE_CURVE)) {
/* Interpolate radius between curve keys. */
const KernelCurve kcurve = kernel_data_fetch(curves, sd->prim);
const int k0 = kcurve.first_key + PRIMITIVE_UNPACK_SEGMENT(sd->type);
const int k1 = k0 + 1;
const float radius = mix(
kernel_data_fetch(curve_keys, k0).w, kernel_data_fetch(curve_keys, k1).w, sd->u);
bsdf->extra->pixel_coverage = 0.5f * sd->dP / radius;
}
bsdf->aspect_ratio = stack_load_float_default(stack, shared_ofs1, data_node3.w);
if (bsdf->aspect_ratio != 1.0f) {
/* Align ellipse major axis with the curve normal direction. */

View File

@ -1331,7 +1331,7 @@ void LightManager::device_update_lights(Device *device, DeviceScene *dscene, Sce
float3 dir = safe_normalize(light->get_dir());
if (light->use_mis && area != 0.0f) {
if (light->use_mis && area != 0.0f && light->spread > 0.0f) {
shader_id |= SHADER_USE_MIS;
}

View File

@ -108,7 +108,9 @@ LightTreeEmitter::LightTreeEmitter(Scene *scene,
/* TODO: need a better way to handle this when textures are used. */
float area = triangle_area(vertices[0], vertices[1], vertices[2]);
measure.energy = area * average(shader->emission_estimate);
/* Use absolute value of emission_estimate so lights with negative strength are properly
* supported in the light tree. */
measure.energy = area * average(fabs(shader->emission_estimate));
/* NOTE: the original implementation used the bounding box centroid, but triangle centroid
* seems to work fine */
@ -220,7 +222,7 @@ LightTreeEmitter::LightTreeEmitter(Scene *scene,
/* Use absolute value of energy so lights with negative strength are properly supported in the
* light tree. */
measure.energy = fabsf(average(strength));
measure.energy = average(fabs(strength));
light_set_membership = lamp->get_light_set_membership();
}
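The per-channel `fabs` matters for mixed-sign strengths, which the scalar `fabsf(average(...))` form silently zeroed out. A worked example:

const float3 strength = make_float3(1.0f, -1.0f, 0.0f);
/* Old: fabsf(average(strength)) == 0.0f        -> light dropped from the tree. */
/* New: average(fabs(strength))  == 2.0f / 3.0f -> light keeps nonzero importance. */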

View File

@ -264,7 +264,7 @@ void Shader::estimate_emission()
}
ShaderInput *surf = graph->output()->input("Surface");
emission_estimate = fabs(output_estimate_emission(surf->link, emission_is_constant));
emission_estimate = output_estimate_emission(surf->link, emission_is_constant);
if (is_zero(emission_estimate)) {
emission_sampling = EMISSION_SAMPLING_NONE;
@ -274,8 +274,9 @@ void Shader::estimate_emission()
* using a lot of memory in the light tree and potentially wasting samples
* where indirect light samples are sufficient.
* Possible optimization: estimate front and back emission separately. */
emission_sampling = (reduce_max(emission_estimate) > 0.5f) ? EMISSION_SAMPLING_FRONT_BACK :
EMISSION_SAMPLING_NONE;
emission_sampling = (reduce_max(fabs(emission_estimate)) > 0.5f) ?
EMISSION_SAMPLING_FRONT_BACK :
EMISSION_SAMPLING_NONE;
}
else {
emission_sampling = emission_sampling_method;

View File

@ -844,7 +844,7 @@ class PrincipledHairBsdfNode : public BsdfBaseNode {
NODE_SOCKET_API(float, random)
/* Selected coloring parametrization. */
NODE_SOCKET_API(NodePrincipledHairParametrization, parametrization)
/* Selected scattering model (near-/far-field). */
/* Selected scattering model (Chiang/Huang). */
NODE_SOCKET_API(NodePrincipledHairModel, model)
virtual int get_feature()

View File

@ -111,6 +111,19 @@ static const char *get_egl_error_message_string(EGLint error)
}
}
static void egl_print_error(const char *message, const EGLint error)
{
const char *code = get_egl_error_enum_string(error);
const char *msg = get_egl_error_message_string(error);
fprintf(stderr,
"%sEGL Error (0x%04X): %s: %s\n",
message,
uint(error),
code ? code : "<Unknown>",
msg ? msg : "<Unknown>");
}
static bool egl_chk(bool result,
const char *file = nullptr,
int line = 0,
@ -118,11 +131,9 @@ static bool egl_chk(bool result,
{
if (!result) {
const EGLint error = eglGetError();
#ifndef NDEBUG
const char *code = get_egl_error_enum_string(error);
const char *msg = get_egl_error_message_string(error);
#ifndef NDEBUG
fprintf(stderr,
"%s:%d: [%s] -> EGL Error (0x%04X): %s: %s\n",
file,
@ -132,11 +143,7 @@ static bool egl_chk(bool result,
code ? code : "<Unknown>",
msg ? msg : "<Unknown>");
#else
fprintf(stderr,
"EGL Error (0x%04X): %s: %s\n",
uint(error),
code ? code : "<Unknown>",
msg ? msg : "<Unknown>");
egl_print_error("", error);
(void)(file);
(void)(line);
(void)(text);
@ -343,33 +350,43 @@ GHOST_TSuccess GHOST_ContextEGL::initializeDrawingContext()
goto error;
}
if (!EGL_CHK(::eglInitialize(m_display, &egl_major, &egl_minor)) ||
(egl_major == 0 && egl_minor == 0))
{
/* We failed to create a regular render window; retry and see if we can create a headless
* render context. */
::eglTerminate(m_display);
const EGLBoolean init_display_result = ::eglInitialize(m_display, &egl_major, &egl_minor);
const EGLint init_display_error = (init_display_result) ? 0 : eglGetError();
const char *egl_extension_st = eglQueryString(EGL_NO_DISPLAY, EGL_EXTENSIONS);
assert(egl_extension_st != nullptr);
assert(egl_extension_st == nullptr ||
strstr(egl_extension_st, "EGL_MESA_platform_surfaceless") != nullptr);
if (egl_extension_st == nullptr ||
strstr(egl_extension_st, "EGL_MESA_platform_surfaceless") == nullptr)
{
goto error;
if (!init_display_result || (egl_major == 0 && egl_minor == 0)) {
/* We failed to create a regular render window; retry and see if we can create a headless
* render context. */
::eglTerminate(m_display);
const char *egl_extension_st = eglQueryString(EGL_NO_DISPLAY, EGL_EXTENSIONS);
assert(egl_extension_st != nullptr);
assert(egl_extension_st == nullptr ||
strstr(egl_extension_st, "EGL_MESA_platform_surfaceless") != nullptr);
if (egl_extension_st == nullptr ||
strstr(egl_extension_st, "EGL_MESA_platform_surfaceless") == nullptr)
{
egl_print_error("Failed to create display GPU context: ", init_display_error);
fprintf(
stderr,
"Failed to create headless GPU context: No EGL_MESA_platform_surfaceless extension");
goto error;
}
m_display = eglGetPlatformDisplayEXT(
EGL_PLATFORM_SURFACELESS_MESA, EGL_DEFAULT_DISPLAY, nullptr);
const EGLBoolean headless_result = ::eglInitialize(m_display, &egl_major, &egl_minor);
const EGLint init_headless_error = (headless_result) ? 0 : eglGetError();
if (!headless_result) {
egl_print_error("Failed to create display GPU context: ", init_display_error);
egl_print_error("Failed to create headless GPU context: ", init_headless_error);
goto error;
}
}
m_display = eglGetPlatformDisplayEXT(
EGL_PLATFORM_SURFACELESS_MESA, EGL_DEFAULT_DISPLAY, nullptr);
if (!EGL_CHK(::eglInitialize(m_display, &egl_major, &egl_minor))) {
goto error;
}
/* Because the first eglInitialize will print an error to the terminal, print a "success"
* message here to let the user know that we successfully recovered from the error. */
fprintf(stderr, "\nManaged to successfully fall back to surfaceless EGL rendering!\n\n");
}
#ifdef WITH_GHOST_DEBUG
fprintf(stderr, "EGL Version %d.%d\n", egl_major, egl_minor);
#endif

View File

@ -135,6 +135,12 @@ static int gwl_registry_handler_interface_slot_max();
static int gwl_registry_handler_interface_slot_from_string(const char *interface);
static const GWL_RegistryHandler *gwl_registry_handler_from_interface_slot(int interface_slot);
static bool xkb_compose_state_feed_and_get_utf8(
xkb_compose_state *compose_state,
xkb_state *state,
const xkb_keycode_t key,
char r_utf8_buf[sizeof(GHOST_TEventKeyData::utf8_buf)]);
#ifdef USE_EVENT_BACKGROUND_THREAD
static void gwl_display_event_thread_destroy(GWL_Display *display);
@ -1103,6 +1109,37 @@ static void gwl_seat_key_layout_active_state_update_mask(GWL_Seat *seat)
}
}
/** Callback that runs from GHOST's timer. */
static void gwl_seat_key_repeat_timer_fn(GHOST_ITimerTask *task, uint64_t time_ms)
{
GWL_KeyRepeatPlayload *payload = static_cast<GWL_KeyRepeatPlayload *>(task->getUserData());
GWL_Seat *seat = payload->seat;
wl_surface *wl_surface_focus = seat->keyboard.wl.surface_window;
if (UNLIKELY(wl_surface_focus == nullptr)) {
return;
}
GHOST_IWindow *win = ghost_wl_surface_user_data(wl_surface_focus);
GHOST_SystemWayland *system = seat->system;
const uint64_t event_ms = payload->time_ms_init + time_ms;
/* Calculate this value every time in case modifier keys are pressed. */
char utf8_buf[sizeof(GHOST_TEventKeyData::utf8_buf)] = {'\0'};
if (seat->xkb.compose_state &&
xkb_compose_state_feed_and_get_utf8(
seat->xkb.compose_state, seat->xkb.state, payload->key_code, utf8_buf))
{
/* `utf8_buf` has been filled by a compose action. */
}
else {
xkb_state_key_get_utf8(seat->xkb.state, payload->key_code, utf8_buf, sizeof(utf8_buf));
}
system->pushEvent_maybe_pending(new GHOST_EventKey(
event_ms, GHOST_kEventKeyDown, win, payload->key_data.gkey, true, utf8_buf));
}
/**
* \note Caller must lock `timer_mutex`.
*/
@ -1743,6 +1780,8 @@ static void ghost_wayland_log_handler(const char *msg, va_list arg)
__attribute__((format(printf, 1, 0)));
#endif
static bool ghost_wayland_log_handler_is_background = false;
/**
* Callback for WAYLAND to run when there is an error.
*
@ -1751,6 +1790,15 @@ static void ghost_wayland_log_handler(const char *msg, va_list arg)
*/
static void ghost_wayland_log_handler(const char *msg, va_list arg)
{
/* This is fine in background mode, where we will try to fall back to a headless GPU context.
* This happens when a render farm process runs without a user login session. */
if (ghost_wayland_log_handler_is_background &&
(strstr(msg, "error: XDG_RUNTIME_DIR not set in the environment") ||
strstr(msg, "error: XDG_RUNTIME_DIR is invalid or not set in the environment")))
{
return;
}
fprintf(stderr, "GHOST/Wayland: ");
vfprintf(stderr, msg, arg); /* Includes newline. */
@ -2761,13 +2809,11 @@ static void keyboard_depressed_state_key_event(GWL_Seat *seat,
}
static void keyboard_depressed_state_push_events_from_change(
GWL_Seat *seat, const GWL_KeyboardDepressedState &key_depressed_prev)
GWL_Seat *seat,
GHOST_IWindow *win,
const uint64_t event_ms,
const GWL_KeyboardDepressedState &key_depressed_prev)
{
GHOST_IWindow *win = ghost_wl_surface_user_data(seat->keyboard.wl.surface_window);
const GHOST_SystemWayland *system = seat->system;
/* Caller has no time-stamp, set from system. */
const uint64_t event_ms = system->getMilliSeconds();
/* Separate key up and down into separate passes so key down events always come after key up.
* Do this so users of GHOST can use the last pressed or released modifier to check
* if the modifier is held instead of counting modifiers pressed as is done here,
@ -4718,6 +4764,8 @@ static void keyboard_handle_enter(void *data,
CLOG_INFO(LOG, 2, "enter");
GWL_Seat *seat = static_cast<GWL_Seat *>(data);
GHOST_IWindow *win = ghost_wl_surface_user_data(wl_surface);
seat->keyboard.serial = serial;
seat->keyboard.wl.surface_window = wl_surface;
@ -4729,6 +4777,12 @@ static void keyboard_handle_enter(void *data,
GWL_KeyboardDepressedState key_depressed_prev = seat->key_depressed;
keyboard_depressed_state_reset(seat);
/* Keep track of the last held repeating key, start the repeat timer if one exists. */
struct {
uint32_t key = std::numeric_limits<uint32_t>::max();
xkb_keysym_t sym = 0;
} repeat;
uint32_t *key;
WL_ARRAY_FOR_EACH (key, keys) {
const xkb_keycode_t key_code = *key + EVDEV_OFFSET;
@ -4738,9 +4792,41 @@ static void keyboard_handle_enter(void *data,
if (gkey != GHOST_kKeyUnknown) {
keyboard_depressed_state_key_event(seat, gkey, GHOST_kEventKeyDown);
}
if (xkb_keymap_key_repeats(xkb_state_get_keymap(seat->xkb.state), key_code)) {
repeat.key = *key;
repeat.sym = sym;
}
}
keyboard_depressed_state_push_events_from_change(seat, key_depressed_prev);
/* Caller has no time-stamp, set from system. */
const uint64_t event_ms = seat->system->getMilliSeconds();
keyboard_depressed_state_push_events_from_change(seat, win, event_ms, key_depressed_prev);
if ((repeat.key != std::numeric_limits<uint32_t>::max()) && (seat->key_repeat.rate > 0)) {
/* Since the key is already held, immediately send a press event.
* This also ensures the key will be registered as pressed, see #117896. */
#ifdef USE_EVENT_BACKGROUND_THREAD
std::lock_guard lock_timer_guard{*seat->system->timer_mutex};
#endif
/* Should have been cleared on leave, set here just in case. */
if (UNLIKELY(seat->key_repeat.timer)) {
keyboard_handle_key_repeat_cancel(seat);
}
const xkb_keycode_t key_code = repeat.key + EVDEV_OFFSET;
const GHOST_TKey gkey = xkb_map_gkey_or_scan_code(repeat.sym, repeat.key);
GWL_KeyRepeatPlayload *key_repeat_payload = new GWL_KeyRepeatPlayload();
key_repeat_payload->seat = seat;
key_repeat_payload->key_code = key_code;
key_repeat_payload->key_data.gkey = gkey;
gwl_seat_key_repeat_timer_add(seat, gwl_seat_key_repeat_timer_fn, key_repeat_payload, false);
/* Ensure there is a press event on enter so the key is known to be held before any mouse
* button events, which may use a key-binding that depends on this key being held. */
gwl_seat_key_repeat_timer_fn(seat->key_repeat.timer, 0);
}
}
/**
@ -5031,33 +5117,7 @@ static void keyboard_handle_key(void *data,
}
if (key_repeat_payload) {
auto key_repeat_fn = [](GHOST_ITimerTask *task, uint64_t time_ms) {
GWL_KeyRepeatPlayload *payload = static_cast<GWL_KeyRepeatPlayload *>(task->getUserData());
GWL_Seat *seat = payload->seat;
if (wl_surface *wl_surface_focus = seat->keyboard.wl.surface_window) {
GHOST_IWindow *win = ghost_wl_surface_user_data(wl_surface_focus);
GHOST_SystemWayland *system = seat->system;
const uint64_t event_ms = payload->time_ms_init + time_ms;
/* Calculate this value every time in case modifier keys are pressed. */
char utf8_buf[sizeof(GHOST_TEventKeyData::utf8_buf)] = {'\0'};
if (seat->xkb.compose_state &&
xkb_compose_state_feed_and_get_utf8(
seat->xkb.compose_state, seat->xkb.state, payload->key_code, utf8_buf))
{
/* `utf8_buf` has been filled by a compose action. */
}
else {
xkb_state_key_get_utf8(seat->xkb.state, payload->key_code, utf8_buf, sizeof(utf8_buf));
}
system->pushEvent_maybe_pending(new GHOST_EventKey(
event_ms, GHOST_kEventKeyDown, win, payload->key_data.gkey, true, utf8_buf));
}
};
gwl_seat_key_repeat_timer_add(seat, key_repeat_fn, key_repeat_payload, true);
gwl_seat_key_repeat_timer_add(seat, gwl_seat_key_repeat_timer_fn, key_repeat_payload, true);
}
}
@ -6853,6 +6913,7 @@ GHOST_SystemWayland::GHOST_SystemWayland(bool background)
#endif
display_(new GWL_Display)
{
ghost_wayland_log_handler_is_background = background;
wl_log_set_handler_client(ghost_wayland_log_handler);
display_->system = this;

View File

@ -339,7 +339,7 @@ template<typename T> inline T *MEM_cnew(const char *allocation_name, const T &ot
public: \
void *operator new(size_t num_bytes) \
{ \
return MEM_mallocN(num_bytes, _id); \
return MEM_mallocN_aligned(num_bytes, __STDCPP_DEFAULT_NEW_ALIGNMENT__, _id); \
} \
void *operator new(size_t num_bytes, std::align_val_t alignment) \
{ \
@ -353,7 +353,7 @@ template<typename T> inline T *MEM_cnew(const char *allocation_name, const T &ot
} \
void *operator new[](size_t num_bytes) \
{ \
return MEM_mallocN(num_bytes, _id "[]"); \
return MEM_mallocN_aligned(num_bytes, __STDCPP_DEFAULT_NEW_ALIGNMENT__, _id "[]"); \
} \
void *operator new[](size_t num_bytes, std::align_val_t alignment) \
{ \

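Context for the `MEM_mallocN_aligned` switch: the C++ standard requires the plain (non-`align_val_t`) `operator new` to return storage aligned to `__STDCPP_DEFAULT_NEW_ALIGNMENT__`, a guarantee a `malloc`-style allocator does not necessarily provide (assumption about the guarded allocator's behavior). A standalone illustration of what that alignment buys, in plain standard C++ with illustrative names:

#include <cstdint>
#include <cstdio>

struct alignas(16) Vec4 {
  float v[4]; /* SSE-style member that misbehaves when under-aligned. */
};

int main()
{
  /* With alignof(Vec4) <= __STDCPP_DEFAULT_NEW_ALIGNMENT__, this calls the
   * plain operator new, which therefore must hand back 16-byte storage. */
  Vec4 *p = new Vec4();
  std::printf("16-byte aligned: %s\n",
              (reinterpret_cast<std::uintptr_t>(p) % 16 == 0) ? "yes" : "no");
  delete p;
  return 0;
}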
View File

@ -34,6 +34,7 @@ class Renderdoc {
RENDERDOC_WindowHandle window_handle);
void end_frame_capture(RENDERDOC_DevicePointer device_handle,
RENDERDOC_WindowHandle window_handle);
void set_frame_capture_title(const char *capture_title);
private:
/**
@ -44,4 +45,4 @@ class Renderdoc {
bool check_loaded();
void load();
};
} // namespace renderdoc::api
} // namespace renderdoc::api

View File

@ -33,6 +33,14 @@ void Renderdoc::end_frame_capture(RENDERDOC_DevicePointer device_handle,
renderdoc_api_->EndFrameCapture(device_handle, window_handle);
}
void Renderdoc::set_frame_capture_title(const char *title)
{
if (!check_loaded()) {
return;
}
renderdoc_api_->SetCaptureTitle(title);
}
bool Renderdoc::check_loaded()
{
switch (state_) {
@ -64,4 +72,4 @@ void Renderdoc::load()
#endif
}
} // namespace renderdoc::api
} // namespace renderdoc::api

1
lib/linux_x64 Submodule

@ -0,0 +1 @@
Subproject commit 014844518a90dd96685f6f3f114b9ce6b2522e43

1
lib/macos_arm64 Submodule

@ -0,0 +1 @@
Subproject commit 9e78c2e377a75ab8fa33430d8e6567f34b2b05b2

1
lib/macos_x64 Submodule

@ -0,0 +1 @@
Subproject commit 0a6d9ad4c6effa4227a9c0a324c7b588c1f9f71b

1
lib/windows_x64 Submodule

@ -0,0 +1 @@
Subproject commit 19b2b87f5ef0d8caa39e0882fbf832052974b785

View File

@ -56,11 +56,6 @@ if "%BUILD_VS_YEAR%" == "" (
)
)
if "%SVN_FIX%" == "1" (
call "%BLENDER_DIR%\build_files\windows\svn_fix.cmd"
goto EOF
)
if "%BUILD_UPDATE%" == "1" (
REM First see if the SVN libs are there and check them out if they are not.
call "%BLENDER_DIR%\build_files\windows\check_libraries.cmd"
@ -70,7 +65,7 @@ if "%BUILD_UPDATE%" == "1" (
REM running tends to be problematic. The python script that update_sources
REM calls later on may still try to switch branches and run into trouble,
REM but for *most* people this will side step the problem.
call "%BLENDER_DIR%\build_files\windows\svn_update.cmd"
call "%BLENDER_DIR%\build_files\windows\lib_update.cmd"
)
REM Finally call the python script shared between all platforms that updates git
REM and does any other SVN work like update the tests or branch switches

@ -0,0 +1 @@
Subproject commit 3a36a5abc7b8866fe1e4d23ddea9aed1ff01c80d

View File

@ -57,7 +57,7 @@ if not os.path.exists(blender_bin):
blender_bin = blender_app_path
icons_blend = (
os.path.join(ROOTDIR, "..", "lib", "resources", "icon_geom.blend"),
os.path.join(ROOTDIR, "release", "datafiles", "assets", "icons", "toolbar.blend"),
)

View File

@ -450,7 +450,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
** AutoPackage; version 1.0 -- http://autopackage.org
BinReloc - a library for creating relocatable executables
Written by: Hongli Lai <h.lai@chello.nl>
** LZMA SDK; version 5.2.5 -- https://www.7-zip.org/sdk.html
** LZMA SDK; version 23.01 -- https://www.7-zip.org/sdk.html
LZMA SDK: Public Domain
Creative Commons Legal Code
@ -3462,7 +3462,7 @@ SOFTWARE.
------
** {fmt}; version 10.0.0 -- https://github.com/fmtlib/fmt
** {fmt}; version 10.1.1 -- https://github.com/fmtlib/fmt
Copyright (c) 2012 - present, Victor Zverovich and {fmt} contributors
** Brotli; version 1.0.9 -- https://github.com/google/brotli
Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.

View File

@ -1312,24 +1312,6 @@ class RenderEngine(StructRNA, metaclass=RNAMeta):
__slots__ = ()
class UserExtensionRepo(StructRNA):
__slots__ = ()
@property
def directory(self):
"""Return ``directory`` or a default path derived from the users scripts path."""
if self.use_custom_directory:
return self.custom_directory
import bpy
import os
# TODO: this should eventually be accessed via `bpy.utils.user_resource('EXTENSIONS')`
# which points to the same location (by default).
if (path := bpy.utils.resource_path('USER')):
return os.path.join(path, "extensions", self.module)
# Unlikely this is ever encountered.
return ""
class HydraRenderEngine(RenderEngine):
__slots__ = ()

View File

@ -427,9 +427,9 @@ class PREFERENCES_OT_keyconfig_remove(Operator):
# Add-on Operators
class PREFERENCES_OT_addon_enable(Operator):
"""Enable an add-on"""
"""Turn on this extension"""
bl_idname = "preferences.addon_enable"
bl_label = "Enable Add-on"
bl_label = "Enable Extension"
module: StringProperty(
name="Module",
@ -473,9 +473,9 @@ class PREFERENCES_OT_addon_enable(Operator):
class PREFERENCES_OT_addon_disable(Operator):
"""Disable an add-on"""
"""Turn off this extension"""
bl_idname = "preferences.addon_disable"
bl_label = "Disable Add-on"
bl_label = "Disable Extension"
module: StringProperty(
name="Module",

View File

@ -666,6 +666,7 @@ class NODE_MT_category_GEO_VOLUME(Menu):
layout = self.layout
if context.preferences.experimental.use_new_volume_nodes:
layout.menu("NODE_MT_geometry_node_GEO_VOLUME_READ")
layout.menu("NODE_MT_geometry_node_volume_sample")
layout.menu("NODE_MT_geometry_node_GEO_VOLUME_WRITE")
layout.separator()
layout.menu("NODE_MT_geometry_node_GEO_VOLUME_OPERATIONS")
@ -693,6 +694,16 @@ class NODE_MT_geometry_node_GEO_VOLUME_WRITE(Menu):
node_add_menu.draw_assets_for_catalog(layout, "Volume/Write")
class NODE_MT_geometry_node_volume_sample(Menu):
bl_idname = "NODE_MT_geometry_node_volume_sample"
bl_label = "Sample"
def draw(self, context):
layout = self.layout
node_add_menu.add_node_type(layout, "GeometryNodeSampleGrid")
node_add_menu.draw_assets_for_catalog(layout, "Volume/Sample")
class NODE_MT_geometry_node_GEO_VOLUME_OPERATIONS(Menu):
bl_idname = "NODE_MT_geometry_node_GEO_VOLUME_OPERATIONS"
bl_label = "Operations"
@ -785,6 +796,7 @@ classes = (
NODE_MT_category_simulation,
NODE_MT_category_GEO_VOLUME,
NODE_MT_geometry_node_GEO_VOLUME_READ,
NODE_MT_geometry_node_volume_sample,
NODE_MT_geometry_node_GEO_VOLUME_WRITE,
NODE_MT_geometry_node_GEO_VOLUME_OPERATIONS,
NODE_MT_geometry_node_GEO_VOLUME_PRIMITIVES,

View File

@ -143,6 +143,11 @@ class DATA_PT_grease_pencil_layer_relations(LayerDataButtonsPanel, Panel):
row = layout.row(align=True)
row.prop_search(layer, "parent_bone", layer.parent.data, "bones", text="Bone")
layout.separator()
col = layout.row(align=True)
col.prop(layer, "pass_index")
classes = (
DATA_PT_context_grease_pencil,

View File

@ -196,6 +196,7 @@ class OBJECT_MT_modifier_add_deform(ModifierAddMenu, Menu):
if ob_type == 'VOLUME':
self.operator_modifier_add(layout, 'VOLUME_DISPLACE')
if ob_type == 'GREASEPENCIL':
self.operator_modifier_add(layout, 'GREASE_PENCIL_HOOK')
self.operator_modifier_add(layout, 'GREASE_PENCIL_LATTICE')
self.operator_modifier_add(layout, 'GREASE_PENCIL_NOISE')
self.operator_modifier_add(layout, 'GREASE_PENCIL_OFFSET')

View File

@ -606,6 +606,8 @@ class GreasePencilMaterialsPanel:
if is_grease_pencil_version3 and ob.mode == 'EDIT':
row = layout.row(align=True)
row.operator("grease_pencil.stroke_material_set", text="Assign")
row.operator("grease_pencil.material_select", text="Select").deselect = False
row.operator("grease_pencil.material_select", text="Deselect").deselect = True
elif not is_grease_pencil_version3 and ob.data.use_stroke_edit_mode:
row = layout.row(align=True)
row.operator("gpencil.stroke_change_color", text="Assign")

View File

@ -106,7 +106,7 @@ class PHYSICS_PT_add(PhysicButtonsPanel, Panel):
)
# cache-type can be 'PSYS' 'HAIR' 'FLUID' etc.
# cache-type can be 'PSYS' 'HAIR' etc. ('FLUID' uses its own cache)
def point_cache_ui(self, cache, enabled, cachetype):
layout = self.layout
@ -130,12 +130,8 @@ def point_cache_ui(self, cache, enabled, cachetype):
col.operator("ptcache.add", icon='ADD', text="")
col.operator("ptcache.remove", icon='REMOVE', text="")
if cachetype in {'PSYS', 'HAIR', 'FLUID'}:
if cachetype in {'PSYS', 'HAIR'}:
col = layout.column()
if cachetype == 'FLUID':
col.prop(cache, "use_library_path", text="Use Library Path")
col.prop(cache, "use_external")
if cache.use_external:
@ -149,14 +145,14 @@ def point_cache_ui(self, cache, enabled, cachetype):
col.alignment = 'RIGHT'
col.label(text=cache_info)
else:
if cachetype in {'FLUID', 'DYNAMIC_PAINT'}:
if cachetype == 'DYNAMIC_PAINT':
if not is_saved:
col = layout.column(align=True)
col.alignment = 'RIGHT'
col.label(text="Cache is disabled until the file is saved")
layout.enabled = False
if not cache.use_external or cachetype == 'FLUID':
if not cache.use_external:
col = layout.column(align=True)
if cachetype not in {'PSYS', 'DYNAMIC_PAINT'}:
@ -164,18 +160,18 @@ def point_cache_ui(self, cache, enabled, cachetype):
col.prop(cache, "frame_start", text="Simulation Start")
col.prop(cache, "frame_end")
if cachetype not in {'FLUID', 'CLOTH', 'DYNAMIC_PAINT', 'RIGID_BODY'}:
if cachetype not in {'CLOTH', 'DYNAMIC_PAINT', 'RIGID_BODY'}:
col.prop(cache, "frame_step")
cache_info = cache.info
if cachetype != 'FLUID' and cache_info: # avoid empty space.
if cache_info: # avoid empty space.
col = layout.column(align=True)
col.alignment = 'RIGHT'
col.label(text=cache_info)
can_bake = True
if cachetype not in {'FLUID', 'DYNAMIC_PAINT', 'RIGID_BODY'}:
if cachetype not in {'DYNAMIC_PAINT', 'RIGID_BODY'}:
if not is_saved:
col = layout.column(align=True)
col.alignment = 'RIGHT'

View File

@ -1597,7 +1597,7 @@ class USERPREF_UL_asset_libraries(UIList):
class USERPREF_UL_extension_repos(UIList):
def draw_item(self, _context, layout, _data, item, icon, _active_data, _active_propname, _index):
repo = item
icon = 'WORLD' if repo.use_remote_path else 'DISK_DRIVE'
icon = 'NETWORK_DRIVE' if repo.use_remote_path else 'DISK_DRIVE'
if self.layout_type in {'DEFAULT', 'COMPACT'}:
layout.prop(repo, "name", text="", icon=icon, emboss=False)
elif self.layout_type == 'GRID':
@ -1614,6 +1614,24 @@ class USERPREF_UL_extension_repos(UIList):
layout.prop(repo, "enabled", text="", emboss=False, icon='CHECKBOX_HLT' if repo.enabled else 'CHECKBOX_DEHLT')
def filter_items(self, _context, data, propname):
# Repositories has no index, converting to a list.
items = list(getattr(data, propname))
flags = [self.bitflag_filter_item] * len(items)
indices = [None] * len(items)
for index, orig_index in enumerate(sorted(
range(len(items)),
key=lambda i: (
items[i].use_remote_path is False,
items[i].name.lower(),
)
)):
indices[orig_index] = index
return flags, indices
# -----------------------------------------------------------------------------
# Save/Load Panels
@ -2103,9 +2121,13 @@ class USERPREF_PT_extensions_repos(Panel):
if active_repo.use_custom_directory:
if active_repo.custom_directory == "":
row.alert = True
row.prop(active_repo, "custom_directory", text="")
else:
row.active = False
row.prop(active_repo, "custom_directory", text="")
# Show the read-only directory property.
# Apart from being consistent with the custom directory UI,
# prefer a read-only property over a label, because the value is not necessarily
# valid UTF-8, which would raise a Python exception when passed in as text.
row.prop(active_repo, "directory", text="")
layout_panel.separator()

View File

@ -7,14 +7,6 @@ if(WITH_LEGACY_OPENGL)
endif()
if(WITH_CLANG_TIDY AND NOT MSVC)
if(NOT CMAKE_C_COMPILER_ID MATCHES "Clang")
message(WARNING "Currently Clang-Tidy might fail with GCC toolchain, switch to Clang toolchain if that happens")
if(COMMAND target_precompile_headers)
message(STATUS "Clang-Tidy and GCC precompiled headers are incompatible, disabling precompiled headers")
set(CMAKE_DISABLE_PRECOMPILE_HEADERS ON)
endif()
endif()
find_package(ClangTidy REQUIRED)
set(CMAKE_C_CLANG_TIDY
${CLANG_TIDY_EXECUTABLE};--extra-arg=-Wno-error=unknown-warning-option)

View File

@ -183,7 +183,7 @@ class AssetCatalogService {
*/
void update_catalog_path(CatalogID catalog_id, const AssetCatalogPath &new_catalog_path);
AssetCatalogTree *get_catalog_tree() const;
const AssetCatalogTree *get_catalog_tree() const;
/** Return true only if there are no catalogs known. */
bool is_empty() const;

View File

@ -28,7 +28,7 @@ class AssetCatalogTreeItem {
/** Container for child items. Uses a #std::map to keep items ordered by their name (i.e. their
* last catalog component). */
using ChildMap = std::map<std::string, AssetCatalogTreeItem>;
using ItemIterFn = FunctionRef<void(AssetCatalogTreeItem &)>;
using ItemIterFn = FunctionRef<void(const AssetCatalogTreeItem &)>;
private:
/** Child tree items, ordered by their names. */
@ -65,10 +65,10 @@ class AssetCatalogTreeItem {
/** Iterate over children calling \a callback for each of them, but do not recurse into their
* children. */
void foreach_child(ItemIterFn callback);
void foreach_child(ItemIterFn callback) const;
private:
static void foreach_item_recursive(ChildMap &children_, ItemIterFn callback);
static void foreach_item_recursive(const ChildMap &children_, ItemIterFn callback);
};
class AssetCatalogTree {
@ -82,15 +82,15 @@ class AssetCatalogTree {
/** Ensure an item representing \a path is in the tree, adding it if necessary. */
void insert_item(const AssetCatalog &catalog);
void foreach_item(ItemIterFn callback);
void foreach_item(ItemIterFn callback) const;
/** Iterate over root items calling \a callback for each of them, but do not recurse into their
* children. */
void foreach_root_item(ItemIterFn callback);
void foreach_root_item(ItemIterFn callback) const;
bool is_empty() const;
AssetCatalogTreeItem *find_item(const AssetCatalogPath &path);
AssetCatalogTreeItem *find_root_item(const AssetCatalogPath &path);
const AssetCatalogTreeItem *find_item(const AssetCatalogPath &path) const;
const AssetCatalogTreeItem *find_root_item(const AssetCatalogPath &path) const;
};
} // namespace blender::asset_system

View File

@ -179,6 +179,7 @@ class AssetLibrary {
Vector<AssetLibraryReference> all_valid_asset_library_refs();
AssetLibraryReference all_library_reference();
void all_library_reload_catalogs_if_dirty();
} // namespace blender::asset_system

View File

@ -238,7 +238,7 @@ void AssetCatalogService::prune_catalogs_by_path(const AssetCatalogPath &path)
}
this->rebuild_tree();
AssetLibraryService::get()->rebuild_all_library();
AssetLibraryService::get()->tag_all_library_catalogs_dirty();
}
void AssetCatalogService::prune_catalogs_by_id(const CatalogID catalog_id)
@ -273,7 +273,7 @@ void AssetCatalogService::update_catalog_path(const CatalogID catalog_id,
}
this->rebuild_tree();
AssetLibraryService::get()->rebuild_all_library();
AssetLibraryService::get()->tag_all_library_catalogs_dirty();
}
AssetCatalog *AssetCatalogService::create_catalog(const AssetCatalogPath &catalog_path)
@ -299,8 +299,7 @@ AssetCatalog *AssetCatalogService::create_catalog(const AssetCatalogPath &catalo
BLI_assert_msg(catalog_tree_, "An Asset Catalog tree should always exist.");
catalog_tree_->insert_item(*catalog_ptr);
AssetLibraryService::get()->rebuild_all_library();
AssetLibraryService::get()->tag_all_library_catalogs_dirty();
return catalog_ptr;
}
@ -575,7 +574,7 @@ std::unique_ptr<AssetCatalogDefinitionFile> AssetCatalogService::construct_cdf_i
return cdf;
}
AssetCatalogTree *AssetCatalogService::get_catalog_tree() const
const AssetCatalogTree *AssetCatalogService::get_catalog_tree() const
{
return catalog_tree_.get();
}
@ -655,7 +654,7 @@ void AssetCatalogService::undo()
redo_snapshots_.append(std::move(catalog_collection_));
catalog_collection_ = undo_snapshots_.pop_last();
this->rebuild_tree();
AssetLibraryService::get()->rebuild_all_library();
AssetLibraryService::get()->tag_all_library_catalogs_dirty();
}
void AssetCatalogService::redo()
@ -666,7 +665,7 @@ void AssetCatalogService::redo()
undo_snapshots_.append(std::move(catalog_collection_));
catalog_collection_ = redo_snapshots_.pop_last();
this->rebuild_tree();
AssetLibraryService::get()->rebuild_all_library();
AssetLibraryService::get()->tag_all_library_catalogs_dirty();
}
void AssetCatalogService::undo_push()

View File

@ -60,7 +60,7 @@ bool AssetCatalogTreeItem::has_children() const
return !children_.empty();
}
void AssetCatalogTreeItem::foreach_item_recursive(AssetCatalogTreeItem::ChildMap &children,
void AssetCatalogTreeItem::foreach_item_recursive(const AssetCatalogTreeItem::ChildMap &children,
const ItemIterFn callback)
{
for (auto &[key, item] : children) {
@ -69,7 +69,7 @@ void AssetCatalogTreeItem::foreach_item_recursive(AssetCatalogTreeItem::ChildMap
}
}
void AssetCatalogTreeItem::foreach_child(const ItemIterFn callback)
void AssetCatalogTreeItem::foreach_child(const ItemIterFn callback) const
{
for (auto &[key, item] : children_) {
callback(item);
@ -115,12 +115,12 @@ void AssetCatalogTree::insert_item(const AssetCatalog &catalog)
});
}
void AssetCatalogTree::foreach_item(AssetCatalogTreeItem::ItemIterFn callback)
void AssetCatalogTree::foreach_item(AssetCatalogTreeItem::ItemIterFn callback) const
{
AssetCatalogTreeItem::foreach_item_recursive(root_items_, callback);
}
void AssetCatalogTree::foreach_root_item(const ItemIterFn callback)
void AssetCatalogTree::foreach_root_item(const ItemIterFn callback) const
{
for (auto &[key, item] : root_items_) {
callback(item);
@ -132,10 +132,10 @@ bool AssetCatalogTree::is_empty() const
return root_items_.empty();
}
AssetCatalogTreeItem *AssetCatalogTree::find_item(const AssetCatalogPath &path)
const AssetCatalogTreeItem *AssetCatalogTree::find_item(const AssetCatalogPath &path) const
{
AssetCatalogTreeItem *result = nullptr;
this->foreach_item([&](AssetCatalogTreeItem &item) {
const AssetCatalogTreeItem *result = nullptr;
this->foreach_item([&](const AssetCatalogTreeItem &item) {
if (result) {
/* There is no way to stop iteration. */
return;
@ -147,10 +147,10 @@ AssetCatalogTreeItem *AssetCatalogTree::find_item(const AssetCatalogPath &path)
return result;
}
AssetCatalogTreeItem *AssetCatalogTree::find_root_item(const AssetCatalogPath &path)
const AssetCatalogTreeItem *AssetCatalogTree::find_root_item(const AssetCatalogPath &path) const
{
AssetCatalogTreeItem *result = nullptr;
this->foreach_root_item([&](AssetCatalogTreeItem &item) {
const AssetCatalogTreeItem *result = nullptr;
this->foreach_root_item([&](const AssetCatalogTreeItem &item) {
if (result) {
/* There is no way to stop iteration. */
return;

View File

@ -340,4 +340,10 @@ AssetLibraryReference all_library_reference()
return all_library_ref;
}
void all_library_reload_catalogs_if_dirty()
{
AssetLibraryService *service = AssetLibraryService::get();
service->reload_all_library_catalogs_if_dirty();
}
} // namespace blender::asset_system

View File

@ -57,7 +57,19 @@ void AllAssetLibrary::rebuild_catalogs_from_nested(const bool reload_nested_cata
false);
new_catalog_service->rebuild_tree();
this->catalog_service = std::move(new_catalog_service);
catalogs_dirty_ = false;
}
void AllAssetLibrary::tag_catalogs_dirty()
{
catalogs_dirty_ = true;
}
bool AllAssetLibrary::is_catalogs_dirty() const
{
return catalogs_dirty_;
}
void AllAssetLibrary::refresh_catalogs()

View File

@ -13,6 +13,8 @@
namespace blender::asset_system {
class AllAssetLibrary : public AssetLibrary {
bool catalogs_dirty_ = true;
public:
AllAssetLibrary();
@ -26,6 +28,9 @@ class AllAssetLibrary : public AssetLibrary {
* merge them into the in-memory representations.
*/
void rebuild_catalogs_from_nested(bool reload_nested_catalogs);
void tag_catalogs_dirty();
bool is_catalogs_dirty() const;
};
} // namespace blender::asset_system

View File

@ -187,15 +187,32 @@ AssetLibrary *AssetLibraryService::get_asset_library_current_file()
return lib;
}
void AssetLibraryService::rebuild_all_library()
void AssetLibraryService::tag_all_library_catalogs_dirty()
{
if (all_library_) {
all_library_->rebuild_catalogs_from_nested(false);
all_library_->tag_catalogs_dirty();
}
}
void AssetLibraryService::reload_all_library_catalogs_if_dirty()
{
if (all_library_ && all_library_->is_catalogs_dirty()) {
/* Don't reload catalogs from nested libraries from disk, just reflect their currently known
* state in the "All" library. Loading catalog changes from disk is only done with a
* #AS_asset_library_load()/#AssetLibraryService:get_asset_library() call. */
const bool reload_nested_catalogs = false;
all_library_->rebuild_catalogs_from_nested(reload_nested_catalogs);
}
}
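The change replaces eager rebuilds with tag-dirty plus rebuild-on-access, so a burst of catalog edits triggers at most one rebuild of the "All" library. The pattern in isolation (illustrative sketch, not the actual classes):

class CatalogCache {
  bool dirty_ = true;

 public:
  void tag_dirty()
  {
    dirty_ = true; /* O(1); safe to call once per mutation. */
  }
  void ensure_fresh()
  {
    if (dirty_) {
      this->rebuild(); /* Expensive merge; runs at most once per access burst. */
      dirty_ = false;
    }
  }

 private:
  void rebuild() {}
};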
AssetLibrary *AssetLibraryService::get_asset_library_all(const Main *bmain)
{
if (all_library_) {
CLOG_INFO(&LOG, 2, "get all lib (cached)");
all_library_->refresh_catalogs();
return all_library_.get();
}
/* (Re-)load all other asset libraries. */
for (AssetLibraryReference &library_ref : all_valid_asset_library_refs()) {
/* Skip self :) */
@ -207,12 +224,6 @@ AssetLibrary *AssetLibraryService::get_asset_library_all(const Main *bmain)
this->get_asset_library(bmain, library_ref);
}
if (all_library_) {
CLOG_INFO(&LOG, 2, "get all lib (cached)");
all_library_->refresh_catalogs();
return all_library_.get();
}
CLOG_INFO(&LOG, 2, "get all lib (loaded)");
all_library_ = std::make_unique<AllAssetLibrary>();

View File

@ -88,7 +88,13 @@ class AssetLibraryService {
AssetLibrary *get_asset_library_current_file();
/** Get the "All" asset library, which loads all others and merges them into one. */
AssetLibrary *get_asset_library_all(const Main *bmain);
void rebuild_all_library();
/**
* Tag the "All" asset library as needing to reload catalogs. This should be called when catalog
* data of other asset libraries changes. Note that changes to the catalog definition file on
* disk don't ever affect this "dirty" flag. It only reflects changes from this Blender session.
*/
void tag_all_library_catalogs_dirty();
void reload_all_library_catalogs_if_dirty();
/**
* Return the start position of the last blend-file extension in given path,

View File

@ -19,7 +19,7 @@
namespace blender::asset_system::tests {
/* UUIDs from lib/tests/asset_library/blender_assets.cats.txt */
/* UUIDs from tests/data/asset_library/blender_assets.cats.txt */
const bUUID UUID_ID_WITHOUT_PATH("e34dd2c5-5d2e-4668-9794-1db5de2a4f71");
const bUUID UUID_POSES_ELLIE("df60e1f6-2259-475b-93d9-69a1b4a8db78");
const bUUID UUID_POSES_ELLIE_WHITESPACE("b06132f6-5687-4751-a6dd-392740eb3c46");
@ -31,7 +31,7 @@ const bUUID UUID_POSES_RUZENA_FACE("82162c1f-06cc-4d91-a9bf-4f72c104e348");
const bUUID UUID_WITHOUT_SIMPLENAME("d7916a31-6ca9-4909-955f-182ca2b81fa3");
const bUUID UUID_ANOTHER_RUZENA("00000000-d9fa-4b91-b704-e6af1f1339ef");
/* UUIDs from lib/tests/asset_library/modified_assets.cats.txt */
/* UUIDs from tests/data/asset_library/modified_assets.cats.txt */
const bUUID UUID_AGENT_47("c5744ba5-43f5-4f73-8e52-010ad4a61b34");
/* Subclass that adds accessors such that protected fields can be used in tests. */
@ -566,8 +566,8 @@ TEST_F(AssetCatalogTest, delete_catalog_leaf)
"path/without/simplename",
};
AssetCatalogTree *tree = service.get_catalog_tree();
AssetCatalogTreeTestFunctions::expect_tree_items(tree, expected_paths);
const AssetCatalogTree *tree = service.get_catalog_tree();
AssetCatalogTreeTestFunctions::expect_tree_items(*tree, expected_paths);
}
TEST_F(AssetCatalogTest, delete_catalog_parent_by_id)
@ -620,8 +620,8 @@ TEST_F(AssetCatalogTest, delete_catalog_parent_by_path)
"path/without/simplename",
};
AssetCatalogTree *tree = service.get_catalog_tree();
AssetCatalogTreeTestFunctions::expect_tree_items(tree, expected_paths);
const AssetCatalogTree *tree = service.get_catalog_tree();
AssetCatalogTreeTestFunctions::expect_tree_items(*tree, expected_paths);
}
TEST_F(AssetCatalogTest, delete_catalog_write_to_disk)

View File

@ -22,7 +22,7 @@ TEST_F(AssetCatalogTreeTest, insert_item_into_tree)
std::unique_ptr<AssetCatalog> catalog_empty_path = AssetCatalog::from_path("");
tree.insert_item(*catalog_empty_path);
expect_tree_items(&tree, {});
expect_tree_items(tree, {});
}
{
@ -30,12 +30,12 @@ TEST_F(AssetCatalogTreeTest, insert_item_into_tree)
std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path("item");
tree.insert_item(*catalog);
expect_tree_items(&tree, {"item"});
expect_tree_items(tree, {"item"});
/* Insert child after parent already exists. */
std::unique_ptr<AssetCatalog> child_catalog = AssetCatalog::from_path("item/child");
tree.insert_item(*catalog);
expect_tree_items(&tree, {"item", "item/child"});
expect_tree_items(tree, {"item", "item/child"});
std::vector<AssetCatalogPath> expected_paths;
@ -45,7 +45,7 @@ TEST_F(AssetCatalogTreeTest, insert_item_into_tree)
tree.insert_item(*catalog);
expected_paths = {
"item", "item/child", "item/child/grandchild", "item/child/grandchild/grandgrandchild"};
expect_tree_items(&tree, expected_paths);
expect_tree_items(tree, expected_paths);
std::unique_ptr<AssetCatalog> root_level_catalog = AssetCatalog::from_path("root level");
tree.insert_item(*catalog);
@ -54,7 +54,7 @@ TEST_F(AssetCatalogTreeTest, insert_item_into_tree)
"item/child/grandchild",
"item/child/grandchild/grandgrandchild",
"root level"};
expect_tree_items(&tree, expected_paths);
expect_tree_items(tree, expected_paths);
}
{
@ -62,7 +62,7 @@ TEST_F(AssetCatalogTreeTest, insert_item_into_tree)
std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path("item/child");
tree.insert_item(*catalog);
expect_tree_items(&tree, {"item", "item/child"});
expect_tree_items(tree, {"item", "item/child"});
}
{
@ -70,7 +70,7 @@ TEST_F(AssetCatalogTreeTest, insert_item_into_tree)
std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path("white space");
tree.insert_item(*catalog);
expect_tree_items(&tree, {"white space"});
expect_tree_items(tree, {"white space"});
}
{
@ -78,7 +78,7 @@ TEST_F(AssetCatalogTreeTest, insert_item_into_tree)
std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path("/item/white space");
tree.insert_item(*catalog);
expect_tree_items(&tree, {"item", "item/white space"});
expect_tree_items(tree, {"item", "item/white space"});
}
{
@ -86,11 +86,11 @@ TEST_F(AssetCatalogTreeTest, insert_item_into_tree)
std::unique_ptr<AssetCatalog> catalog_unicode_path = AssetCatalog::from_path("Ružena");
tree.insert_item(*catalog_unicode_path);
expect_tree_items(&tree, {"Ružena"});
expect_tree_items(tree, {"Ružena"});
catalog_unicode_path = AssetCatalog::from_path("Ružena/Ružena");
tree.insert_item(*catalog_unicode_path);
expect_tree_items(&tree, {"Ružena", "Ružena/Ružena"});
expect_tree_items(tree, {"Ružena", "Ružena/Ružena"});
}
}
@ -117,8 +117,8 @@ TEST_F(AssetCatalogTreeTest, load_single_file_into_tree)
"path/without/simplename", /* From CDF. */
};
AssetCatalogTree *tree = service.get_catalog_tree();
expect_tree_items(tree, expected_paths);
const AssetCatalogTree *tree = service.get_catalog_tree();
expect_tree_items(*tree, expected_paths);
}
TEST_F(AssetCatalogTreeTest, foreach_in_tree)
@ -127,13 +127,13 @@ TEST_F(AssetCatalogTreeTest, foreach_in_tree)
AssetCatalogTree tree{};
const std::vector<AssetCatalogPath> no_catalogs{};
expect_tree_items(&tree, no_catalogs);
expect_tree_root_items(&tree, no_catalogs);
expect_tree_items(tree, no_catalogs);
expect_tree_root_items(tree, no_catalogs);
/* Need a root item to check child items. */
std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path("something");
tree.insert_item(*catalog);
tree.foreach_root_item([&no_catalogs](AssetCatalogTreeItem &item) {
expect_tree_item_child_items(&item, no_catalogs);
tree.foreach_root_item([&no_catalogs](const AssetCatalogTreeItem &item) {
expect_tree_item_child_items(item, no_catalogs);
});
}
@ -141,8 +141,8 @@ TEST_F(AssetCatalogTreeTest, foreach_in_tree)
service.load_from_disk(asset_library_root_ + SEP_STR + "blender_assets.cats.txt");
std::vector<AssetCatalogPath> expected_root_items{{"character", "path"}};
AssetCatalogTree *tree = service.get_catalog_tree();
expect_tree_root_items(tree, expected_root_items);
const AssetCatalogTree *tree = service.get_catalog_tree();
expect_tree_root_items(*tree, expected_root_items);
/* Test if the direct children of the root item are what's expected. */
std::vector<std::vector<AssetCatalogPath>> expected_root_child_items = {
@ -152,8 +152,8 @@ TEST_F(AssetCatalogTreeTest, foreach_in_tree)
{"path/without"},
};
int i = 0;
tree->foreach_root_item([&expected_root_child_items, &i](AssetCatalogTreeItem &item) {
expect_tree_item_child_items(&item, expected_root_child_items[i]);
tree->foreach_root_item([&expected_root_child_items, &i](const AssetCatalogTreeItem &item) {
expect_tree_item_child_items(item, expected_root_child_items[i]);
i++;
});
}

View File

@ -99,7 +99,7 @@ class AssetCatalogTreeTestFunctions {
* Recursively iterate over all tree items using #AssetCatalogTree::foreach_item() and check if
* the items map exactly to \a expected_paths.
*/
static void expect_tree_items(AssetCatalogTree *tree,
static void expect_tree_items(const AssetCatalogTree &tree,
const std::vector<AssetCatalogPath> &expected_paths);
/**
@ -107,7 +107,7 @@ class AssetCatalogTreeTestFunctions {
* expected_paths. Similar to #assert_expected_tree_items() but calls
* #AssetCatalogTree::foreach_root_item() instead of #AssetCatalogTree::foreach_item().
*/
static void expect_tree_root_items(AssetCatalogTree *tree,
static void expect_tree_root_items(const AssetCatalogTree &tree,
const std::vector<AssetCatalogPath> &expected_paths);
/**
@ -115,7 +115,7 @@ class AssetCatalogTreeTestFunctions {
* expected_paths. Similar to #assert_expected_tree_items() but calls
* #AssetCatalogTreeItem::foreach_child() instead of #AssetCatalogTree::foreach_item().
*/
static void expect_tree_item_child_items(AssetCatalogTreeItem *parent_item,
static void expect_tree_item_child_items(const AssetCatalogTreeItem &parent_item,
const std::vector<AssetCatalogPath> &expected_paths);
};
@ -139,10 +139,10 @@ static inline void compare_item_with_path(const AssetCatalogPath &expected_path,
}
inline void AssetCatalogTreeTestFunctions::expect_tree_items(
AssetCatalogTree *tree, const std::vector<AssetCatalogPath> &expected_paths)
const AssetCatalogTree &tree, const std::vector<AssetCatalogPath> &expected_paths)
{
int i = 0;
tree->foreach_item([&](const AssetCatalogTreeItem &actual_item) {
tree.foreach_item([&](const AssetCatalogTreeItem &actual_item) {
ASSERT_LT(i, expected_paths.size())
<< "More catalogs in tree than expected; did not expect " << actual_item.catalog_path();
compare_item_with_path(expected_paths[i], actual_item);
@ -151,10 +151,10 @@ inline void AssetCatalogTreeTestFunctions::expect_tree_items(
}
inline void AssetCatalogTreeTestFunctions::expect_tree_root_items(
AssetCatalogTree *tree, const std::vector<AssetCatalogPath> &expected_paths)
const AssetCatalogTree &tree, const std::vector<AssetCatalogPath> &expected_paths)
{
int i = 0;
tree->foreach_root_item([&](const AssetCatalogTreeItem &actual_item) {
tree.foreach_root_item([&](const AssetCatalogTreeItem &actual_item) {
ASSERT_LT(i, expected_paths.size())
<< "More catalogs in tree root than expected; did not expect "
<< actual_item.catalog_path();
@ -164,10 +164,10 @@ inline void AssetCatalogTreeTestFunctions::expect_tree_root_items(
}
inline void AssetCatalogTreeTestFunctions::expect_tree_item_child_items(
AssetCatalogTreeItem *parent_item, const std::vector<AssetCatalogPath> &expected_paths)
const AssetCatalogTreeItem &parent_item, const std::vector<AssetCatalogPath> &expected_paths)
{
int i = 0;
parent_item->foreach_child([&](const AssetCatalogTreeItem &actual_item) {
parent_item.foreach_child([&](const AssetCatalogTreeItem &actual_item) {
ASSERT_LT(i, expected_paths.size())
<< "More catalogs in tree item than expected; did not expect "
<< actual_item.catalog_path();

View File

@ -9,7 +9,9 @@
#pragma once
#include "BLI_compiler_attrs.h"
#include "BLI_string_ref.hh"
#include "BLI_sys_types.h"
#include "BLI_vector.hh"
/* Name of sub-directory inside #BLENDER_DATAFILES that contains font files. */
#define BLF_DATAFILES_FONTS_DIR "fonts"
@ -254,6 +256,10 @@ void BLF_rotation(int fontid, float angle);
void BLF_clipping(int fontid, int xmin, int ymin, int xmax, int ymax);
void BLF_wordwrap(int fontid, int wrap_width);
blender::Vector<blender::StringRef> BLF_string_wrap(int fontid,
blender::StringRef str,
const int max_pixel_width);
#if BLF_BLUR_ENABLE
void BLF_blur(int fontid, int size);
#endif

View File

@ -23,8 +23,6 @@ set(SRC
intern/blf_font_default.cc
intern/blf_glyph.cc
intern/blf_thumbs.cc
intern/blf_util.cc
BLF_api.hh
intern/blf_internal.hh
intern/blf_internal_types.hh

View File

@ -935,6 +935,17 @@ void BLF_draw_buffer(int fontid, const char *str, const size_t str_len)
BLF_draw_buffer_ex(fontid, str, str_len, nullptr);
}
blender::Vector<blender::StringRef> BLF_string_wrap(int fontid,
blender::StringRef str,
const int max_pixel_width)
{
FontBLF *font = blf_get(fontid);
if (!font) {
return {};
}
return blf_font_string_wrap(font, str, max_pixel_width);
}
char *BLF_display_name_from_file(const char *filepath)
{
/* While listing font directories this function can be called simultaneously from a greater

View File

@ -37,6 +37,7 @@
#include "BLI_string_cursor_utf8.h"
#include "BLI_string_utf8.h"
#include "BLI_threads.h"
#include "BLI_vector.hh"
#include "BLF_api.hh"
@ -1085,6 +1086,7 @@ void blf_str_offset_to_glyph_bounds(FontBLF *font,
static void blf_font_wrap_apply(FontBLF *font,
const char *str,
const size_t str_len,
const int max_pixel_width,
ResultBLF *r_info,
void (*callback)(FontBLF *font,
GlyphCacheBLF *gc,
@ -1109,7 +1111,7 @@ static void blf_font_wrap_apply(FontBLF *font,
struct WordWrapVars {
ft_pix wrap_width;
size_t start, last[2];
} wrap = {font->wrap_width != -1 ? ft_pix_from_int(font->wrap_width) : INT_MAX, 0, {0, 0}};
} wrap = {max_pixel_width != -1 ? ft_pix_from_int(max_pixel_width) : INT_MAX, 0, {0, 0}};
// printf("%s wrapping (%d, %d) `%s`:\n", __func__, str_len, strlen(str), str);
while ((i < str_len) && str[i]) {
@ -1198,7 +1200,8 @@ static void blf_font_draw__wrap_cb(FontBLF *font,
}
void blf_font_draw__wrap(FontBLF *font, const char *str, const size_t str_len, ResultBLF *r_info)
{
blf_font_wrap_apply(font, str, str_len, r_info, blf_font_draw__wrap_cb, nullptr);
blf_font_wrap_apply(
font, str, str_len, font->wrap_width, r_info, blf_font_draw__wrap_cb, nullptr);
}
/** Utility for #blf_font_boundbox__wrap. */
@ -1223,7 +1226,8 @@ void blf_font_boundbox__wrap(
box->ymin = 32000;
box->ymax = -32000;
blf_font_wrap_apply(font, str, str_len, r_info, blf_font_boundbox_wrap_cb, box);
blf_font_wrap_apply(
font, str, str_len, font->wrap_width, r_info, blf_font_boundbox_wrap_cb, box);
}
/** Utility for #blf_font_draw_buffer__wrap. */
@ -1241,7 +1245,37 @@ void blf_font_draw_buffer__wrap(FontBLF *font,
const size_t str_len,
ResultBLF *r_info)
{
blf_font_wrap_apply(font, str, str_len, r_info, blf_font_draw_buffer__wrap_cb, nullptr);
blf_font_wrap_apply(
font, str, str_len, font->wrap_width, r_info, blf_font_draw_buffer__wrap_cb, nullptr);
}
/** Wrap a blender::StringRef. */
static void blf_font_string_wrap_cb(FontBLF * /*font*/,
GlyphCacheBLF * /*gc*/,
const char *str,
const size_t str_len,
ft_pix /*pen_y*/,
void *str_list_ptr)
{
blender::Vector<blender::StringRef> *list = static_cast<blender::Vector<blender::StringRef> *>(
str_list_ptr);
blender::StringRef line(str, str + str_len);
list->append(line);
}
blender::Vector<blender::StringRef> blf_font_string_wrap(FontBLF *font,
blender::StringRef str,
int max_pixel_width)
{
blender::Vector<blender::StringRef> list;
blf_font_wrap_apply(font,
str.data(),
size_t(str.size()),
max_pixel_width,
nullptr,
blf_font_string_wrap_cb,
&list);
return list;
}
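A possible caller-side sketch of the new API (hedged; `fontid`, `text`, `x`, `y`, `max_width_px` and `line_height` are placeholders). The returned `StringRef`s point into the caller's string and are not null-terminated, hence drawing with an explicit length:

const blender::Vector<blender::StringRef> lines = BLF_string_wrap(fontid, text, max_width_px);
float line_y = y;
for (const blender::StringRef line : lines) {
  BLF_position(fontid, x, line_y, 0.0f);
  BLF_draw(fontid, line.data(), size_t(line.size()));
  line_y -= line_height;
}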
/** \} */

View File

@ -91,7 +91,7 @@ static GlyphCacheBLF *blf_glyph_cache_find(FontBLF *font)
static GlyphCacheBLF *blf_glyph_cache_new(FontBLF *font)
{
std::unique_ptr<GlyphCacheBLF> gc = std::make_unique<GlyphCacheBLF>(GlyphCacheBLF{});
std::unique_ptr<GlyphCacheBLF> gc = std::make_unique<GlyphCacheBLF>();
gc->size = font->size;
gc->bold = ((font->flags & BLF_BOLD) != 0);
@ -101,8 +101,6 @@ static GlyphCacheBLF *blf_glyph_cache_new(FontBLF *font)
gc->char_width = font->char_width;
gc->char_spacing = font->char_spacing;
memset(gc->bucket, 0, sizeof(gc->bucket));
blf_ensure_size(font);
/* Determine ideal fixed-width size for monospaced output. */
@ -146,11 +144,7 @@ void blf_glyph_cache_release(FontBLF *font)
GlyphCacheBLF::~GlyphCacheBLF()
{
for (uint i = 0; i < ARRAY_SIZE(this->bucket); i++) {
while (GlyphBLF *g = static_cast<GlyphBLF *>(BLI_pophead(&this->bucket[i]))) {
blf_glyph_free(g);
}
}
this->glyphs.clear_and_shrink();
if (this->texture) {
GPU_texture_free(this->texture);
}
@ -174,12 +168,10 @@ static GlyphBLF *blf_glyph_cache_find_glyph(const GlyphCacheBLF *gc,
uint charcode,
uint8_t subpixel)
{
GlyphBLF *g = static_cast<GlyphBLF *>(gc->bucket[blf_hash(charcode << 6 | subpixel)].first);
while (g) {
if (g->c == charcode && g->subpixel == subpixel) {
return g;
}
g = g->next;
const std::unique_ptr<GlyphBLF> *ptr = gc->glyphs.lookup_ptr_as(
GlyphCacheKey{charcode, subpixel});
if (ptr != nullptr) {
return ptr->get();
}
return nullptr;
}
@ -231,7 +223,7 @@ static GlyphBLF *blf_glyph_cache_add_glyph(FontBLF *font,
FT_UInt glyph_index,
uint8_t subpixel)
{
GlyphBLF *g = (GlyphBLF *)MEM_callocN(sizeof(GlyphBLF), "blf_glyph_get");
std::unique_ptr<GlyphBLF> g = std::make_unique<GlyphBLF>();
g->c = charcode;
g->idx = glyph_index;
g->advance_x = (ft_pix)glyph->advance.x;
@ -344,9 +336,9 @@ static GlyphBLF *blf_glyph_cache_add_glyph(FontBLF *font,
}
}
BLI_addhead(&(gc->bucket[blf_hash(g->c << 6 | subpixel)]), g);
return g;
GlyphCacheKey key = {charcode, subpixel};
gc->glyphs.add(key, std::move(g));
return gc->glyphs.lookup(key).get();
}
/** \} */
@ -1326,12 +1318,11 @@ GlyphBLF *blf_glyph_ensure_subpixel(FontBLF *font, GlyphCacheBLF *gc, GlyphBLF *
}
#endif
void blf_glyph_free(GlyphBLF *g)
GlyphBLF::~GlyphBLF()
{
if (g->bitmap) {
MEM_freeN(g->bitmap);
if (this->bitmap) {
MEM_freeN(this->bitmap);
}
MEM_freeN(g);
}
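With glyphs now held as std::unique_ptr values in the cache map, the explicit blf_glyph_free() pass over hash buckets disappears; a small sketch of the resulting ownership model, assuming the types from blf_internal_types.hh:

blender::Map<GlyphCacheKey, std::unique_ptr<GlyphBLF>> glyphs;
glyphs.add(GlyphCacheKey{uint('A'), 0}, std::make_unique<GlyphBLF>());
/* Clearing (or destroying) the map runs ~GlyphBLF() for every cached glyph,
 * which in turn frees the bitmap; no manual free loop is needed. */
glyphs.clear_and_shrink();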
/** \} */

View File

@ -8,6 +8,9 @@
#pragma once
#include "BLI_string_ref.hh"
#include "BLI_vector.hh"
struct FontBLF;
struct GlyphBLF;
struct GlyphCacheBLF;
@ -51,8 +54,6 @@ extern struct FontBLF *global_font[BLF_MAX_FONT];
void blf_batch_draw_begin(struct FontBLF *font);
void blf_batch_draw(void);
unsigned int blf_next_p2(unsigned int x);
unsigned int blf_hash(unsigned int val);
/**
* Some fonts have an additional file with metrics information;
* in general, the file extension is `.afm` or `.pfm`.
@ -96,6 +97,10 @@ void blf_font_draw__wrap(struct FontBLF *font,
size_t str_len,
struct ResultBLF *r_info);
blender::Vector<blender::StringRef> blf_font_string_wrap(FontBLF *font,
blender::StringRef str,
int max_pixel_width);
/**
* Use a fixed column width, but a UTF-8 character may occupy multiple columns.
*/
@ -193,7 +198,6 @@ float blf_character_to_curves(FontBLF *font,
struct ListBase *nurbsbase,
const float scale);
void blf_glyph_free(struct GlyphBLF *g);
void blf_glyph_draw(
struct FontBLF *font, struct GlyphCacheBLF *gc, struct GlyphBLF *g, int x, int y);

View File

@ -10,6 +10,7 @@
#include <mutex>
#include "BLI_map.hh"
#include "BLI_vector.hh"
#include "GPU_texture.h"
@ -116,6 +117,19 @@ struct KerningCacheBLF {
int ascii_table[KERNING_CACHE_TABLE_SIZE][KERNING_CACHE_TABLE_SIZE];
};
struct GlyphCacheKey {
uint charcode;
uint8_t subpixel;
friend bool operator==(const GlyphCacheKey &a, const GlyphCacheKey &b)
{
return a.charcode == b.charcode && a.subpixel == b.subpixel;
}
uint64_t hash() const
{
return blender::get_default_hash(charcode, subpixel);
}
};
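blender::Map picks up the member hash() through its default hasher, so GlyphCacheKey needs no separate hash specialization; a minimal sketch of the contract, assuming BLI_map.hh semantics:

GlyphCacheKey a{uint('A'), 0};
GlyphCacheKey b{uint('A'), 0};
BLI_assert(a == b && a.hash() == b.hash()); /* Equal keys must hash equally. */

blender::Map<GlyphCacheKey, int> map;
map.add(a, 1);
const int *value = map.lookup_ptr(b); /* Finds the entry added under the equal key `a`. */
BLI_assert(value && *value == 1);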
struct GlyphCacheBLF {
/** Font size. */
float size;
@ -132,7 +146,7 @@ struct GlyphCacheBLF {
int fixed_width;
/** The glyphs. */
ListBase bucket[257];
blender::Map<GlyphCacheKey, std::unique_ptr<GlyphBLF>> glyphs;
/** Texture array, to draw the glyphs. */
GPUTexture *texture;
@ -145,9 +159,6 @@ struct GlyphCacheBLF {
};
struct GlyphBLF {
GlyphBLF *next;
GlyphBLF *prev;
/** The character, as UTF-32. */
unsigned int c;
@ -192,6 +203,8 @@ struct GlyphBLF {
int pos[2];
GlyphCacheBLF *glyph_cache;
~GlyphBLF();
};
struct FontBufInfoBLF {

View File

@ -1,43 +0,0 @@
/* SPDX-FileCopyrightText: 2009 Blender Authors
*
* SPDX-License-Identifier: GPL-2.0-or-later */
/** \file
* \ingroup blf
*
* Internal utility API for BLF.
*/
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include "BLI_utildefines.h"
#include "blf_internal.hh"
uint blf_next_p2(uint x)
{
x -= 1;
x |= (x >> 16);
x |= (x >> 8);
x |= (x >> 4);
x |= (x >> 2);
x |= (x >> 1);
x += 1;
return x;
}
uint blf_hash(uint val)
{
uint key;
key = val;
key += ~(key << 16);
key ^= (key >> 5);
key += (key << 3);
key ^= (key >> 13);
key += ~(key << 9);
key ^= (key >> 17);
return key % 257;
}

View File

@ -176,7 +176,7 @@ enum {
BLENDER_SYSTEM_PYTHON = 54,
};
/* for BKE_appdir_folder_id_version only */
/** For #BKE_appdir_folder_id_version only. */
enum {
BLENDER_RESOURCE_PATH_USER = 0,
BLENDER_RESOURCE_PATH_LOCAL = 1,

View File

@ -76,6 +76,5 @@ PreviewImage *BKE_asset_metadata_preview_get_from_id(const AssetMetaData *asset_
void BKE_asset_metadata_write(BlendWriter *writer, AssetMetaData *asset_data);
void BKE_asset_metadata_read(BlendDataReader *reader, AssetMetaData *asset_data);
AssetWeakReference *BKE_asset_weak_reference_copy(AssetWeakReference *weak_ref);
void BKE_asset_weak_reference_write(BlendWriter *writer, const AssetWeakReference *weak_ref);
void BKE_asset_weak_reference_read(BlendDataReader *reader, AssetWeakReference *weak_ref);

View File

@ -29,7 +29,7 @@ extern "C" {
/* Blender file format version. */
#define BLENDER_FILE_VERSION BLENDER_VERSION
#define BLENDER_FILE_SUBVERSION 5
#define BLENDER_FILE_SUBVERSION 6
/* Minimum Blender version that supports reading file written with the current
* version. Older Blender versions will test this and cancel loading the file, showing a warning to

View File

@ -110,6 +110,7 @@ enum eCbEvent {
BKE_CB_EVT_EXTENSION_REPOS_UPDATE_POST,
BKE_CB_EVT_EXTENSION_REPOS_SYNC,
BKE_CB_EVT_EXTENSION_REPOS_UPGRADE,
BKE_CB_EVT_EXTENSION_DROP_URL,
BKE_CB_EVT_TOT,
};

View File

@ -812,4 +812,5 @@ void CustomData_debug_info_from_layers(const CustomData *data, const char *inden
namespace blender::bke {
std::optional<VolumeGridType> custom_data_type_to_volume_grid_type(eCustomDataType type);
std::optional<eCustomDataType> volume_grid_type_to_custom_data_type(VolumeGridType type);
} // namespace blender::bke

View File

@ -27,6 +27,7 @@
#include "BLI_math_matrix_types.hh"
#include "BLI_shared_cache.hh"
#include "BLI_vector.hh"
#include "BLI_virtual_array_fwd.hh"
#include "DNA_customdata_types.h"
@ -99,8 +100,7 @@ class Instances {
*/
Vector<InstanceReference> references_;
/** Transformation of the instances. */
Vector<float4x4> transforms_;
int instances_num_ = 0;
CustomData attributes_;
@ -159,8 +159,8 @@ class Instances {
Span<int> reference_handles() const;
MutableSpan<int> reference_handles_for_write();
MutableSpan<float4x4> transforms();
Span<float4x4> transforms() const;
MutableSpan<float4x4> transforms_for_write();
int instances_num() const;
int references_num() const;
@ -193,6 +193,9 @@ class Instances {
}
};
VArray<float3> instance_position_varray(const Instances &instances);
VMutableArray<float3> instance_position_varray_for_write(Instances &instances);
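Since the transforms now live in an attribute, access follows the usual const/_for_write split, where writing may first un-share the underlying array; a hedged caller-side sketch, assuming the Instances API above (float4x4::location() is an assumption; adjust if the matrix API differs):

static void offset_instances_up(blender::bke::Instances &instances)
{
  /* Read-only access never copies shared attribute data. */
  const blender::Span<blender::float4x4> current = instances.transforms();
  (void)current;

  /* Write access may un-share (copy-on-write) the attribute array first. */
  for (blender::float4x4 &m : instances.transforms_for_write()) {
    m.location().z += 1.0f;
  }
}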
/* -------------------------------------------------------------------- */
/** \name #InstanceReference Inline Methods
* \{ */

View File

@ -31,9 +31,9 @@
*/
#include "BLI_compiler_attrs.h"
#include "BLI_utildefines.h"
#include "BLI_set.hh"
#include "BLI_utildefines.h"
#include "BLI_vector.hh"
#include "DNA_userdef_enums.h"
@ -662,9 +662,8 @@ bool BKE_id_is_editable(const Main *bmain, const ID *id);
/**
* Returns an ordered list of data-blocks for display in the UI.
* The result is returned by value, so there is nothing for the caller to free.
*/
void BKE_id_ordered_list(ListBase *ordered_lb, const ListBase *lb);
blender::Vector<ID *> BKE_id_ordered_list(const ListBase *lb);
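With the ListBase-of-LinkData out-parameter replaced by a returned blender::Vector, callers no longer allocate or free anything; a minimal sketch, assuming a valid Main *bmain and the declaration above:

#include <cstdio>

/* Iterate data-blocks in UI display order; the Vector owns its storage,
 * so there is no BLI_freelistN() counterpart anymore. */
for (ID *id : BKE_id_ordered_list(&bmain->objects)) {
  printf("%s\n", id->name + 2); /* Skip the two-character ID-code prefix. */
}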
/**
* Reorder ID in the list, before or after the "relative" ID.
*/

View File

@ -21,6 +21,7 @@
#include "DNA_ID.h"
#include "BLI_function_ref.hh"
#include "BLI_sys_types.h"
#include <array>
@ -324,15 +325,24 @@ void BKE_library_ID_test_usages(Main *bmain,
/** Parameters and result data structure for the 'unused IDs' functions below. */
struct LibQueryUnusedIDsData {
/** Process local data-blocks. */
bool do_local_ids;
bool do_local_ids = false;
/** Process linked data-blocks. */
bool do_linked_ids;
bool do_linked_ids = false;
/**
* Process all actually unused data-blocks, including those that are currently only used by
* other unused data-blocks, and 'dependency islands' of several data-blocks using each other
* without any external valid user.
*/
bool do_recursive;
bool do_recursive = false;
/**
* Optional callback filter: if defined and it returns `true`, the given `id` may be considered
* as unused; otherwise it will always be considered as used.
*
* Allows for more complex handling of which IDs should be deleted, on top of the basic
* local/linked choices.
*/
blender::FunctionRef<bool(ID *id)> filter_fn = nullptr;
/**
* Amount of detected as unused data-blocks, per type and total as the last value of the array

View File

@ -86,6 +86,12 @@ enum {
* etc.). */
ID_REMAP_DO_LIBRARY_POINTERS = 1 << 8,
/** Allow remapping of an ID pointer of a certain type to another one of a different type.
*
* WARNING: Use with caution. Should only be needed in a very small number of cases, e.g. when
* converting one ID type to another. */
ID_REMAP_ALLOW_IDTYPE_MISMATCH = 1 << 9,
/**
* Don't touch the special user counts (use when the 'old' remapped ID remains in use):
* - Do not transfer 'fake user' status from old to new ID.
@ -267,6 +273,13 @@ class IDRemapper {
*/
blender::Set<ID *> never_null_users_;
public:
/**
* In almost all cases, the original pointer and its new replacement should be of the same type;
* however, there are some rare exceptions, e.g. when converting from one ID type to another.
*/
bool allow_idtype_mismatch = false;
public:
void clear(void)
{

View File

@ -15,3 +15,9 @@ struct Library;
struct Main;
void BKE_library_filepath_set(Main *bmain, Library *lib, const char *filepath);
/**
* Rebuild the hierarchy of libraries after e.g. deleting or relocating one: often some indirectly
* linked libraries lose their 'parent' pointer, wrongly making them appear as directly used.
*/
void BKE_library_main_rebuild_hierarchy(Main *bmain);

View File

@ -1329,6 +1329,7 @@ void BKE_nodetree_remove_layer_n(struct bNodeTree *ntree, struct Scene *scene, i
#define GEO_NODE_STORE_NAMED_GRID 2122
#define GEO_NODE_SORT_ELEMENTS 2123
#define GEO_NODE_MENU_SWITCH 2124
#define GEO_NODE_SAMPLE_GRID 2125
/** \} */

View File

@ -481,6 +481,11 @@ inline const bNode *bNodeTree::group_output_node() const
return this->runtime->group_output_node;
}
inline blender::Span<bNode *> bNodeTree::group_input_nodes()
{
return this->nodes_by_type("NodeGroupInput");
}
inline blender::Span<const bNode *> bNodeTree::group_input_nodes() const
{
return this->nodes_by_type("NodeGroupInput");

View File

@ -13,6 +13,7 @@ extern "C" {
#endif
#include "BLI_compiler_attrs.h"
#include "BLI_sys_types.h"
struct UserDef;
struct bUserExtensionRepo;
@ -84,6 +85,8 @@ bUserExtensionRepo *BKE_preferences_extension_repo_add(UserDef *userdef,
const char *module,
const char *custom_dirpath);
void BKE_preferences_extension_repo_remove(UserDef *userdef, bUserExtensionRepo *repo);
bUserExtensionRepo *BKE_preferences_extension_repo_add_default(UserDef *userdef);
bUserExtensionRepo *BKE_preferences_extension_repo_add_default_user(UserDef *userdef);
void BKE_preferences_extension_repo_name_set(UserDef *userdef,
bUserExtensionRepo *repo,
@ -93,13 +96,27 @@ void BKE_preferences_extension_repo_module_set(UserDef *userdef,
const char *module);
void BKE_preferences_extension_repo_custom_dirpath_set(bUserExtensionRepo *repo, const char *path);
void BKE_preferences_extension_repo_dirpath_get(const bUserExtensionRepo *repo,
char *dirpath,
int dirpath_maxncpy);
size_t BKE_preferences_extension_repo_dirpath_get(const bUserExtensionRepo *repo,
char *dirpath,
int dirpath_maxncpy);
bUserExtensionRepo *BKE_preferences_extension_repo_find_index(const UserDef *userdef, int index);
bUserExtensionRepo *BKE_preferences_extension_repo_find_by_module(const UserDef *userdef,
const char *module);
/**
* Use a full URL/remote path to find a repository that shares its prefix.
*/
bUserExtensionRepo *BKE_preferences_extension_repo_find_by_remote_path_prefix(
const UserDef *userdef, const char *remote_path_full, const bool only_enabled);
/**
* Skip the `https://` or `http://` scheme part of a URL; return zero if none is found.
*/
int BKE_preferences_extension_repo_remote_scheme_end(const char *url);
/**
* Set a name based on a URL, e.g. `https://www.example.com/path` -> `www.example.com`.
*/
void BKE_preferences_extension_remote_to_name(const char *remote_path, char name[64]);
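A small usage sketch of the naming helper; the resulting name is taken from the doc-string example above, not from verified behavior:

char name[64];
BKE_preferences_extension_remote_to_name("https://www.example.com/path", name);
/* Per the doc-string above, `name` should now hold "www.example.com". */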
int BKE_preferences_extension_repo_get_index(const UserDef *userdef,
const bUserExtensionRepo *repo);

View File

@ -41,24 +41,24 @@ struct ReportList;
struct Scene;
struct SwsContext;
int BKE_ffmpeg_start(void *context_v,
const Scene *scene,
RenderData *rd,
int rectx,
int recty,
ReportList *reports,
bool preview,
const char *suffix);
void BKE_ffmpeg_end(void *context_v);
int BKE_ffmpeg_append(void *context_v,
struct ImBuf;
bool BKE_ffmpeg_start(void *context_v,
const Scene *scene,
RenderData *rd,
int start_frame,
int frame,
int *pixels,
int rectx,
int recty,
const char *suffix,
ReportList *reports);
ReportList *reports,
bool preview,
const char *suffix);
void BKE_ffmpeg_end(void *context_v);
bool BKE_ffmpeg_append(void *context_v,
RenderData *rd,
int start_frame,
int frame,
const ImBuf *image,
const char *suffix,
ReportList *reports);
void BKE_ffmpeg_filepath_get(char filepath[/*FILE_MAX*/ 1024],
const RenderData *rd,
bool preview,

View File

@ -10,28 +10,27 @@
/* generic blender movie support, could move to own module */
struct ImBuf;
struct RenderData;
struct ReportList;
struct Scene;
struct bMovieHandle {
int (*start_movie)(void *context_v,
const Scene *scene,
RenderData *rd,
int rectx,
int recty,
ReportList *reports,
bool preview,
const char *suffix);
int (*append_movie)(void *context_v,
bool (*start_movie)(void *context_v,
const Scene *scene,
RenderData *rd,
int start_frame,
int frame,
int *pixels,
int rectx,
int recty,
const char *suffix,
ReportList *reports);
ReportList *reports,
bool preview,
const char *suffix);
bool (*append_movie)(void *context_v,
RenderData *rd,
int start_frame,
int frame,
const ImBuf *image,
const char *suffix,
ReportList *reports);
void (*end_movie)(void *context_v);
/* Optional function. */

View File

@ -139,8 +139,9 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,
return nullptr;
}
const int expected_length = avs->path_ef - avs->path_sf;
BLI_assert(expected_length > 0); /* Because the `if` above. */
/* Adding 1 because the avs range is inclusive on both ends. */
const int expected_length = (avs->path_ef - avs->path_sf) + 1;
BLI_assert(expected_length > 1); /* Because the `if` above. */
/* If there is already a motionpath, just return that, provided its settings
* are ok (saves extra free+alloc). */
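A quick worked check of the off-by-one fix, with hypothetical frame values: a range that is inclusive on both ends contains (last - first) + 1 samples.

const int path_sf = 1, path_ef = 250;
const int expected_length = (path_ef - path_sf) + 1; /* 250 samples, not 249. */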
@ -157,7 +158,7 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,
/* Only reuse a path if it was already a valid path, and of the expected length. */
if (mpath->start_frame != mpath->end_frame && mpath->length == expected_length) {
mpath->start_frame = avs->path_sf;
mpath->end_frame = avs->path_ef;
mpath->end_frame = avs->path_ef + 1;
return mpath;
}
@ -171,7 +172,7 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,
/* Copy mpath settings from the viz settings. */
mpath->start_frame = avs->path_sf;
mpath->end_frame = avs->path_ef;
mpath->end_frame = avs->path_ef + 1;
mpath->length = expected_length;
if (avs->path_bakeflag & MOTIONPATH_BAKE_HEADS) {

View File

@ -30,6 +30,13 @@ AssetWeakReference::AssetWeakReference()
{
}
AssetWeakReference::AssetWeakReference(const AssetWeakReference &other)
: asset_library_type(other.asset_library_type),
asset_library_identifier(BLI_strdup_null(other.asset_library_identifier)),
relative_asset_identifier(BLI_strdup_null(other.relative_asset_identifier))
{
}
AssetWeakReference::AssetWeakReference(AssetWeakReference &&other)
: asset_library_type(other.asset_library_type),
asset_library_identifier(other.asset_library_identifier),
@ -46,18 +53,24 @@ AssetWeakReference::~AssetWeakReference()
MEM_delete(relative_asset_identifier);
}
AssetWeakReference *BKE_asset_weak_reference_copy(AssetWeakReference *weak_ref)
AssetWeakReference &AssetWeakReference::operator=(const AssetWeakReference &other)
{
if (weak_ref == nullptr) {
return nullptr;
if (this == &other) {
return *this;
}
std::destroy_at(this);
new (this) AssetWeakReference(other);
return *this;
}
AssetWeakReference *weak_ref_copy = MEM_new<AssetWeakReference>(__func__);
weak_ref_copy->asset_library_type = weak_ref->asset_library_type;
weak_ref_copy->asset_library_identifier = BLI_strdup(weak_ref->asset_library_identifier);
weak_ref_copy->relative_asset_identifier = BLI_strdup(weak_ref->relative_asset_identifier);
return weak_ref_copy;
AssetWeakReference &AssetWeakReference::operator=(AssetWeakReference &&other)
{
if (this == &other) {
return *this;
}
std::destroy_at(this);
new (this) AssetWeakReference(std::move(other));
return *this;
}
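Both assignment operators reuse the constructors through the destroy-and-reconstruct idiom; a generic sketch of the pattern with a hypothetical type (note it is only safe when the constructor cannot throw):

#include <memory>
#include <new>

struct Example {
  Example(const Example &other);  /* Deep-copies the payload. */
  ~Example();                     /* Releases the payload. */

  Example &operator=(const Example &other)
  {
    if (this == &other) {
      return *this; /* The self-assignment guard is mandatory here. */
    }
    std::destroy_at(this);     /* Run the destructor in place. */
    new (this) Example(other); /* Rebuild via the copy constructor. */
    return *this;
  }
};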
AssetWeakReference AssetWeakReference::make_reference(

View File

@ -45,7 +45,7 @@ IndexRange NodeBakeCache::frame_range() const
}
const int start_frame = this->frames.first()->frame.frame();
const int end_frame = this->frames.last()->frame.frame();
return {start_frame, end_frame - start_frame + 1};
return IndexRange::from_begin_end_inclusive(start_frame, end_frame);
}
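The named constructor makes the inclusive-bounds intent explicit instead of hiding it in size arithmetic; a small sketch of the equivalence, assuming BLI_index_range.hh:

const blender::IndexRange r = blender::IndexRange::from_begin_end_inclusive(10, 12);
BLI_assert(r.first() == 10 && r.last() == 12 && r.size() == 3);
/* Equivalent to the old spelling: IndexRange(10, 12 - 10 + 1). */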
SimulationNodeCache *ModifierCache::get_simulation_node_cache(const int id)

View File

@ -716,14 +716,6 @@ static std::unique_ptr<Instances> try_load_instances(const DictionaryValue &io_g
instances->add_reference(std::move(reference_geometry));
}
const auto *io_transforms = io_instances->lookup_dict("transforms");
if (!io_transforms) {
return {};
}
if (!read_blob_simple_gspan(blob_reader, *io_transforms, instances->transforms())) {
return {};
}
MutableAttributeAccessor attributes = instances->attributes_for_write();
if (!load_attributes(*io_attributes, attributes, blob_reader, blob_sharing)) {
return {};
@ -743,6 +735,18 @@ static std::unique_ptr<Instances> try_load_instances(const DictionaryValue &io_g
}
}
if (!attributes.contains("instance_transform")) {
/* Try reading the transform data in the old bake format, from before it was stored as an
* attribute. */
const auto *io_transforms = io_instances->lookup_dict("transforms");
if (!io_transforms) {
return {};
}
if (!read_blob_simple_gspan(blob_reader, *io_transforms, instances->transforms_for_write())) {
return {};
}
}
return instances;
}
@ -973,11 +977,8 @@ static std::shared_ptr<DictionaryValue> serialize_geometry_set(const GeometrySet
serialize_geometry_set(reference.geometry_set(), blob_writer, blob_sharing));
}
io_instances->append(
"transforms", write_blob_simple_gspan(blob_writer, blob_sharing, instances.transforms()));
auto io_attributes = serialize_attributes(
instances.attributes(), blob_writer, blob_sharing, {"position"});
instances.attributes(), blob_writer, blob_sharing, {});
io_instances->append("attributes", io_attributes);
}
return io_geometry;

View File

@ -329,7 +329,7 @@ static void swap_old_bmain_data_for_blendfile(ReuseOldBMainData *reuse_data, con
std::swap(*new_lb, *old_lb);
/* TODO: Could add per-IDType control over namemaps clearing, if this becomes a performances
/* TODO: Could add per-IDType control over name-maps clearing, if this becomes a performance
* concern. */
BKE_main_namemap_clear(old_bmain);
BKE_main_namemap_clear(new_bmain);
@ -1225,6 +1225,9 @@ UserDef *BKE_blendfile_userdef_from_defaults()
BKE_preferences_asset_library_default_add(userdef);
BKE_preferences_extension_repo_add_default(userdef);
BKE_preferences_extension_repo_add_default_user(userdef);
return userdef;
}

View File

@ -43,6 +43,7 @@
#include "BKE_lib_override.hh"
#include "BKE_lib_query.hh"
#include "BKE_lib_remap.hh"
#include "BKE_library.hh"
#include "BKE_main.hh"
#include "BKE_main_namemap.hh"
#include "BKE_material.h"
@ -1894,7 +1895,7 @@ void BKE_blendfile_library_relocate(BlendfileLinkAppendContext *lapp_context,
if (lib->id.tag & LIB_TAG_DOIT) {
id_us_clear_real(&lib->id);
if (lib->id.us == 0) {
BKE_id_free(bmain, (ID *)lib);
BKE_id_delete(bmain, lib);
}
}
}
@ -1916,6 +1917,8 @@ void BKE_blendfile_library_relocate(BlendfileLinkAppendContext *lapp_context,
}
FOREACH_MAIN_ID_END;
BKE_library_main_rebuild_hierarchy(bmain);
/* Resync overrides if needed. */
if (!USER_EXPERIMENTAL_TEST(&U, no_override_auto_resync)) {
BlendFileReadReport report{};

View File

@ -592,7 +592,8 @@ int cloth_uses_vgroup(ClothModifierData *clmd)
(clmd->coll_parms->vgroup_objcol > 0)) ||
(clmd->sim_parms->vgroup_pressure > 0) || (clmd->sim_parms->vgroup_struct > 0) ||
(clmd->sim_parms->vgroup_bend > 0) || (clmd->sim_parms->vgroup_shrink > 0) ||
(clmd->sim_parms->vgroup_intern > 0) || (clmd->sim_parms->vgroup_mass > 0));
(clmd->sim_parms->vgroup_intern > 0) || (clmd->sim_parms->vgroup_mass > 0) ||
(clmd->sim_parms->vgroup_shear > 0));
}
/**

View File

@ -238,7 +238,6 @@ static ID **collection_owner_pointer_get(ID *id)
if ((id->flag & LIB_EMBEDDED_DATA) == 0) {
return nullptr;
}
BLI_assert((id->tag & LIB_TAG_NO_MAIN) == 0);
Collection *master_collection = (Collection *)id;
BLI_assert((master_collection->flag & COLLECTION_IS_MASTER) != 0);

View File

@ -1713,6 +1713,8 @@ void BKE_scopes_update(Scopes *scopes,
}
if (scopes->vecscope) {
MEM_freeN(scopes->vecscope);
}
if (scopes->vecscope_rgb) {
MEM_freeN(scopes->vecscope_rgb);
}

View File

@ -4593,7 +4593,7 @@ void CustomData_validate_layer_name(const CustomData *data,
int index = -1;
/* if a layer name was given, try to find that layer */
if (name[0]) {
if (!name.is_empty()) {
index = CustomData_get_named_layer_index(data, type, name);
}
@ -5542,6 +5542,22 @@ std::optional<VolumeGridType> custom_data_type_to_volume_grid_type(const eCustom
}
}
std::optional<eCustomDataType> volume_grid_type_to_custom_data_type(const VolumeGridType type)
{
switch (type) {
case VOLUME_GRID_FLOAT:
return CD_PROP_FLOAT;
case VOLUME_GRID_VECTOR_FLOAT:
return CD_PROP_FLOAT3;
case VOLUME_GRID_INT:
return CD_PROP_INT32;
case VOLUME_GRID_BOOLEAN:
return CD_PROP_BOOL;
default:
return std::nullopt;
}
}
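The two conversions are inverses on the supported subset of types; a hedged round-trip sketch (enum headers assumed available, placed inside namespace blender::bke like the functions above):

const std::optional<VolumeGridType> grid = custom_data_type_to_volume_grid_type(CD_PROP_FLOAT);
if (grid) {
  const std::optional<eCustomDataType> back = volume_grid_type_to_custom_data_type(*grid);
  BLI_assert(back && *back == CD_PROP_FLOAT); /* Round-trips for supported types. */
}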
/** \} */
} // namespace blender::bke

Some files were not shown because too many files have changed in this diff.