WIP: Brush assets project #106303

Draft
Julian Eisel wants to merge 358 commits from brush-assets-project into main

95 changed files with 1186 additions and 838 deletions
Showing only changes of commit c86d6bf244.


@ -279,5 +279,6 @@ StatementMacros:
MacroBlockBegin: "^OSL_CLOSURE_STRUCT_BEGIN$"
MacroBlockEnd: "^OSL_CLOSURE_STRUCT_END$"
# Ensure new line at the end of source files.
# Ensure single new line at the end of source files.
InsertNewlineAtEOF: True
KeepEmptyLinesAtEOF: False

.gitignore vendored

@ -56,6 +56,7 @@ waveletNoiseTile.bin
# External repositories.
/scripts/addons/
/scripts/addons_contrib/
/tests/benchmarks/
# Ignore old submodules directories.
# Eventually need to get rid of those, but for the first time of transition

.gitmodules vendored Normal file

@ -0,0 +1,29 @@
[submodule "lib/linux_x64"]
update = none
path = lib/linux_x64
url = https://projects.blender.org/blender/lib-linux_x64.git
branch = main
[submodule "lib/macos_arm64"]
update = none
path = lib/macos_arm64
url = https://projects.blender.org/blender/lib-macos_arm64.git
branch = main
[submodule "lib/macos_x64"]
update = none
path = lib/macos_x64
url = https://projects.blender.org/blender/lib-macos_x64.git
branch = main
[submodule "lib/windows_x64"]
update = none
path = lib/windows_x64
url = https://projects.blender.org/blender/lib-windows_x64.git
branch = main
[submodule "release/datafiles/assets"]
path = release/datafiles/assets
url = https://projects.blender.org/blender/blender-assets.git
branch = main
[submodule "tests/data"]
update = none
path = tests/data
url = https://projects.blender.org/blender/blender-test-data.git
branch = main
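
Most of the library submodules above are registered with "update = none", so a bare `git submodule update` leaves them untouched; the update scripts later in this diff opt into them per platform. A minimal sketch of that flow, assuming it mirrors the `git_enable_submodule`/`git_update_submodule` helpers added to make_utils.py below:

import subprocess

def enable_and_update_submodule(path: str) -> None:
    # Locally override the ".gitmodules" default of "update = none" ...
    subprocess.check_call(
        ["git", "config", "--local", f"submodule.{path}.update", "checkout"])
    # ... then initialize the submodule and check out the pinned commit.
    subprocess.check_call(["git", "submodule", "update", "--init", path])

enable_and_update_submodule("lib/linux_x64")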


@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: GPL-2.0-or-later
# This Makefile does an out-of-source CMake build in ../build_`OS`_`CPU`
# This Makefile does an out-of-source CMake build in ../build_`OS`
# eg:
# ../build_linux_i386
# This is for users who like to configure & build blender with a single command.
@ -35,7 +35,7 @@ Other Convenience Targets
* deps: Build library dependencies (intended only for platform maintainers).
The existence of locally built dependencies overrides the pre-built dependencies from subversion.
These must be manually removed from '../lib/' to go back to using the pre-compiled libraries.
These must be manually removed from 'lib/' to go back to using the pre-compiled libraries.
Project Files
Generate project files for development environments.
@ -165,6 +165,16 @@ OS:=$(shell uname -s)
OS_NCASE:=$(shell uname -s | tr '[A-Z]' '[a-z]')
CPU:=$(shell uname -m)
# Use our OS and CPU architecture naming conventions.
ifeq ($(CPU),x86_64)
CPU:=x64
endif
ifeq ($(OS_NCASE),darwin)
OS_LIBDIR:=macos
else
OS_LIBDIR:=$(OS_NCASE)
endif
# Source and Build DIR's
BLENDER_DIR:=$(shell pwd -P)
@ -186,26 +196,13 @@ ifndef DEPS_BUILD_DIR
endif
ifndef DEPS_INSTALL_DIR
DEPS_INSTALL_DIR:=$(shell dirname "$(BLENDER_DIR)")/lib/$(OS_NCASE)
# Add processor type to directory name, except for darwin x86_64
# which by convention does not have it.
ifeq ($(OS_NCASE),darwin)
ifneq ($(CPU),x86_64)
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
endif
else
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
endif
DEPS_INSTALL_DIR:=$(shell dirname "$(BLENDER_DIR)")/lib/$(OS_LIBDIR)_$(CPU)
endif
# Set the LIBDIR, an empty string when not found.
LIBDIR:=$(wildcard ../lib/${OS_NCASE}_${CPU})
LIBDIR:=$(wildcard $(BLENDER_DIR)/lib/${OS_LIBDIR}_${CPU})
ifeq (, $(LIBDIR))
LIBDIR:=$(wildcard ../lib/${OS_NCASE}_${CPU}_glibc_228)
endif
ifeq (, $(LIBDIR))
LIBDIR:=$(wildcard ../lib/${OS_NCASE})
LIBDIR:=$(wildcard $(BLENDER_DIR)/lib/${OS_LIBDIR})
endif
# Find the newest Python version bundled in `LIBDIR`.
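
For illustration, the new directory naming can be sketched in Python (an assumption-level mirror of the `ifeq` blocks above and of `get_effective_platform()`/`get_effective_architecture()` in make_update.py below):

import platform
import sys

cpu = platform.machine().lower()
if cpu in ("x86_64", "amd64"):
    cpu = "x64"  # Blender's architecture suffix convention
os_libdir = {"darwin": "macos", "win32": "windows"}.get(sys.platform, sys.platform)
print(f"lib/{os_libdir}_{cpu}")  # e.g. "lib/linux_x64" or "lib/macos_arm64"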


@ -49,18 +49,17 @@ endif()
if(NOT DEFINED LIBDIR)
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin)
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/macos_x64)
else()
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin_${CMAKE_OSX_ARCHITECTURES})
endif()
else()
if(FIRST_RUN)
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/macos_${CMAKE_OSX_ARCHITECTURES})
endif()
endif()
if(NOT EXISTS "${LIBDIR}/")
if(NOT EXISTS "${LIBDIR}/.git")
message(FATAL_ERROR "Mac OSX requires pre-compiled libs at: '${LIBDIR}'")
endif()
if(FIRST_RUN)
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
endif()
# Avoid searching for headers since this would otherwise override our lib
# directory as well as PYTHON_ROOT_DIR.


@ -16,13 +16,13 @@ else()
set(LIBDIR_NATIVE_ABI ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_NAME})
# Path to precompiled libraries with known glibc 2.28 ABI.
set(LIBDIR_GLIBC228_ABI ${CMAKE_SOURCE_DIR}/../lib/linux_x86_64_glibc_228)
set(LIBDIR_GLIBC228_ABI ${CMAKE_SOURCE_DIR}/lib/linux_x64)
# Choose the best suitable libraries.
if(EXISTS ${LIBDIR_NATIVE_ABI})
set(LIBDIR ${LIBDIR_NATIVE_ABI})
set(WITH_LIBC_MALLOC_HOOK_WORKAROUND TRUE)
elseif(EXISTS ${LIBDIR_GLIBC228_ABI})
elseif(EXISTS "${LIBDIR_GLIBC228_ABI}/.git")
set(LIBDIR ${LIBDIR_GLIBC228_ABI})
if(WITH_MEM_JEMALLOC)
# jemalloc provides malloc hooks.


@ -266,23 +266,23 @@ if(NOT DEFINED LIBDIR)
# Setup 64bit and 64bit windows systems
if(CMAKE_CL_64)
message(STATUS "64 bit compiler detected.")
set(LIBDIR_BASE "win64")
set(LIBDIR_BASE "windows_x64")
else()
message(FATAL_ERROR "32 bit compiler detected, blender no longer provides pre-build libraries for 32 bit windows, please set the LIBDIR cmake variable to your own library folder")
endif()
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 19.30.30423)
message(STATUS "Visual Studio 2022 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/${LIBDIR_BASE})
elseif(MSVC_VERSION GREATER 1919)
message(STATUS "Visual Studio 2019 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/${LIBDIR_BASE})
endif()
else()
if(FIRST_RUN)
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
endif()
endif()
if(NOT EXISTS "${LIBDIR}/")
if(NOT EXISTS "${LIBDIR}/.git")
message(FATAL_ERROR "\n\nWindows requires pre-compiled libs at: '${LIBDIR}'. Please run `make update` in the blender source folder to obtain them.")
endif()


@ -163,14 +163,14 @@ function(blender_add_ctests)
TEST_PREFIX ${ARGS_SUITE_NAME}
WORKING_DIRECTORY "${TEST_INSTALL_DIR}"
EXTRA_ARGS
--test-assets-dir "${CMAKE_SOURCE_DIR}/../lib/tests"
--test-assets-dir "${CMAKE_SOURCE_DIR}/tests/data"
--test-release-dir "${_test_release_dir}"
)
else()
add_test(
NAME ${ARGS_SUITE_NAME}
COMMAND ${ARGS_TARGET}
--test-assets-dir "${CMAKE_SOURCE_DIR}/../lib/tests"
--test-assets-dir "${CMAKE_SOURCE_DIR}/tests/data"
--test-release-dir "${_test_release_dir}"
WORKING_DIRECTORY ${TEST_INSTALL_DIR}
)


@ -13,6 +13,7 @@ import sys
import make_utils
from make_utils import call
from pathlib import Path
# Parse arguments.
@ -21,7 +22,6 @@ def parse_arguments() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument("--ctest-command", default="ctest")
parser.add_argument("--cmake-command", default="cmake")
parser.add_argument("--svn-command", default="svn")
parser.add_argument("--git-command", default="git")
parser.add_argument("--config", default="")
parser.add_argument("build_directory")
@ -30,7 +30,6 @@ def parse_arguments() -> argparse.Namespace:
args = parse_arguments()
git_command = args.git_command
svn_command = args.svn_command
ctest_command = args.ctest_command
cmake_command = args.cmake_command
config = args.config
@ -45,24 +44,18 @@ if make_utils.command_missing(git_command):
sys.exit(1)
# Test if we are building a specific release version.
branch = make_utils.git_branch(git_command)
tag = make_utils.git_tag(git_command)
release_version = make_utils.git_branch_release_version(branch, tag)
lib_tests_dirpath = os.path.join('..', 'lib', "tests")
lib_tests_dirpath = Path("tests") / "data"
if not os.path.exists(lib_tests_dirpath):
if not (lib_tests_dirpath / ".git").exists():
print("Tests files not found, downloading...")
if make_utils.command_missing(svn_command):
sys.stderr.write("svn not found, can't checkout test files\n")
sys.exit(1)
if make_utils.command_missing(cmake_command):
sys.stderr.write("cmake not found, can't checkout test files\n")
sys.exit(1)
svn_url = make_utils.svn_libraries_base_url(release_version) + "/tests"
call([svn_command, "checkout", svn_url, lib_tests_dirpath])
# Ensure the test data files sub-module is configured and present.
make_utils.git_enable_submodule(git_command, "tests/data")
make_utils.git_update_submodule(args.git_command, lib_tests_dirpath)
# Run cmake again to detect tests files.
os.chdir(build_dir)
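
With the SVN checkout gone, a manual run of this script needs only git, cmake and ctest on PATH; a hypothetical invocation (build directory name assumed) would be:

python build_files/utils/make_test.py --git-command git --config Release ../build_linux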


@ -4,11 +4,11 @@
# SPDX-License-Identifier: GPL-2.0-or-later
"""
"make update" for all platforms, updating svn libraries and tests and Blender
git repository and sub-modules.
"make update" for all platforms, updating Git LFS submodules for libraries and
tests, and Blender git repository.
For release branches, this will check out the appropriate branches of
sub-modules and libraries.
submodules and libraries.
"""
import argparse
@ -20,168 +20,169 @@ import sys
import make_utils
from pathlib import Path
from make_utils import call, check_output
from urllib.parse import urljoin
from urllib.parse import urljoin, urlsplit
from typing import (
Optional,
)
class Submodule:
path: str
branch: str
branch_fallback: str
def __init__(self, path: str, branch: str, branch_fallback: str) -> None:
self.path = path
self.branch = branch
self.branch_fallback = branch_fallback
from typing import Optional
def print_stage(text: str) -> None:
print("")
print(text)
print("=" * len(text))
print("")
# Parse arguments
def parse_arguments() -> argparse.Namespace:
"""
Parse command-line arguments.
Returns parsed object from which the command line arguments can be accessed
as properties. The name of the properties matches the command line argument,
but with the leading dashed omitted and all remaining dashes replaced with
underscore.
"""
parser = argparse.ArgumentParser()
parser.add_argument("--no-libraries", action="store_true")
parser.add_argument("--no-blender", action="store_true")
parser.add_argument("--no-submodules", action="store_true")
parser.add_argument("--use-tests", action="store_true")
parser.add_argument("--svn-command", default="svn")
parser.add_argument("--svn-branch", default=None)
parser.add_argument("--git-command", default="git")
parser.add_argument("--use-linux-libraries", action="store_true")
parser.add_argument("--architecture", type=str, choices=("x86_64", "amd64", "arm64",))
parser.add_argument("--architecture", type=str,
choices=("x86_64", "amd64", "arm64",))
return parser.parse_args()
def get_blender_git_root() -> str:
return check_output([args.git_command, "rev-parse", "--show-toplevel"])
def get_blender_git_root() -> Path:
"""
Get root directory of the current Git directory.
"""
return Path(
check_output([args.git_command, "rev-parse", "--show-toplevel"]))
# Setup for precompiled libraries and tests from svn.
def get_effective_platform(args: argparse.Namespace) -> str:
"""
Get platform of the host.
The result string is normalized to the name used by Blender releases and
library repository name prefixes: linux, macos, windows.
"""
if sys.platform == "darwin":
platform = "macos"
elif sys.platform == "win32":
platform = "windows"
else:
platform = sys.platform
assert (platform in ("linux", "macos", "windows"))
return platform
def get_effective_architecture(args: argparse.Namespace) -> str:
"""
Get architecture of the host.
The result string is normalized to the architecture name used by the Blender
releases and library repository name suffixes: x64, arm64.
NOTE: When cross-compiling the architecture is coming from the command line
argument.
"""
architecture = args.architecture
if architecture:
assert isinstance(architecture, str)
return architecture
# Check platform.version to detect arm64 with x86_64 python binary.
if "ARM64" in platform.version():
return "arm64"
return platform.machine().lower()
def svn_update(args: argparse.Namespace, release_version: Optional[str]) -> None:
svn_non_interactive = [args.svn_command, '--non-interactive']
lib_dirpath = os.path.join(get_blender_git_root(), '..', 'lib')
svn_url = make_utils.svn_libraries_base_url(release_version, args.svn_branch)
# Checkout precompiled libraries
architecture = get_effective_architecture(args)
if sys.platform == 'darwin':
if architecture == 'arm64':
lib_platform = "darwin_arm64"
elif architecture == 'x86_64':
lib_platform = "darwin"
else:
lib_platform = None
elif sys.platform == 'win32':
# Windows checkout is usually handled by bat scripts since python3 to run
# this script is bundled as part of the precompiled libraries. However it
# is used by the buildbot.
lib_platform = "win64_vc15"
elif args.use_linux_libraries:
lib_platform = "linux_x86_64_glibc_228"
elif "ARM64" in platform.version():
# Check platform.version to detect arm64 with x86_64 python binary.
architecture = "arm64"
else:
# No precompiled libraries for Linux.
lib_platform = None
architecture = platform.machine().lower()
if lib_platform:
lib_platform_dirpath = os.path.join(lib_dirpath, lib_platform)
# Normalize the architecture name.
if architecture in ("x86_64", "amd64"):
architecture = "x64"
if not os.path.exists(lib_platform_dirpath):
print_stage("Checking out Precompiled Libraries")
assert (architecture in ("x64", "arm64"))
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't checkout libraries\n")
sys.exit(1)
svn_url_platform = svn_url + lib_platform
call(svn_non_interactive + ["checkout", svn_url_platform, lib_platform_dirpath])
if args.use_tests:
lib_tests = "tests"
lib_tests_dirpath = os.path.join(lib_dirpath, lib_tests)
if not os.path.exists(lib_tests_dirpath):
print_stage("Checking out Tests")
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't checkout tests\n")
sys.exit(1)
svn_url_tests = svn_url + lib_tests
call(svn_non_interactive + ["checkout", svn_url_tests, lib_tests_dirpath])
lib_assets = "assets"
lib_assets_dirpath = os.path.join(lib_dirpath, lib_assets)
if not os.path.exists(lib_assets_dirpath):
print_stage("Checking out Assets")
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't checkout assets\n")
sys.exit(1)
svn_url_assets = svn_url + lib_assets
call(svn_non_interactive + ["checkout", svn_url_assets, lib_assets_dirpath])
# Update precompiled libraries, assets and tests
if not os.path.isdir(lib_dirpath):
print("Library path: %r, not found, skipping" % lib_dirpath)
else:
paths_local_and_remote = []
if os.path.exists(os.path.join(lib_dirpath, ".svn")):
print_stage("Updating Precompiled Libraries, Assets and Tests (one repository)")
paths_local_and_remote.append((lib_dirpath, svn_url))
else:
print_stage("Updating Precompiled Libraries, Assets and Tests (multiple repositories)")
# Separate paths checked out.
for dirname in os.listdir(lib_dirpath):
if dirname.startswith("."):
# Temporary paths such as ".mypy_cache" will report a warning, skip hidden directories.
continue
dirpath = os.path.join(lib_dirpath, dirname)
if not (os.path.isdir(dirpath) and os.path.exists(os.path.join(dirpath, ".svn"))):
continue
paths_local_and_remote.append((dirpath, svn_url + dirname))
if paths_local_and_remote:
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't update libraries\n")
sys.exit(1)
for dirpath, svn_url_full in paths_local_and_remote:
call(svn_non_interactive + ["cleanup", dirpath])
# Switch to appropriate branch and update.
call(svn_non_interactive + ["switch", svn_url_full, dirpath], exit_on_error=False)
call(svn_non_interactive + ["update", dirpath])
return architecture
def get_submodule_directories(args: argparse.Namespace):
"""
Get list of all configured submodule directories.
"""
blender_git_root = get_blender_git_root()
dot_modules = blender_git_root / ".gitmodules"
if not dot_modules.exists():
return ()
submodule_directories_output = check_output(
[args.git_command, "config", "--file", dot_modules, "--get-regexp", "path"])
return (Path(line.split(' ', 1)[1]) for line in submodule_directories_output.strip().splitlines())
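# Example output of the `--get-regexp path` query above (an assumption, based on
# the .gitmodules file earlier in this diff):
#   submodule.lib/linux_x64.path lib/linux_x64
#   submodule.tests/data.path tests/data
# Splitting each line on the first space keeps only the path component.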
def ensure_git_lfs(args: argparse.Namespace) -> None:
# Use `--skip-repo` to avoid creating git hooks.
# This is called from the `blender.git` checkout, so we don't need to install hooks there.
call((args.git_command, "lfs", "install", "--skip-repo"), exit_on_error=True)
def update_precompiled_libraries(args: argparse.Namespace) -> None:
"""
Configure and update submodule for precompiled libraries
This function detects the current host architecture, enables the
corresponding submodule, and updates it.
NOTE: When cross-compiling the architecture is coming from the command line
argument.
"""
print_stage("Configuring Precompiled Libraries")
platform = get_effective_platform(args)
arch = get_effective_architecture(args)
print(f"Detected platform : {platform}")
print(f"Detected architecture : {arch}")
print()
if sys.platform == "linux" and not args.use_linux_libraries:
print("Skipping Linux libraries configuration")
return
submodule_dir = f"lib/{platform}_{arch}"
submodule_directories = get_submodule_directories(args)
if Path(submodule_dir) not in submodule_directories:
print("Skipping libraries update: no configured submodule")
return
make_utils.git_enable_submodule(args.git_command, submodule_dir)
make_utils.git_update_submodule(args.git_command, submodule_dir)
def update_tests_data_files(args: argparse.Namespace) -> None:
"""
Configure and update submodule with files used by regression tests
"""
print_stage("Configuring Tests Data Files")
submodule_dir = "tests/data"
make_utils.git_enable_submodule(args.git_command, submodule_dir)
make_utils.git_update_submodule(args.git_command, submodule_dir)
# Test if git repo can be updated.
def git_update_skip(args: argparse.Namespace, check_remote_exists: bool = True) -> str:
"""Test if git repo can be updated."""
if make_utils.command_missing(args.git_command):
sys.stderr.write("git not found, can't update code\n")
sys.exit(1)
@ -274,23 +275,22 @@ def resolve_external_url(blender_url: str, repo_name: str) -> str:
return urljoin(blender_url + "/", "../" + repo_name)
def external_script_copy_old_submodule_over(args: argparse.Namespace, directory_name: str) -> None:
blender_git_root = Path(get_blender_git_root())
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
def external_script_copy_old_submodule_over(
args: argparse.Namespace,
directory: Path,
old_submodules_dir: Path) -> None:
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
old_submodule_relative_dir = Path("release") / "scripts" / directory_name
print(f"Moving {old_submodule_relative_dir} to scripts/{directory_name} ...")
old_submodule_dir = blender_git_root / old_submodule_relative_dir
shutil.move(old_submodule_dir, external_dir)
print(f"Moving {old_submodules_dir} to {directory} ...")
shutil.move(blender_git_root / old_submodules_dir, external_dir)
# Remove old ".git" which is a file with path to a submodule bare repo inside of main
# repo .git/modules directory.
(external_dir / ".git").unlink()
bare_repo_relative_dir = Path(".git") / "modules" / "release" / "scripts" / directory_name
print(f"Copying {bare_repo_relative_dir} to scripts/{directory_name}/.git ...")
bare_repo_relative_dir = Path(".git") / "modules" / old_submodules_dir
print(f"Copying {bare_repo_relative_dir} to {directory}/.git ...")
bare_repo_dir = blender_git_root / bare_repo_relative_dir
shutil.copytree(bare_repo_dir, external_dir / ".git")
@ -298,25 +298,26 @@ def external_script_copy_old_submodule_over(args: argparse.Namespace, directory_
call((args.git_command, "config", "--file", str(git_config), "--unset", "core.worktree"))
def external_script_initialize_if_needed(args: argparse.Namespace,
repo_name: str,
directory_name: str) -> None:
"""Initialize checkout of an external repository scripts directory"""
def floating_checkout_initialize_if_needed(args: argparse.Namespace,
repo_name: str,
directory: Path,
old_submodules_dir: Path = None) -> None:
"""Initialize checkout of an external repository"""
blender_git_root = Path(get_blender_git_root())
blender_git_root = get_blender_git_root()
blender_dot_git = blender_git_root / ".git"
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
external_dir = blender_git_root / directory
if external_dir.exists():
return
print(f"Initializing scripts/{directory_name} ...")
print(f"Initializing {directory} ...")
old_submodule_dot_git = blender_git_root / "release" / "scripts" / directory_name / ".git"
if old_submodule_dot_git.exists() and blender_dot_git.is_dir():
external_script_copy_old_submodule_over(args, directory_name)
return
if old_submodules_dir is not None:
old_submodule_dot_git = blender_git_root / old_submodules_dir / ".git"
if old_submodule_dot_git.exists() and blender_dot_git.is_dir():
external_script_copy_old_submodule_over(args, directory, old_submodules_dir)
return
origin_name = "upstream" if use_upstream_workflow(args) else "origin"
blender_url = make_utils.git_get_remote_url(args.git_command, origin_name)
@ -330,9 +331,9 @@ def external_script_initialize_if_needed(args: argparse.Namespace,
call((args.git_command, "clone", "--origin", origin_name, external_url, str(external_dir)))
def external_script_add_origin_if_needed(args: argparse.Namespace,
repo_name: str,
directory_name: str) -> None:
def floating_checkout_add_origin_if_needed(args: argparse.Namespace,
repo_name: str,
directory: Path) -> None:
"""
Add remote called 'origin' if there is a fork of the external repository available
@ -344,9 +345,8 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
cwd = os.getcwd()
blender_git_root = Path(get_blender_git_root())
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
origin_blender_url = make_utils.git_get_remote_url(args.git_command, "origin")
origin_external_url = resolve_external_url(origin_blender_url, repo_name)
@ -361,7 +361,7 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
if not make_utils.git_is_remote_repository(args.git_command, origin_external_url):
return
print(f"Adding origin remote to {directory_name} pointing to fork ...")
print(f"Adding origin remote to {directory} pointing to fork ...")
# Non-obvious tricks to introduce the new remote called "origin" to the existing
# submodule configuration.
@ -390,23 +390,30 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
return
def external_scripts_update(args: argparse.Namespace,
repo_name: str,
directory_name: str,
branch: Optional[str]) -> str:
def floating_checkout_update(args: argparse.Namespace,
repo_name: str,
directory: Path,
branch: Optional[str],
old_submodules_dir: Path = None,
only_update=False) -> str:
"""Update a single external checkout with the given name in the scripts folder"""
external_script_initialize_if_needed(args, repo_name, directory_name)
external_script_add_origin_if_needed(args, repo_name, directory_name)
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
print(f"Updating scripts/{directory_name} ...")
if only_update and not external_dir.exists():
return ""
floating_checkout_initialize_if_needed(args, repo_name, directory, old_submodules_dir)
floating_checkout_add_origin_if_needed(args, repo_name, directory)
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
print(f"* Updating {directory} ...")
cwd = os.getcwd()
blender_git_root = Path(get_blender_git_root())
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
# Update externals to appropriate given branch, falling back to main if none is given and/or
# found in a sub-repository.
branch_fallback = "main"
@ -419,7 +426,7 @@ def external_scripts_update(args: argparse.Namespace,
os.chdir(external_dir)
msg = git_update_skip(args, check_remote_exists=False)
if msg:
skip_msg += directory_name + " skipped: " + msg + "\n"
skip_msg += str(directory) + " skipped: " + msg + "\n"
else:
# Find a matching branch that exists.
for remote in ("origin", "upstream"):
@ -465,6 +472,17 @@ def external_scripts_update(args: argparse.Namespace,
return skip_msg
def external_scripts_update(args: argparse.Namespace,
repo_name: str,
directory_name: str,
branch: Optional[str]) -> str:
return floating_checkout_update(args,
repo_name,
Path("scripts") / directory_name,
branch,
old_submodules_dir=Path("release") / "scripts" / directory_name)
def scripts_submodules_update(args: argparse.Namespace, branch: Optional[str]) -> str:
"""Update working trees of addons and addons_contrib within the scripts/ directory"""
msg = ""
@ -475,12 +493,74 @@ def scripts_submodules_update(args: argparse.Namespace, branch: Optional[str]) -
return msg
def floating_libraries_update(args: argparse.Namespace, branch: Optional[str]) -> str:
"""Update libraries checkouts which are floating (not attached as Git submodules)"""
msg = ""
msg += floating_checkout_update(args,
"benchmarks",
Path("tests") / "benchmarks",
branch,
only_update=True)
return msg
def add_submodule_push_url(args: argparse.Namespace):
"""
Add pushURL configuration for all locally activated submodules, pointing to SSH protocol.
"""
blender_git_root = get_blender_git_root()
modules = blender_git_root / ".git" / "modules"
submodule_directories = get_submodule_directories(args)
for submodule_path in submodule_directories:
module_path = modules / submodule_path
config = module_path / "config"
if not config.exists():
# Ignore modules which are not initialized
continue
push_url = check_output((args.git_command, "config", "--file", str(config),
"--get", "remote.origin.pushURL"), exit_on_error=False)
if push_url and push_url != "git@projects.blender.org:blender/lib-darwin_arm64.git":
# Ignore modules which have pushURL configured.
# Keep special exception, as some debug code sneaked into the production for a short
# while.
continue
url = make_utils.git_get_config(args.git_command, "remote.origin.url", str(config))
if not url.startswith("https:"):
# Ignore non-URL URLs.
continue
url_parts = urlsplit(url)
push_url = f"git@{url_parts.netloc}:{url_parts.path[1:]}"
print(f"Setting pushURL to {push_url} for {submodule_path}")
make_utils.git_set_config(args.git_command, "remote.origin.pushURL", push_url, str(config))
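# Example (assumption): for the clone URL
#   https://projects.blender.org/blender/lib-linux_x64.git
# `urlsplit` yields netloc "projects.blender.org" and path "/blender/lib-linux_x64.git",
# so the computed pushURL is git@projects.blender.org:blender/lib-linux_x64.git.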
def submodules_update(args: argparse.Namespace, branch: Optional[str]) -> str:
"""Update submodules or other externally tracked source trees"""
print_stage("Updating Submodules")
msg = ""
msg += scripts_submodules_update(args, branch)
msg += floating_libraries_update(args, branch)
print("* Updating Git submodules")
exitcode = call((args.git_command, "submodule", "update", "--init"), exit_on_error=False)
if exitcode != 0:
msg += "Error updating Git submodules\n"
add_submodule_push_url(args)
return msg
@ -494,26 +574,33 @@ if __name__ == "__main__":
major = blender_version.version // 100
minor = blender_version.version % 100
branch = f"blender-v{major}.{minor}-release"
release_version: Optional[str] = f"{major}.{minor}"
else:
branch = 'main'
release_version = None
if not args.no_libraries:
svn_update(args, release_version)
# Submodules and precompiled libraries require Git LFS.
ensure_git_lfs(args)
if not args.no_blender:
blender_skip_msg = git_update_skip(args)
if not blender_skip_msg:
blender_skip_msg = blender_update(args)
if blender_skip_msg:
blender_skip_msg = "Blender repository skipped: " + blender_skip_msg + "\n"
if not args.no_libraries:
update_precompiled_libraries(args)
if args.use_tests:
update_tests_data_files(args)
if not args.no_submodules:
submodules_skip_msg = submodules_update(args, branch)
# Report any skipped repositories at the end, so it's not as easy to miss.
skip_msg = blender_skip_msg + submodules_skip_msg
if skip_msg:
print_stage(skip_msg.strip())
print()
print(skip_msg.strip())
print()
# For failed submodule update we throw an error, since not having correct
# submodules can make Blender throw errors.


@ -11,9 +11,7 @@ import re
import shutil
import subprocess
import sys
import os
from pathlib import Path
from urllib.parse import urljoin
from typing import (
Sequence,
@ -48,7 +46,7 @@ def check_output(cmd: Sequence[str], exit_on_error: bool = True) -> str:
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True)
except subprocess.CalledProcessError as e:
if exit_on_error:
sys.stderr.write(" ".join(cmd))
sys.stderr.write(" ".join(cmd) + "\n")
sys.stderr.write(e.output + "\n")
sys.exit(e.returncode)
output = ""
@ -87,25 +85,6 @@ def git_remote_exist(git_command: str, remote_name: str) -> bool:
return remote_url != remote_name
def git_get_resolved_submodule_url(git_command: str, blender_url: str, submodule_path: str) -> str:
git_root = check_output([git_command, "rev-parse", "--show-toplevel"])
dot_gitmodules = os.path.join(git_root, ".gitmodules")
submodule_key_prefix = f"submodule.{submodule_path}"
submodule_key_url = f"{submodule_key_prefix}.url"
gitmodule_url = git_get_config(
git_command, submodule_key_url, file=dot_gitmodules)
# A bit of a trickery to construct final URL.
# Only works for the relative submodule URLs.
#
# Note that unless the LHS URL ends up with a slash urljoin treats the last component as a
# file.
assert gitmodule_url.startswith('..')
return urljoin(blender_url + "/", gitmodule_url)
def git_is_remote_repository(git_command: str, repo: str) -> bool:
"""Returns true if the given repository is a valid/clonable git repo"""
exit_code = call((git_command, "ls-remote", repo, "HEAD"), exit_on_error=False, silent=True)
@ -113,7 +92,8 @@ def git_is_remote_repository(git_command: str, repo: str) -> bool:
def git_branch(git_command: str) -> str:
# Get current branch name.
"""Get current branch name."""
try:
branch = subprocess.check_output([git_command, "rev-parse", "--abbrev-ref", "HEAD"])
except subprocess.CalledProcessError as e:
@ -137,44 +117,32 @@ def git_set_config(git_command: str, key: str, value: str, file: Optional[str] =
return check_output([git_command, "config", key, value])
def git_tag(git_command: str) -> Optional[str]:
# Get current tag name.
try:
tag = subprocess.check_output([git_command, "describe", "--exact-match"], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
return None
def git_enable_submodule(git_command: str, submodule_dir: str):
"""Enable submodule denoted by its directory within the repository"""
return tag.strip().decode('utf8')
command = (git_command,
"config",
"--local",
f"submodule.{submodule_dir}.update", "checkout")
call(command, exit_on_error=True, silent=False)
def git_branch_release_version(branch: str, tag: Optional[str]) -> Optional[str]:
re_match = re.search("^blender-v(.*)-release$", branch)
release_version = None
if re_match:
release_version = re_match.group(1)
elif tag:
re_match = re.search(r"^v([0-9]*\.[0-9]*).*", tag)
if re_match:
release_version = re_match.group(1)
return release_version
def git_update_submodule(git_command: str, submodule_dir: str):
"""
Update the given submodule.
The submodule is denoted by its path within the repository.
This function will initialize the submodule if it has not been initialized.
"""
def svn_libraries_base_url(release_version: Optional[str], branch: Optional[str] = None) -> str:
if release_version:
svn_branch = "tags/blender-" + release_version + "-release"
elif branch:
svn_branch = "branches/" + branch
else:
svn_branch = "trunk"
return "https://svn.blender.org/svnroot/bf-blender/" + svn_branch + "/lib/"
call((git_command, "submodule", "update", "--init", submodule_dir))
def command_missing(command: str) -> bool:
# Support running with Python 2 for macOS
if sys.version_info >= (3, 0):
return shutil.which(command) is None
else:
return False
return False
class BlenderVersion:


@ -1,37 +1,30 @@
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15
set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"
set BUILD_VS_LIBDIR="lib\windows_x64"
if NOT "%verbose%" == "" (
echo Library Directory = "%BUILD_VS_LIBDIR%"
)
if NOT EXIST %BUILD_VS_LIBDIR% (
rem libs not found, but svn is on the system
if not "%SVN%"=="" (
if NOT EXIST "%BUILD_VS_LIBDIR%\.git" (
rem libs not found, but git is on the system
if not "%GIT%"=="" (
echo.
echo The required external libraries in %BUILD_VS_LIBDIR% are missing
echo.
set /p GetLibs= "Would you like to download them? (y/n)"
if /I "!GetLibs!"=="Y" (
echo.
echo Downloading %BUILD_VS_SVNDIR% libraries, please wait.
echo Downloading %BUILD_VS_LIBDIR% libraries, please wait.
echo.
:RETRY
"%SVN%" checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/%BUILD_VS_SVNDIR% %BUILD_VS_LIBDIR%
:RETRY
"%GIT%" -C "%BLENDER_DIR%" config --local "submodule.%BUILD_VS_LIBDIR%.update" "checkout"
"%GIT%" -C "%BLENDER_DIR%" submodule update --init "%BUILD_VS_LIBDIR%"
if errorlevel 1 (
set /p LibRetry= "Error during download, retry? y/n"
if /I "!LibRetry!"=="Y" (
cd %BUILD_VS_LIBDIR%
"%SVN%" cleanup
cd %BLENDER_DIR%
goto RETRY
)
echo.
echo Error: Download of external libraries failed.
echo This is needed for building, please manually run 'svn cleanup' and 'svn update' in
echo %BUILD_VS_LIBDIR% , until this is resolved you CANNOT make a successful blender build
echo Until this is resolved you CANNOT make a successful blender build.
echo.
exit /b 1
)
@ -39,11 +32,11 @@ if NOT EXIST %BUILD_VS_LIBDIR% (
)
) else (
if NOT EXIST %PYTHON% (
if not "%SVN%"=="" (
if not "%GIT%"=="" (
echo.
echo Python not found in external libraries, updating to latest version
echo.
"%SVN%" update %BUILD_VS_LIBDIR%
"%GIT%" -C "%BLENDER_DIR%" submodule update "%BUILD_VS_LIBDIR%"
)
)
)
@ -53,8 +46,8 @@ if NOT EXIST %BUILD_VS_LIBDIR% (
echo Error: Required libraries not found at "%BUILD_VS_LIBDIR%"
echo This is needed for building, aborting!
echo.
if "%SVN%"=="" (
echo This is most likely caused by svn.exe not being available.
if "%GIT%"=="" (
echo This is most likely caused by git.exe not being available.
)
exit /b 1
)


@ -1,5 +1,4 @@
REM find all dependencies and set the corresponding environment variables.
for %%X in (svn.exe) do (set SVN=%%~$PATH:X)
for %%X in (cmake.exe) do (set CMAKE=%%~$PATH:X)
for %%X in (ctest.exe) do (set CTEST=%%~$PATH:X)
for %%X in (git.exe) do (set GIT=%%~$PATH:X)
@ -7,19 +6,19 @@ REM For python, default on 310 but if that does not exist also check
REM the 311, 312 and finally 39 folders to see if those are there, it checks
REM this far ahead to ensure good lib folder compatibility in the future
REM it falls back to 3.9 just incase it is a very old lib folder.
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\310\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\310\bin\python.exe
if EXIST %PYTHON% (
goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\311\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\311\bin\python.exe
if EXIST %PYTHON% (
goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\312\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\312\bin\python.exe
if EXIST %PYTHON% (
goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\39\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\39\bin\python.exe
if EXIST %PYTHON% (
goto detect_python_done
)
@ -36,7 +35,6 @@ if NOT EXIST %PYTHON% (
:detect_python_done
if NOT "%verbose%" == "" (
echo svn : "%SVN%"
echo cmake : "%CMAKE%"
echo ctest : "%CTEST%"
echo git : "%GIT%"


@ -1,5 +1,5 @@
if EXIST %BLENDER_DIR%\..\lib\win64_vc15\llvm\bin\clang-format.exe (
set CF_PATH=..\lib\win64_vc15\llvm\bin
if EXIST %BLENDER_DIR%\lib\windows_x64\llvm\bin\clang-format.exe (
set CF_PATH=lib\windows_x64\llvm\bin
goto detect_done
)


@ -0,0 +1,16 @@
set BUILD_VS_LIBDIR="lib\windows_x64"
:RETRY
"%GIT%" -C "%BLENDER_DIR%" config --local "submodule.%BUILD_VS_LIBDIR%.update" "checkout"
"%GIT%" -C "%BLENDER_DIR%" submodule update --init "%BUILD_VS_LIBDIR%"
if errorlevel 1 (
set /p LibRetry= "Error during update, retry? y/n"
if /I "!LibRetry!"=="Y" (
goto RETRY
)
echo.
echo Error: Download of external libraries failed.
echo Until this is resolved you CANNOT make a successful blender build.
echo.
exit /b 1
)


@ -110,9 +110,6 @@ if NOT "%1" == "" (
) else if "%1" == "doc_py" (
set DOC_PY=1
goto EOF
) else if "%1" == "svnfix" (
set SVN_FIX=1
goto EOF
) else (
echo Command "%1" unknown, aborting!
goto ERR


@ -4,9 +4,7 @@ set BUILD_CMAKE_ARGS=
set BUILD_ARCH=
set BUILD_VS_VER=
set BUILD_VS_YEAR=
set BUILD_VS_LIBDIRPOST=
set BUILD_VS_LIBDIR=
set BUILD_VS_SVNDIR=
set KEY_NAME=
set MSBUILD_PLATFORM=
set MUST_CLEAN=


@ -8,13 +8,10 @@ for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Branch_hash=%%i
cd "%BLENDER_DIR%/scripts/addons"
for /f "delims=" %%i in ('"%GIT%" rev-parse --abbrev-ref HEAD') do echo Addons_Branch_name=%%i
for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Addons_Branch_hash=%%i
if "%SVN%" == "" (
echo SVN not found, cannot library information.
goto EOF
)
set BUILD_VS_LIBDIR=%BLENDER_DIR%..\lib\win64_vc15
for /f "delims=" %%i in ('"%SVN%" info --show-item=url --no-newline %BUILD_VS_LIBDIR% ') do echo Libs_URL=%%i
for /f "delims=" %%i in ('"%SVN%" info --show-item=revision --no-newline %BUILD_VS_LIBDIR% ') do echo Libs_Revision=%%i
for /f "delims=" %%i in ('"%SVN%" info --show-item=last-changed-date --no-newline %BUILD_VS_LIBDIR% ') do echo Libs_LastChange=%%i
cd "%BLENDER_DIR%/lib/windows_x64"
for /f "delims=" %%i in ('"%GIT%" rev-parse --abbrev-ref HEAD') do echo Libs_Branch_name=%%i
for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Libs_Branch_hash=%%i
cd "%BLENDER_DIR%"
:EOF


@ -1,25 +0,0 @@
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15
set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"
echo Starting cleanup in %BUILD_VS_LIBDIR%.
cd %BUILD_VS_LIBDIR%
:RETRY
"%SVN%" cleanup
"%SVN%" update
if errorlevel 1 (
set /p LibRetry= "Error during update, retry? y/n"
if /I "!LibRetry!"=="Y" (
goto RETRY
)
echo.
echo Error: Download of external libraries failed.
echo This is needed for building, please manually run 'svn cleanup' and 'svn update' in
echo %BUILD_VS_LIBDIR% , until this is resolved you CANNOT make a successful blender build
echo.
exit /b 1
)
echo Cleanup complete


@ -1,24 +0,0 @@
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15
set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"
cd %BUILD_VS_LIBDIR%
:RETRY
"%SVN%" update
if errorlevel 1 (
set /p LibRetry= "Error during update, retry? y/n"
if /I "!LibRetry!"=="Y" (
"%SVN%" cleanup
goto RETRY
)
echo.
echo Error: Download of external libraries failed.
echo This is needed for building, please manually run 'svn cleanup' and 'svn update' in
echo %BUILD_VS_LIBDIR% , until this is resolved you CANNOT make a successful blender build
echo.
exit /b 1
)
cd %BLENDER_DIR%


@ -8,6 +8,6 @@ exit /b 1
:detect_python_done
REM Use -B to avoid writing __pycache__ in lib directory and causing update conflicts.
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_test.py --git-command "%GIT%" --svn-command "%SVN%" --cmake-command="%CMAKE%" --ctest-command="%CTEST%" --config="%BUILD_TYPE%" %BUILD_DIR%
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_test.py --git-command "%GIT%" --cmake-command="%CMAKE%" --ctest-command="%CTEST%" --config="%BUILD_TYPE%" %BUILD_DIR%
:EOF


@ -5,6 +5,6 @@ if NOT EXIST %PYTHON% (
:detect_python_done
REM Use -B to avoid writing __pycache__ in lib directory and causing update conflicts.
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_update.py --git-command "%GIT%" --svn-command "%SVN%" %BUILD_UPDATE_ARGS%
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_update.py --git-command "%GIT%" %BUILD_UPDATE_ARGS%
:EOF


@ -470,7 +470,7 @@ ccl_device Spectrum bsdf_hair_huang_eval_residual(KernelGlobals kg,
const float3 wh1 = sample_wh(kg, roughness, wi, wmi, sample1);
const float cos_hi1 = dot(wi, wh1);
if (!(cos_hi1 > 0)) {
if (!(cos_hi1 > 0.0f)) {
continue;
}
@ -535,7 +535,7 @@ ccl_device Spectrum bsdf_hair_huang_eval_residual(KernelGlobals kg,
lcg_step_float(&rng_quadrature));
const float3 wh2 = sample_wh(kg, roughness, -wt, wmt, sample2);
const float cos_hi2 = dot(-wt, wh2);
if (!(cos_hi2 > 0)) {
if (!(cos_hi2 > 0.0f)) {
continue;
}
const float R2 = fresnel_dielectric_cos(cos_hi2, inv_eta);
@ -717,7 +717,7 @@ ccl_device int bsdf_hair_huang_sample(const KernelGlobals kg,
wtt = refract_angle(-wt, wh2, cos_theta_t2, bsdf->eta);
if (dot(wmt, -wtt) > 0.0f && cos_theta_t2 != 0.0f && microfacet_visible(-wtt, wmt_, wh2)) {
if (dot(wmt, -wtt) > 0.0f && T2 > 0.0f && microfacet_visible(-wtt, wmt_, wh2)) {
TT = bsdf->extra->TT * T1 * A_t * T2 * scale2 * bsdf_Go(roughness2, cos_mi2, dot(wmt, -wtt));
}
@ -746,9 +746,7 @@ ccl_device int bsdf_hair_huang_sample(const KernelGlobals kg,
const float T3 = 1.0f - R3;
if (cos_theta_t3 != 0.0f &&
microfacet_visible(wtr, -wtrt, make_float3(wmtr.x, 0.0f, wmtr.z), wh3))
{
if (T3 > 0.0f && microfacet_visible(wtr, -wtrt, make_float3(wmtr.x, 0.0f, wmtr.z), wh3)) {
TRT = bsdf->extra->TRT * TR * make_spectrum(T3) *
bsdf_Go(roughness2, cos_mi3, dot(wmtr, -wtrt));
}
@ -760,8 +758,8 @@ ccl_device int bsdf_hair_huang_sample(const KernelGlobals kg,
/* Sample `theta_o`. */
const float rand_theta = max(lcg_step_float(&sd->lcg_state), 1e-5f);
const float fac = 1.0f +
bsdf->roughness *
logf(rand_theta + (1.0f - rand_theta) * expf(-2.0f / bsdf->roughness));
4.0f * bsdf->roughness *
logf(rand_theta + (1.0f - rand_theta) * expf(-0.5f / bsdf->roughness));
const float sin_theta_o = -fac * sin_theta(wi) +
cos_from_sin(fac) *
cosf(M_2PI_F * lcg_step_float(&sd->lcg_state)) * cos_theta(wi);
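
For reference, writing the code's `roughness` as $\beta$ and `rand_theta` as $\xi$, the `theta_o` sampling factor above changes from $fac = 1 + \beta\,\ln(\xi + (1-\xi)e^{-2/\beta})$ to $fac = 1 + 4\beta\,\ln(\xi + (1-\xi)e^{-1/(2\beta)})$.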


@ -135,18 +135,26 @@ ccl_device_forceinline bool triangle_light_sample(KernelGlobals kg,
const float3 e1 = V[2] - V[0];
const float3 e2 = V[2] - V[1];
const float longest_edge_squared = max(len_squared(e0), max(len_squared(e1), len_squared(e2)));
const float3 N0 = cross(e0, e1);
float3 N0 = cross(e0, e1);
/* Flip normal if necessary. */
const int object_flag = kernel_data_fetch(object_flag, object);
if (object_flag & SD_OBJECT_NEGATIVE_SCALE) {
N0 = -N0;
}
/* Do not draw samples from the side without MIS. */
ls->shader = kernel_data_fetch(tri_shader, prim);
const float distance_to_plane = dot(N0, V[0] - P) / dot(N0, N0);
const int ls_shader_flag = kernel_data_fetch(shaders, ls->shader & SHADER_MASK).flags;
if (!(ls_shader_flag & (distance_to_plane > 0 ? SD_MIS_BACK : SD_MIS_FRONT))) {
return false;
}
float Nl = 0.0f;
ls->Ng = safe_normalize_len(N0, &Nl);
const float area = 0.5f * Nl;
/* flip normal if necessary */
const int object_flag = kernel_data_fetch(object_flag, object);
if (object_flag & SD_OBJECT_NEGATIVE_SCALE) {
ls->Ng = -ls->Ng;
}
ls->eval_fac = 1.0f;
ls->shader = kernel_data_fetch(tri_shader, prim);
ls->object = object;
ls->prim = prim;
ls->lamp = LAMP_NONE;
@ -154,8 +162,6 @@ ccl_device_forceinline bool triangle_light_sample(KernelGlobals kg,
ls->type = LIGHT_TRIANGLE;
ls->group = object_lightgroup(kg, object);
float distance_to_plane = fabsf(dot(N0, V[0] - P) / dot(N0, N0));
if (!in_volume_segment && (longest_edge_squared > distance_to_plane * distance_to_plane)) {
/* A modified version of James Arvo, "Stratified Sampling of Spherical Triangles"
* http://www.graphics.cornell.edu/pubs/1995/Arv95c.pdf */

View File

@ -1331,7 +1331,7 @@ void LightManager::device_update_lights(Device *device, DeviceScene *dscene, Sce
float3 dir = safe_normalize(light->get_dir());
if (light->use_mis && area != 0.0f) {
if (light->use_mis && area != 0.0f && light->spread > 0.0f) {
shader_id |= SHADER_USE_MIS;
}

lib/linux_x64 Submodule

@ -0,0 +1 @@
Subproject commit d2eea8a8a6b22d6ec6849deea72e141fc5b384d4

lib/macos_arm64 Submodule

@ -0,0 +1 @@
Subproject commit 6c8139034cfb05f8dee9f6648d31443792c160b5

lib/macos_x64 Submodule

@ -0,0 +1 @@
Subproject commit bf950af1a7d197675bd28ce7d4920ba2b4a0a4f6

lib/windows_x64 Submodule

@ -0,0 +1 @@
Subproject commit 2935423fe0636157750feef1fc02d8d0c3efd9dd


@ -56,11 +56,6 @@ if "%BUILD_VS_YEAR%" == "" (
)
)
if "%SVN_FIX%" == "1" (
call "%BLENDER_DIR%\build_files\windows\svn_fix.cmd"
goto EOF
)
if "%BUILD_UPDATE%" == "1" (
REM First see if the SVN libs are there and check them out if they are not.
call "%BLENDER_DIR%\build_files\windows\check_libraries.cmd"
@ -70,7 +65,7 @@ if "%BUILD_UPDATE%" == "1" (
REM running tends to be problematic. The python script that update_sources
REM calls later on may still try to switch branches and run into trouble,
REM but for *most* people this will side step the problem.
call "%BLENDER_DIR%\build_files\windows\svn_update.cmd"
call "%BLENDER_DIR%\build_files\windows\lib_update.cmd"
)
REM Finally call the python script shared between all platforms that updates git
REM and does any other SVN work like update the tests or branch switches

release/datafiles/assets Submodule

@ -0,0 +1 @@
Subproject commit 38a8f38d98987efc9aebe878ca941d99756e914e


@ -143,6 +143,11 @@ class DATA_PT_grease_pencil_layer_relations(LayerDataButtonsPanel, Panel):
row = layout.row(align=True)
row.prop_search(layer, "parent_bone", layer.parent.data, "bones", text="Bone")
layout.separator()
col = layout.row(align=True)
col.prop(layer, "pass_index")
classes = (
DATA_PT_context_grease_pencil,


@ -19,7 +19,7 @@
namespace blender::asset_system::tests {
/* UUIDs from lib/tests/asset_library/blender_assets.cats.txt */
/* UUIDs from tests/data/asset_library/blender_assets.cats.txt */
const bUUID UUID_ID_WITHOUT_PATH("e34dd2c5-5d2e-4668-9794-1db5de2a4f71");
const bUUID UUID_POSES_ELLIE("df60e1f6-2259-475b-93d9-69a1b4a8db78");
const bUUID UUID_POSES_ELLIE_WHITESPACE("b06132f6-5687-4751-a6dd-392740eb3c46");
@ -31,7 +31,7 @@ const bUUID UUID_POSES_RUZENA_FACE("82162c1f-06cc-4d91-a9bf-4f72c104e348");
const bUUID UUID_WITHOUT_SIMPLENAME("d7916a31-6ca9-4909-955f-182ca2b81fa3");
const bUUID UUID_ANOTHER_RUZENA("00000000-d9fa-4b91-b704-e6af1f1339ef");
/* UUIDs from lib/tests/asset_library/modified_assets.cats.txt */
/* UUIDs from tests/data/asset_library/modified_assets.cats.txt */
const bUUID UUID_AGENT_47("c5744ba5-43f5-4f73-8e52-010ad4a61b34");
/* Subclass that adds accessors such that protected fields can be used in tests. */


@ -176,7 +176,7 @@ enum {
BLENDER_SYSTEM_PYTHON = 54,
};
/* for BKE_appdir_folder_id_version only */
/** For #BKE_appdir_folder_id_version only. */
enum {
BLENDER_RESOURCE_PATH_USER = 0,
BLENDER_RESOURCE_PATH_LOCAL = 1,

View File

@ -481,6 +481,11 @@ inline const bNode *bNodeTree::group_output_node() const
return this->runtime->group_output_node;
}
inline blender::Span<bNode *> bNodeTree::group_input_nodes()
{
return this->nodes_by_type("NodeGroupInput");
}
inline blender::Span<const bNode *> bNodeTree::group_input_nodes() const
{
return this->nodes_by_type("NodeGroupInput");


@ -103,6 +103,16 @@ void BKE_preferences_extension_repo_dirpath_get(const bUserExtensionRepo *repo,
bUserExtensionRepo *BKE_preferences_extension_repo_find_index(const UserDef *userdef, int index);
bUserExtensionRepo *BKE_preferences_extension_repo_find_by_module(const UserDef *userdef,
const char *module);
/**
* Using a full URL/remote path to find a repository that shares its prefix.
*/
bUserExtensionRepo *BKE_preferences_extension_repo_find_by_remote_path_prefix(
const UserDef *userdef, const char *remote_path_full, const bool only_enabled);
/**
* Skip the `https` or `http` part of a URL `https://`, return zero if none is found.
*/
int BKE_preferences_extension_repo_remote_scheme_end(const char *url);
int BKE_preferences_extension_repo_get_index(const UserDef *userdef,
const bUserExtensionRepo *repo);


@ -139,8 +139,9 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,
return nullptr;
}
const int expected_length = avs->path_ef - avs->path_sf;
BLI_assert(expected_length > 0); /* Because the `if` above. */
/* Adding 1 because the avs range is inclusive on both ends. */
const int expected_length = (avs->path_ef - avs->path_sf) + 1;
BLI_assert(expected_length > 1); /* Because the `if` above. */
/* If there is already a motionpath, just return that, provided its settings
* are ok (saves extra free+alloc). */
@ -157,7 +158,7 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,
/* Only reuse a path if it was already a valid path, and of the expected length. */
if (mpath->start_frame != mpath->end_frame && mpath->length == expected_length) {
mpath->start_frame = avs->path_sf;
mpath->end_frame = avs->path_ef;
mpath->end_frame = avs->path_ef + 1;
return mpath;
}
@ -171,7 +172,7 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,
/* Copy mpath settings from the viz settings. */
mpath->start_frame = avs->path_sf;
mpath->end_frame = avs->path_ef;
mpath->end_frame = avs->path_ef + 1;
mpath->length = expected_length;
if (avs->path_bakeflag & MOTIONPATH_BAKE_HEADS) {


@ -45,7 +45,7 @@ IndexRange NodeBakeCache::frame_range() const
}
const int start_frame = this->frames.first()->frame.frame();
const int end_frame = this->frames.last()->frame.frame();
return {start_frame, end_frame - start_frame + 1};
return IndexRange::from_begin_end_inclusive(start_frame, end_frame);
}
SimulationNodeCache *ModifierCache::get_simulation_node_cache(const int id)


@ -512,7 +512,7 @@ static void swap_old_bmain_data_for_blendfile(ReuseOldBMainData *reuse_data, con
std::swap(*new_lb, *old_lb);
/* TODO: Could add per-IDType control over namemaps clearing, if this becomes a performances
/* TODO: Could add per-IDType control over name-maps clearing, if this becomes a performances
* concern. */
BKE_main_namemap_clear(old_bmain);
BKE_main_namemap_clear(new_bmain);


@ -6,6 +6,9 @@
* \ingroup bke
*/
#include <fmt/format.h>
#include "BKE_anim_data.h"
#include "BKE_attribute.hh"
#include "BKE_colorband.hh"
#include "BKE_colortools.hh"
@ -23,6 +26,7 @@
#include "BKE_object.hh"
#include "BLI_color.hh"
#include "BLI_function_ref.hh"
#include "BLI_listbase.h"
#include "BLI_math_vector_types.hh"
#include "BLI_string.h"
@ -31,16 +35,86 @@
#include "BLT_translation.hh"
#include "DNA_anim_types.h"
#include "DNA_gpencil_legacy_types.h"
#include "DNA_gpencil_modifier_types.h"
#include "DNA_grease_pencil_types.h"
#include "DNA_meshdata_types.h"
#include "DNA_modifier_types.h"
#include "DEG_depsgraph.hh"
#include "DEG_depsgraph_build.hh"
namespace blender::bke::greasepencil::convert {
/* -------------------------------------------------------------------- */
/** \name Animation conversion helpers.
*
* These utilities will call the given callback over all relevant F-curves
* (also includes drivers, and actions linked through the NLA).
* \{ */
static bool legacy_fcurves_process(ListBase &fcurves,
blender::FunctionRef<bool(FCurve *fcurve)> callback)
{
bool is_changed = false;
LISTBASE_FOREACH (FCurve *, fcurve, &fcurves) {
const bool local_is_changed = callback(fcurve);
is_changed = is_changed || local_is_changed;
}
return is_changed;
}
static bool legacy_nla_strip_process(NlaStrip &nla_strip,
blender::FunctionRef<bool(FCurve *fcurve)> callback)
{
bool is_changed = false;
if (nla_strip.act) {
if (legacy_fcurves_process(nla_strip.act->curves, callback)) {
DEG_id_tag_update(&nla_strip.act->id, ID_RECALC_ANIMATION);
is_changed = true;
}
}
LISTBASE_FOREACH (NlaStrip *, nla_strip_children, &nla_strip.strips) {
const bool local_is_changed = legacy_nla_strip_process(*nla_strip_children, callback);
is_changed = is_changed || local_is_changed;
}
return is_changed;
}
static bool legacy_animation_process(AnimData &anim_data,
blender::FunctionRef<bool(FCurve *fcurve)> callback)
{
bool is_changed = false;
if (anim_data.action) {
if (legacy_fcurves_process(anim_data.action->curves, callback)) {
DEG_id_tag_update(&anim_data.action->id, ID_RECALC_ANIMATION);
is_changed = true;
}
}
if (anim_data.tmpact) {
if (legacy_fcurves_process(anim_data.tmpact->curves, callback)) {
DEG_id_tag_update(&anim_data.tmpact->id, ID_RECALC_ANIMATION);
is_changed = true;
}
}
{
const bool local_is_changed = legacy_fcurves_process(anim_data.drivers, callback);
is_changed = is_changed || local_is_changed;
}
LISTBASE_FOREACH (NlaTrack *, nla_track, &anim_data.nla_tracks) {
LISTBASE_FOREACH (NlaStrip *, nla_strip, &nla_track->strips) {
const bool local_is_changed = legacy_nla_strip_process(*nla_strip, callback);
is_changed = is_changed || local_is_changed;
}
}
return is_changed;
}
/* \} */
/**
* Find vertex groups that have assigned vertices in this drawing.
* Returns:
@ -384,6 +458,18 @@ void legacy_gpencil_to_grease_pencil(Main &bmain, GreasePencil &grease_pencil, b
/* TODO: Update drawing user counts. */
}
/* Second loop, to write to layer attributes after all layers were created. */
MutableAttributeAccessor layer_attributes = grease_pencil.attributes_for_write();
SpanAttributeWriter<int> layer_passes = layer_attributes.lookup_or_add_for_write_span<int>(
"pass_index", bke::AttrDomain::Layer);
layer_idx = 0;
LISTBASE_FOREACH_INDEX (bGPDlayer *, gpl, &gpd.layers, layer_idx) {
layer_passes.span[layer_idx] = int(gpl->pass_index);
}
layer_passes.finish();
/* Copy vertex group names and settings. */
BKE_defgroup_copy_list(&grease_pencil.vertex_group_names, &gpd.vertex_group_names);
grease_pencil.vertex_group_active_index = gpd.vertex_group_active_index;
@ -618,6 +704,33 @@ static ModifierData &legacy_object_modifier_common(Object &object,
/* Attempt to copy UI state (panels) as best as possible. */
new_md.ui_expand_flag = legacy_md.ui_expand_flag;
/* Convert animation data if needed. */
AnimData *anim_data = BKE_animdata_from_id(&object.id);
if (anim_data) {
auto modifier_path_update = [&](FCurve *fcurve) -> bool {
/* NOTE: This logic will likely need to be re-used in other similar conditions for other
* areas, should be put into its own util then. */
if (!fcurve->rna_path) {
return false;
}
StringRefNull rna_path = fcurve->rna_path;
const std::string legacy_root_path = fmt::format("grease_pencil_modifiers[\"{}\"]",
legacy_md.name);
if (!rna_path.startswith(legacy_root_path)) {
return false;
}
const std::string new_rna_path = fmt::format(
"modifiers[\"{}\"]{}", new_md.name, rna_path.substr(int64_t(legacy_root_path.size())));
MEM_freeN(fcurve->rna_path);
fcurve->rna_path = BLI_strdupn(new_rna_path.c_str(), new_rna_path.size());
return true;
};
if (legacy_animation_process(*anim_data, modifier_path_update)) {
DEG_id_tag_update(&object.id, ID_RECALC_ANIMATION);
}
}
return new_md;
}


@ -597,7 +597,7 @@ static bool main_namemap_validate_and_fix(Main *bmain, const bool do_fix)
return is_valid;
}
/* Clear all existing namemaps. */
/* Clear all existing name-maps. */
BKE_main_namemap_clear(bmain);
return is_valid;


@ -902,6 +902,58 @@ class NodeTreeMainUpdater {
}
}
/* Find conflicts between corresponding menu sockets on different group input nodes. */
const Span<bNode *> group_input_nodes = ntree.group_input_nodes();
for (const int interface_input_i : ntree.interface_inputs().index_range()) {
const bNodeTreeInterfaceSocket &interface_socket =
*ntree.interface_inputs()[interface_input_i];
if (interface_socket.socket_type != StringRef("NodeSocketMenu")) {
continue;
}
const RuntimeNodeEnumItems *found_enum_items = nullptr;
bool found_conflict = false;
for (bNode *input_node : group_input_nodes) {
const bNodeSocket &socket = input_node->output_socket(interface_input_i);
const auto &socket_value = *socket.default_value_typed<bNodeSocketValueMenu>();
if (socket_value.has_conflict()) {
found_conflict = true;
break;
}
if (found_enum_items == nullptr) {
found_enum_items = socket_value.enum_items;
}
else if (socket_value.enum_items != nullptr) {
if (found_enum_items != socket_value.enum_items) {
found_conflict = true;
break;
}
}
}
if (found_conflict) {
/* Make sure that all group input sockets know that there is a conflict. */
for (bNode *input_node : group_input_nodes) {
bNodeSocket &socket = input_node->output_socket(interface_input_i);
auto &socket_value = *socket.default_value_typed<bNodeSocketValueMenu>();
if (socket_value.enum_items) {
socket_value.enum_items->remove_user_and_delete_if_last();
socket_value.enum_items = nullptr;
}
socket_value.runtime_flag |= NodeSocketValueMenuRuntimeFlag::NODE_MENU_ITEMS_CONFLICT;
}
}
else if (found_enum_items != nullptr) {
/* Make sure all corresponding menu sockets have the same menu reference. */
for (bNode *input_node : group_input_nodes) {
bNodeSocket &socket = input_node->output_socket(interface_input_i);
auto &socket_value = *socket.default_value_typed<bNodeSocketValueMenu>();
if (socket_value.enum_items == nullptr) {
found_enum_items->add_user();
socket_value.enum_items = found_enum_items;
}
}
}
}
/* Build list of new enum items for the node tree interface. */
Vector<bNodeSocketValueMenu> interface_enum_items(ntree.interface_inputs().size(), {0});
for (const bNode *group_input_node : ntree.group_input_nodes()) {
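
The sharing/conflict rules above boil down to: all group-input sockets belonging to one interface socket must agree on a single reference-counted item set, otherwise every socket is flagged as conflicting. A reduced model of this (simplified stand-in types, not the Blender runtime):

#include <cassert>
#include <vector>

struct EnumItems {
  int users = 1; /* Stand-in for RuntimeNodeEnumItems reference counting. */
};

struct MenuSocket {
  EnumItems *items = nullptr;
};

/* Returns true when two sockets reference different non-null item sets;
 * otherwise shares the single found set across all sockets. */
static bool resolve(std::vector<MenuSocket> &sockets)
{
  EnumItems *found = nullptr;
  for (const MenuSocket &socket : sockets) {
    if (socket.items == nullptr) {
      continue;
    }
    if (found == nullptr) {
      found = socket.items;
    }
    else if (found != socket.items) {
      return true; /* Conflict: two distinct menus for the same interface socket. */
    }
  }
  if (found) {
    for (MenuSocket &socket : sockets) {
      if (socket.items == nullptr) {
        found->users++;
        socket.items = found;
      }
    }
  }
  return false;
}

int main()
{
  EnumItems a, b;
  std::vector<MenuSocket> shared = {{&a}, {nullptr}};
  assert(!resolve(shared) && shared[1].items == &a && a.users == 2);
  std::vector<MenuSocket> conflicting = {{&a}, {&b}};
  assert(resolve(conflicting));
}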

View File

@ -278,6 +278,82 @@ bUserExtensionRepo *BKE_preferences_extension_repo_find_by_module(const UserDef
BLI_findstring(&userdef->extension_repos, module, offsetof(bUserExtensionRepo, module)));
}
bUserExtensionRepo *BKE_preferences_extension_repo_find_by_remote_path_prefix(
const UserDef *userdef, const char *path_full, const bool only_enabled)
{
const int path_full_len = strlen(path_full);
const int path_full_offset = BKE_preferences_extension_repo_remote_scheme_end(path_full);
LISTBASE_FOREACH (bUserExtensionRepo *, repo, &userdef->extension_repos) {
if (only_enabled && (repo->flag & USER_EXTENSION_REPO_FLAG_DISABLED)) {
continue;
}
/* Has a valid remote path to check. */
if ((repo->flag & USER_EXTENSION_REPO_FLAG_USE_REMOTE_PATH) == 0) {
continue;
}
if (repo->remote_path[0] == '\0') {
continue;
}
/* Set path variables which may be offset by the "scheme". */
const char *path_repo = repo->remote_path;
const char *path_test = path_full;
int path_test_len = path_full_len;
/* Allow paths beginning with both `http` & `https` to be considered equivalent.
* This is done by skipping the "scheme" prefix when both have a scheme. */
if (path_full_offset) {
const int path_repo_offset = BKE_preferences_extension_repo_remote_scheme_end(path_repo);
if (path_repo_offset) {
path_repo += path_repo_offset;
path_test += path_full_offset;
path_test_len -= path_full_offset;
}
}
/* The length of the path without trailing slashes. */
int path_repo_len = strlen(path_repo);
while (path_repo_len && ELEM(path_repo[path_repo_len - 1], '/', '\\')) {
path_repo_len--;
}
if (path_test_len <= path_repo_len) {
continue;
}
if (memcmp(path_repo, path_test, path_repo_len) != 0) {
continue;
}
/* A delimiter must follow to ensure `path_test` doesn't reference a longer host-name.
* Will typically be a `/` or a `:`. */
if (!ELEM(path_test[path_repo_len], '/', '\\', ':', '&', '?')) {
continue;
}
return repo;
}
return nullptr;
}
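
For illustration, assuming a repository whose `remote_path` is `https://example.org/api/extensions/` (a hypothetical value), the matching above behaves roughly like:

http://example.org/api/extensions/foo.zip -> match (http/https treated as equivalent)
https://example.org/api/extensions?q=foo -> match ('?' is an accepted delimiter)
https://example.org/api/extensionsXL/foo -> no match (no delimiter after the prefix)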
int BKE_preferences_extension_repo_remote_scheme_end(const char *url)
{
/* Technically the "://" is not part of the scheme, so subtract 3 from the return value. */
const char *scheme_check[] = {
"http://",
"https://",
"file://",
};
for (int i = 0; i < ARRAY_SIZE(scheme_check); i++) {
const char *scheme = scheme_check[i];
int scheme_len = strlen(scheme);
if (strncmp(url, scheme, scheme_len) == 0) {
return scheme_len - 3;
}
}
return 0;
}
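
A sketch of the expected return values (the offset is the scheme length without the "://"):

assert(BKE_preferences_extension_repo_remote_scheme_end("https://x") == 5); /* strlen("https") */
assert(BKE_preferences_extension_repo_remote_scheme_end("file:///tmp") == 4); /* strlen("file") */
assert(BKE_preferences_extension_repo_remote_scheme_end("ftp://x") == 0); /* Unknown scheme. */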
int BKE_preferences_extension_repo_get_index(const UserDef *userdef,
const bUserExtensionRepo *repo)
{

View File

@ -261,7 +261,7 @@ template<typename T> inline Vector<IndexRange> find_all_ranges(const Span<T> spa
int64_t length = (span.first() == value) ? 1 : 0;
for (const int64_t i : span.index_range().drop_front(1)) {
if (span[i - 1] == value && span[i] != value) {
ranges.append(IndexRange(i - length, length));
ranges.append(IndexRange::from_end_size(i, length));
length = 0;
}
else if (span[i] == value) {
@ -269,7 +269,7 @@ template<typename T> inline Vector<IndexRange> find_all_ranges(const Span<T> spa
}
}
if (length > 0) {
ranges.append(IndexRange(span.size() - length, length));
ranges.append(IndexRange::from_end_size(span.size(), length));
}
return ranges;
}
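
Both spellings denote the same half-open range: `IndexRange::from_end_size(10, 3)` and `IndexRange(7, 3)` each cover the indices 7, 8, 9; the factory merely states the intent (the range ends at `i`) without the manual `i - length` arithmetic.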

View File

@ -236,7 +236,7 @@ class BitVector {
IndexRange index_range() const
{
return {0, size_in_bits_};
return IndexRange(size_in_bits_);
}
/**
@ -283,7 +283,7 @@ class BitVector {
}
size_in_bits_ = new_size_in_bits;
if (old_size_in_bits < new_size_in_bits) {
MutableBitSpan(data_, IndexRange(old_size_in_bits, new_size_in_bits - old_size_in_bits))
MutableBitSpan(data_, IndexRange::from_begin_end(old_size_in_bits, new_size_in_bits))
.set_all(value);
}
}

View File

@ -116,7 +116,19 @@ class IndexMaskMemory : public LinearAllocator<> {
}
};
using IndexMaskSegment = OffsetSpan<int64_t, int16_t>;
/**
* A sequence of unique and ordered indices in one segment of an IndexMask. The segment as a whole
* has an `int64_t` index offset that is added to each referenced `int16_t` index.
*/
class IndexMaskSegment : public OffsetSpan<int64_t, int16_t> {
public:
using OffsetSpan<int64_t, int16_t>::OffsetSpan;
explicit IndexMaskSegment(const OffsetSpan<int64_t, int16_t> span);
IndexMaskSegment slice(const IndexRange &range) const;
IndexMaskSegment slice(const int64_t start, const int64_t size) const;
};
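
To make the offset semantics concrete, a short sketch (assuming construction from an offset plus a span of local indices, as inherited from `OffsetSpan`):

const std::array<int16_t, 3> local = {1, 3, 5};
const IndexMaskSegment segment(100, local);
/* Each lookup adds the 64-bit offset: the segment represents 101, 103 and 105. */
BLI_assert(segment[0] == 101 && segment[2] == 105);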
/**
* An #IndexMask is a sequence of unique and sorted indices (`BLI_unique_sorted_indices.hh`).
@ -536,6 +548,26 @@ inline bool operator==(const RawMaskIterator &a, const RawMaskIterator &b)
return !(a != b);
}
/* -------------------------------------------------------------------- */
/** \name #IndexMaskSegment Inline Methods
* \{ */
inline IndexMaskSegment::IndexMaskSegment(const OffsetSpan<int64_t, int16_t> span)
: OffsetSpan<int64_t, int16_t>(span)
{
}
inline IndexMaskSegment IndexMaskSegment::slice(const IndexRange &range) const
{
return IndexMaskSegment(static_cast<const OffsetSpan<int64_t, int16_t> *>(this)->slice(range));
}
inline IndexMaskSegment IndexMaskSegment::slice(const int64_t start, const int64_t size) const
{
return IndexMaskSegment(
static_cast<const OffsetSpan<int64_t, int16_t> *>(this)->slice(start, size));
}
/* -------------------------------------------------------------------- */
/** \name #IndexMask Inline Methods
* \{ */
@ -612,8 +644,7 @@ inline IndexRange IndexMask::bounds() const
}
const int64_t first = this->first();
const int64_t last = this->last();
const int64_t range = last - first + 1;
return IndexRange(first, range);
return IndexRange::from_begin_end_inclusive(first, last);
}
inline int64_t IndexMask::first() const

View File

@ -60,12 +60,37 @@ class IndexRange {
BLI_assert(size >= 0);
}
constexpr IndexRange(int64_t start, int64_t size) : start_(start), size_(size)
constexpr IndexRange(const int64_t start, const int64_t size) : start_(start), size_(size)
{
BLI_assert(start >= 0);
BLI_assert(size >= 0);
}
constexpr static IndexRange from_begin_size(const int64_t begin, const int64_t size)
{
return IndexRange(begin, size);
}
constexpr static IndexRange from_begin_end(const int64_t begin, const int64_t end)
{
return IndexRange(begin, end - begin);
}
constexpr static IndexRange from_begin_end_inclusive(const int64_t begin, const int64_t last)
{
return IndexRange(begin, last - begin + 1);
}
constexpr static IndexRange from_end_size(const int64_t end, const int64_t size)
{
return IndexRange(end - size, size);
}
constexpr static IndexRange from_single(const int64_t index)
{
return IndexRange(index, 1);
}
class Iterator : public iterator::RandomAccessIteratorMixin<Iterator> {
public:
using value_type = int64_t;

View File

@ -66,16 +66,14 @@ template<typename T> class OffsetIndices {
BLI_assert(index < offsets_.size() - 1);
const int64_t begin = offsets_[index];
const int64_t end = offsets_[index + 1];
const int64_t size = end - begin;
return IndexRange(begin, size);
return IndexRange::from_begin_end(begin, end);
}
IndexRange operator[](const IndexRange indices) const
{
const int64_t begin = offsets_[indices.start()];
const int64_t end = offsets_[indices.one_after_last()];
const int64_t size = end - begin;
return IndexRange(begin, size);
return IndexRange::from_begin_end(begin, end);
}
/**

View File

@ -101,7 +101,7 @@ inline IndexRange align_sub_range(const IndexRange unaligned_range,
const int64_t aligned_end = unaligned_end == global_end ?
unaligned_end :
std::max(global_begin, unaligned_end & alignment_mask);
const IndexRange aligned_range{aligned_begin, aligned_end - aligned_begin};
const IndexRange aligned_range = IndexRange::from_begin_end(aligned_begin, aligned_end);
return aligned_range;
}
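
A worked example: with an alignment of 4 (`alignment_mask = ~3`) and `global_end = 10`, an `unaligned_end` of 7 is rounded down to `7 & ~3 = 4`, while an `unaligned_end` of exactly 10 is kept as-is, so the final sub-range still reaches the global end.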

View File

@ -480,7 +480,18 @@ int BLI_rename(const char *from, const char *to)
#elif defined(__GLIBC_PREREQ)
# if __GLIBC_PREREQ(2, 28)
/* Most common Linux cases. */
return renameat2(AT_FDCWD, from, AT_FDCWD, to, RENAME_NOREPLACE);
int ret = renameat2(AT_FDCWD, from, AT_FDCWD, to, RENAME_NOREPLACE);
if (ret < 0 && errno == EINVAL) {
/* Most likely a filesystem that doesn't support RENAME_NOREPLACE
 * (for example NFS, Samba, exFAT or NTFS).
 * Retry with a non-atomic operation. */
if (BLI_exists(to)) {
return 1;
}
return rename(from, to);
}
return ret;
# endif
#else
/* At least all BSD's currently. */

View File

@ -286,7 +286,7 @@ int BLI_hash_md5_stream(FILE *stream, void *resblock)
len[1] = 0;
/* Iterate over full file contents. */
while (1) {
while (true) {
/* We read the file in blocks of BLOCKSIZE bytes.
* One call of the computation function processes the whole buffer
* so that with the next round of the loop another block can be read.

View File

@ -235,7 +235,7 @@ static void consolidate_segments(Vector<IndexMaskSegment, 16> &segments,
return;
}
/* Join multiple ranges together into a bigger range. */
const IndexRange range{group_first, group_last + 1 - group_first};
const IndexRange range = IndexRange::from_begin_end_inclusive(group_first, group_last);
segments[group_start_segment_i] = IndexMaskSegment(range[0],
static_indices.take_front(range.size()));
for (int64_t i = group_start_segment_i + 1; i <= last_segment_i; i++) {
@ -500,16 +500,12 @@ IndexMask IndexMask::complement(const IndexRange universe, IndexMaskMemory &memo
if (first_in_range) {
/* This mask is a range that contains the start of the universe.
* The complement is a range that contains the end of the universe. */
const int64_t complement_start = this_range->one_after_last();
const int64_t complement_size = universe.one_after_last() - complement_start;
return IndexRange(complement_start, complement_size);
return IndexRange::from_begin_end(this_range->one_after_last(), universe.one_after_last());
}
if (last_in_range) {
/* This mask is a range that contains the end of the universe.
* The complement is a range that contains the start of the universe. */
const int64_t complement_start = universe.first();
const int64_t complement_size = this_range->first() - complement_start;
return IndexRange(complement_start, complement_size);
return IndexRange::from_begin_end(universe.first(), this_range->first());
}
}

View File

@ -24,7 +24,7 @@ TEST(index_mask, IndicesToMask)
EXPECT_EQ(mask.first(), 5);
EXPECT_EQ(mask.last(), 101000);
EXPECT_EQ(mask.min_array_size(), 101001);
EXPECT_EQ(mask.bounds(), IndexRange(5, 101001 - 5));
EXPECT_EQ(mask.bounds(), IndexRange::from_begin_end_inclusive(5, 101000));
}
TEST(index_mask, FromBits)

View File

@ -23,6 +23,86 @@ TEST(index_range, DefaultConstructor)
EXPECT_EQ(vector.size(), 0);
}
TEST(index_range, FromBeginSize)
{
{
const IndexRange range = IndexRange::from_begin_size(0, 0);
EXPECT_TRUE(range.is_empty());
}
{
const IndexRange range = IndexRange::from_begin_size(0, 10);
EXPECT_EQ(range.size(), 10);
EXPECT_EQ(range.first(), 0);
}
{
const IndexRange range = IndexRange::from_begin_size(4, 10);
EXPECT_EQ(range.size(), 10);
EXPECT_EQ(range.first(), 4);
EXPECT_EQ(range.last(), 13);
}
}
TEST(index_range, FromBeginEnd)
{
{
const IndexRange range = IndexRange::from_begin_end(0, 0);
EXPECT_TRUE(range.is_empty());
}
{
const IndexRange range = IndexRange::from_begin_end(0, 10);
EXPECT_EQ(range.size(), 10);
EXPECT_EQ(range.first(), 0);
}
{
const IndexRange range = IndexRange::from_begin_end(4, 10);
EXPECT_EQ(range.size(), 6);
EXPECT_EQ(range.first(), 4);
EXPECT_EQ(range.last(), 9);
}
}
TEST(index_range, FromBeginEndInclusive)
{
{
const IndexRange range = IndexRange::from_begin_end_inclusive(0, 0);
EXPECT_EQ(range.size(), 1);
EXPECT_EQ(range.first(), 0);
}
{
const IndexRange range = IndexRange::from_begin_end_inclusive(100, 200);
EXPECT_EQ(range.size(), 101);
EXPECT_EQ(range.first(), 100);
EXPECT_EQ(range.last(), 200);
}
}
TEST(index_range, FromEndSize)
{
{
const IndexRange range = IndexRange::from_end_size(0, 0);
EXPECT_TRUE(range.is_empty());
}
{
const IndexRange range = IndexRange::from_end_size(100, 20);
EXPECT_EQ(range.first(), 80);
EXPECT_EQ(range.last(), 99);
}
}
TEST(index_range, FromSingle)
{
{
const IndexRange range = IndexRange::from_single(0);
EXPECT_EQ(range.size(), 1);
EXPECT_EQ(range.first(), 0);
}
{
const IndexRange range = IndexRange::from_single(100);
EXPECT_EQ(range.size(), 1);
EXPECT_EQ(range.first(), 100);
}
}
TEST(index_range, SingleElementRange)
{
IndexRange range(4, 1);

View File

@ -9,7 +9,7 @@ class BlendfileLoadingTest : public BlendfileLoadingBaseTest {};
TEST_F(BlendfileLoadingTest, CanaryTest)
{
/* Load the smallest blend file we have in the SVN lib/tests directory. */
/* Load the smallest blend file we have in the tests/data directory. */
if (!blendfile_load("modifier_stack" SEP_STR "array_test.blend")) {
return;
}

View File

@ -25,10 +25,10 @@ class BlendfileLoadingBaseTest : public testing::Test {
/* Frees the depsgraph & blendfile. */
virtual void TearDown();
/* Loads a blend file from the lib/tests directory from SVN.
/* Loads a blend file from the tests/data directory from SVN.
* Returns 'ok' flag (true=good, false=bad) and sets this->bfile.
* Fails the test if the file cannot be loaded (still returns though).
* Requires the CLI argument --test-asset-dir to point to ../../lib/tests.
* Requires the CLI argument --test-asset-dir to point to ../tests/data.
*
* WARNING: only files saved with Blender 2.80+ can be loaded. Since Blender
* is only partially initialized (most importantly, without window manager),

View File

@ -32,7 +32,7 @@ void HueSaturationValueCorrectOperation::execute_pixel_sampled(float output[4],
input_program_->read_sampled(hsv, x, y, sampler);
/* We parametrize the curve using the hue value. */
/* We parameterize the curve using the hue value. */
const float parameter = hsv[0];
/* adjust hue, scaling returned default 0.5 up to 1 */
@ -70,7 +70,7 @@ void HueSaturationValueCorrectOperation::update_memory_buffer_partial(MemoryBuff
for (BuffersIterator<float> it = output->iterate_with(inputs, area); !it.is_end(); ++it) {
copy_v4_v4(hsv, it.in(0));
/* We parametrize the curve using the hue value. */
/* We parameterize the curve using the hue value. */
const float parameter = hsv[0];
/* Adjust hue, scaling returned default 0.5 up to 1. */

View File

@ -27,7 +27,7 @@ float roughness_from_relative_mip(float prev_mip_roughness, float curr_mip_rough
float m_prev = pow(prev_mip_roughness, exponent);
float m_curr = pow(curr_mip_roughness, exponent);
/* Given that spherical gaussians are very close to regular gaussian in 1D,
* we reuse the same rule for successive convolution (i.e: G`(x,a) X G(x,b) = G(x,a+b)`).
* we reuse the same rule for successive convolution (i.e. `G(x,a) X G(x,b) = G(x,a+b)`).
* While this isn't technically correct, this still works quite well in practice. */
float m_target = m_curr - m_prev;
/* From GGX roughness input to linear roughness. */

View File

@ -93,7 +93,7 @@ static void motion_path_get_frame_range_to_draw(bAnimVizSettings *avs,
}
else {
start = avs->path_sf;
end = avs->path_ef;
end = avs->path_ef + 1;
}
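/* For example, with `path_sf = 1` and `path_ef = 250` (both inclusive, see the DNA comments
 * further down), the drawn range becomes `[1, 251)`, i.e. exactly frames 1 through 250. */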
if (start > end) {

View File

@ -3044,9 +3044,9 @@ static void knife_find_line_hits(KnifeTool_OpData *kcd)
if (!(d1 <= line_tol || d2 <= line_tol || fabsf(d1 - d2) <= line_tol)) {
float3 r1, r2;
float3 p_cage_dummy;
/* Can't just interpolate between ends of kfe because
/* Can't just interpolate between ends of `kfe` because
* that doesn't work with perspective transformation.
* Need to find 3d intersection of ray through sint. */
* Need to find 3d intersection of ray through `p_cage_ss`. */
knife_input_ray_segment(kcd, p_cage_ss, r1, r2);
isect_kind = isect_line_line_v3(

View File

@ -821,26 +821,28 @@ static void renamebutton_cb(bContext *C, void * /*arg1*/, char *oldname)
BLI_path_join(newname, sizeof(newname), params->dir, filename);
if (!STREQ(orgname, newname)) {
if (!BLI_exists(newname)) {
errno = 0;
if ((BLI_rename(orgname, newname) != 0) || !BLI_exists(newname)) {
WM_reportf(RPT_ERROR, "Could not rename: %s", errno ? strerror(errno) : "unknown error");
WM_report_banner_show(wm, win);
}
else {
/* If rename is successful, scroll to newly renamed entry. */
STRNCPY(params->renamefile, filename);
file_params_invoke_rename_postscroll(wm, win, sfile);
}
/* to make sure we show what is on disk */
ED_fileselect_clear(wm, sfile);
}
else {
errno = 0;
if ((BLI_rename(orgname, newname) != 0) || !BLI_exists(newname)) {
WM_reportf(RPT_ERROR, "Could not rename: %s", errno ? strerror(errno) : "unknown error");
WM_report_banner_show(wm, win);
/* Renaming failed, reset the name for further rename handling. */
STRNCPY(params->renamefile, oldname);
}
else {
/* If rename is successful, set `renamefile` to the newly renamed entry.
 * This is used later to select and scroll to the file. */
STRNCPY(params->renamefile, filename);
}
/* Ensure we select and scroll to the renamed file.
 * This is done even if the rename fails, as we want to make sure that the file we tried to
 * rename is still selected and in view. (It can move if something added files/folders to the
 * directory while we were renaming.) */
file_params_invoke_rename_postscroll(wm, win, sfile);
/* to make sure we show what is on disk */
ED_fileselect_clear(wm, sfile);
ED_region_tag_redraw(region);
}
}

View File

@ -786,11 +786,16 @@ static bool drop_extension_url_poll(bContext * /*C*/, wmDrag *drag, const wmEven
return false;
}
/* NOTE(@ideasman42): it should be possible to drag a URL into the text editor or Python console.
 * In the future we may support dragging images into Blender by URL, so treating any single-line
 * URL as an extension could backfire. Avoid problems in the future by limiting the text which
 * is accepted as an extension to ZIP files or URLs that reference known repositories. */
const std::string &str = WM_drag_get_string(drag);
/* Only URL formatted text. */
const char *cstr = str.c_str();
if (!(STRPREFIX(cstr, "http://") || STRPREFIX(cstr, "https://") || STRPREFIX(cstr, "file://"))) {
if (BKE_preferences_extension_repo_remote_scheme_end(cstr) == 0) {
return false;
}
@ -798,8 +803,13 @@ static bool drop_extension_url_poll(bContext * /*C*/, wmDrag *drag, const wmEven
if (str.find('\n') != std::string::npos) {
return false;
}
const char *cstr_ext = BLI_path_extension(cstr);
if (!(cstr_ext && STRCASEEQ(cstr_ext, ".zip"))) {
/* Check the URL has a `.zip` suffix OR has a known repository as a prefix.
* This is needed to support redirects which don't contain an extension. */
if (!(cstr_ext && STRCASEEQ(cstr_ext, ".zip")) &&
!(BKE_preferences_extension_repo_find_by_remote_path_prefix(&U, cstr, true)))
{
return false;
}
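
Under these rules, and assuming a hypothetical enabled repository with remote path `https://example.org/repo/`, the poll would accept or reject drops roughly as follows:

https://example.org/files/addon-1.2.0.zip -> accepted (`.zip` suffix)
https://example.org/repo/add-ons/foo -> accepted (known repository prefix)
https://example.org/docs/page.html -> rejected (neither)
any text containing a newline -> rejected (not a single-line URL)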

View File

@ -140,6 +140,9 @@ void VKVertexBuffer::upload_data_direct(const VKBuffer &host_buffer)
{
device_format_ensure();
if (vertex_format_converter.needs_conversion()) {
if (G.debug & G_DEBUG_GPU) {
std::cout << "PERFORMANCE: Vertex buffer requires conversion.\n";
}
vertex_format_converter.convert(host_buffer.mapped_memory_get(), data, vertex_len);
host_buffer.flush();
}

View File

@ -333,7 +333,6 @@ void IMB_close_anim(ImBufAnim *anim);
void IMB_close_anim_proxies(ImBufAnim *anim);
bool IMB_anim_can_produce_frames(const ImBufAnim *anim);
int ismovie(const char *filepath);
int IMB_anim_get_image_width(ImBufAnim *anim);
int IMB_anim_get_image_height(ImBufAnim *anim);
bool IMB_get_gop_decode_time(ImBufAnim *anim);
@ -397,12 +396,12 @@ bool IMB_ispic_type_matches(const char *filepath, int filetype);
int IMB_ispic_type_from_memory(const unsigned char *buf, size_t buf_size);
int IMB_ispic_type(const char *filepath);
enum class ImbAnimType { NotAnim, Sequence, Movie, Ffmpeg };
/**
* Test if the file is a video file (known format, has a video stream and
* supported video codec).
*/
bool IMB_isanim(const char *filepath);
ImbAnimType imb_get_anim_type(const char *filepath);
/**
* Test if color-space conversions of pixels in buffer need to take into account alpha.
*/

View File

@ -8,36 +8,26 @@
#pragma once
#include <ctype.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <cstdint>
#ifdef _WIN32
# include <io.h>
#else
# include <dirent.h>
#endif
#include "imbuf.hh"
#include "IMB_imbuf.hh"
#include "IMB_imbuf_types.hh"
#include "IMB_imbuf_enums.h"
#ifdef WITH_FFMPEG
extern "C" {
# include <libavcodec/avcodec.h>
# include <libavformat/avformat.h>
# include <libswscale/swscale.h>
}
struct AVFormatContext;
struct AVCodecContext;
struct AVCodec;
struct AVFrame;
struct AVPacket;
struct SwsContext;
#endif
struct IDProperty;
struct ImBufAnimIndex;
struct ImBufAnim {
enum class State { Uninitialized, Failed, Valid };
int ib_flags;
ImbAnimType curtype;
State state;
int cur_position; /* Frame index: 0 = first, 1 = second, etc. */
int duration_in_frames;
int frs_sec;
@ -47,16 +37,7 @@ struct ImBufAnim {
/* for number */
char filepath[1024];
/* for sequence */
char filepath_first[1024];
/* movie */
void *movie;
void *track;
void *params;
int orientation;
size_t framesize;
int interlacing;
int streamindex;
#ifdef WITH_FFMPEG
@ -65,7 +46,7 @@ struct ImBufAnim {
const AVCodec *pCodec;
AVFrame *pFrameRGB;
AVFrame *pFrameDeinterlaced;
struct SwsContext *img_convert_ctx;
SwsContext *img_convert_ctx;
int videoStream;
AVFrame *pFrame;
@ -85,11 +66,11 @@ struct ImBufAnim {
int proxies_tried;
int indices_tried;
struct ImBufAnim *proxy_anim[IMB_PROXY_MAX_SLOT];
struct ImBufAnimIndex *curr_idx[IMB_TC_MAX_SLOT];
ImBufAnim *proxy_anim[IMB_PROXY_MAX_SLOT];
ImBufAnimIndex *curr_idx[IMB_TC_MAX_SLOT];
char colorspace[64];
char suffix[64]; /* MAX_NAME - multiview */
struct IDProperty *metadata;
IDProperty *metadata;
};

View File

@ -52,25 +52,6 @@ extern "C" {
#endif /* WITH_FFMPEG */
int ismovie(const char * /*filepath*/)
{
return 0;
}
/* never called, just keep the linker happy */
static int startmovie(ImBufAnim * /*anim*/)
{
return 1;
}
static ImBuf *movie_fetchibuf(ImBufAnim * /*anim*/, int /*position*/)
{
return nullptr;
}
static void free_anim_movie(ImBufAnim * /*anim*/)
{
/* pass */
}
#ifdef WITH_FFMPEG
static void free_anim_ffmpeg(ImBufAnim *anim);
#endif
@ -82,8 +63,6 @@ void IMB_free_anim(ImBufAnim *anim)
return;
}
free_anim_movie(anim);
#ifdef WITH_FFMPEG
free_anim_ffmpeg(anim);
#endif
@ -113,34 +92,23 @@ void IMB_close_anim_proxies(ImBufAnim *anim)
IDProperty *IMB_anim_load_metadata(ImBufAnim *anim)
{
switch (anim->curtype) {
case ImbAnimType::Ffmpeg: {
if (anim->state == ImBufAnim::State::Valid) {
#ifdef WITH_FFMPEG
AVDictionaryEntry *entry = nullptr;
BLI_assert(anim->pFormatCtx != nullptr);
av_log(anim->pFormatCtx, AV_LOG_DEBUG, "METADATA FETCH\n");
BLI_assert(anim->pFormatCtx != nullptr);
av_log(anim->pFormatCtx, AV_LOG_DEBUG, "METADATA FETCH\n");
while (true) {
entry = av_dict_get(anim->pFormatCtx->metadata, "", entry, AV_DICT_IGNORE_SUFFIX);
if (entry == nullptr) {
break;
}
/* Delay creation of the property group until there is actual metadata to put in there. */
IMB_metadata_ensure(&anim->metadata);
IMB_metadata_set_field(anim->metadata, entry->key, entry->value);
AVDictionaryEntry *entry = nullptr;
while (true) {
entry = av_dict_get(anim->pFormatCtx->metadata, "", entry, AV_DICT_IGNORE_SUFFIX);
if (entry == nullptr) {
break;
}
#endif
break;
/* Delay creation of the property group until there is actual metadata to put in there. */
IMB_metadata_ensure(&anim->metadata);
IMB_metadata_set_field(anim->metadata, entry->key, entry->value);
}
case ImbAnimType::Sequence:
case ImbAnimType::Movie:
/* TODO */
break;
case ImbAnimType::NotAnim:
default:
break;
#endif
}
return anim->metadata;
}
@ -376,8 +344,6 @@ static int startffmpeg(ImBufAnim *anim)
* starts. */
anim->start_offset = video_start;
anim->params = nullptr;
anim->x = pCodecCtx->width;
anim->y = pCodecCtx->height;
@ -386,10 +352,6 @@ static int startffmpeg(ImBufAnim *anim)
anim->pCodec = pCodec;
anim->videoStream = video_stream_index;
anim->interlacing = 0;
anim->orientation = 0;
anim->framesize = anim->x * anim->y * 4;
anim->cur_position = 0;
anim->cur_pts = -1;
anim->cur_key_frame_pts = -1;
@ -1234,48 +1196,21 @@ static void free_anim_ffmpeg(ImBufAnim *anim)
*/
static bool anim_getnew(ImBufAnim *anim)
{
BLI_assert(anim->curtype == ImbAnimType::NotAnim);
if (anim == nullptr) {
/* Nothing to initialize. */
return false;
}
free_anim_movie(anim);
BLI_assert(anim->state == ImBufAnim::State::Uninitialized);
#ifdef WITH_FFMPEG
free_anim_ffmpeg(anim);
#endif
anim->curtype = imb_get_anim_type(anim->filepath);
switch (anim->curtype) {
case ImbAnimType::Sequence: {
ImBuf *ibuf = IMB_loadiffname(anim->filepath, anim->ib_flags, anim->colorspace);
if (ibuf) {
STRNCPY(anim->filepath_first, anim->filepath);
anim->duration_in_frames = 1;
IMB_freeImBuf(ibuf);
}
else {
return false;
}
break;
}
case ImbAnimType::Movie:
if (startmovie(anim)) {
return false;
}
break;
#ifdef WITH_FFMPEG
case ImbAnimType::Ffmpeg:
if (startffmpeg(anim)) {
return false;
}
break;
#endif
default:
break;
if (startffmpeg(anim)) {
anim->state = ImBufAnim::State::Failed;
return false;
}
#endif
anim->state = ImBufAnim::State::Valid;
return true;
}
@ -1300,7 +1235,7 @@ ImBuf *IMB_anim_previewframe(ImBufAnim *anim)
IMB_metadata_set_field(ibuf->metadata, "Thumb::Video::Frames", value);
#ifdef WITH_FFMPEG
if (anim->pFormatCtx && anim->curtype == ImbAnimType::Ffmpeg) {
if (anim->pFormatCtx) {
AVStream *v_st = anim->pFormatCtx->streams[anim->videoStream];
AVRational frame_rate = av_guess_frame_rate(anim->pFormatCtx, v_st, nullptr);
if (frame_rate.num != 0) {
@ -1323,15 +1258,12 @@ ImBuf *IMB_anim_absolute(ImBufAnim *anim,
IMB_Proxy_Size preview_size)
{
ImBuf *ibuf = nullptr;
int filter_y;
if (anim == nullptr) {
return nullptr;
}
filter_y = (anim->ib_flags & IB_animdeinterlace);
if (preview_size == IMB_PROXY_NONE) {
if (anim->curtype == ImbAnimType::NotAnim) {
if (anim->state == ImBufAnim::State::Uninitialized) {
if (!anim_getnew(anim)) {
return nullptr;
}
@ -1354,45 +1286,16 @@ ImBuf *IMB_anim_absolute(ImBufAnim *anim,
}
}
switch (anim->curtype) {
case ImbAnimType::Sequence: {
constexpr size_t filepath_size = BOUNDED_ARRAY_TYPE_SIZE<decltype(anim->filepath_first)>();
char head[filepath_size], tail[filepath_size];
ushort digits;
const int pic = BLI_path_sequence_decode(
anim->filepath_first, head, sizeof(head), tail, sizeof(tail), &digits) +
position;
BLI_path_sequence_encode(anim->filepath, sizeof(anim->filepath), head, tail, digits, pic);
ibuf = IMB_loadiffname(anim->filepath, IB_rect, anim->colorspace);
if (ibuf) {
anim->cur_position = position;
}
break;
}
case ImbAnimType::Movie:
ibuf = movie_fetchibuf(anim, position);
if (ibuf) {
anim->cur_position = position;
IMB_convert_rgba_to_abgr(ibuf);
}
break;
#ifdef WITH_FFMPEG
case ImbAnimType::Ffmpeg:
ibuf = ffmpeg_fetchibuf(anim, position, tc);
if (ibuf) {
anim->cur_position = position;
}
filter_y = 0; /* done internally */
break;
#endif
default:
break;
if (anim->state == ImBufAnim::State::Valid) {
ibuf = ffmpeg_fetchibuf(anim, position, tc);
if (ibuf) {
anim->cur_position = position;
}
}
#endif
if (ibuf) {
if (filter_y) {
IMB_filtery(ibuf);
}
SNPRINTF(ibuf->filepath, "%s.%04d", anim->filepath, anim->cur_position + 1);
}
return ibuf;

View File

@ -458,9 +458,7 @@ static void get_tc_filepath(ImBufAnim *anim, IMB_Timecode_Type tc, char *filepat
* - common rebuilder structures
* ---------------------------------------------------------------------- */
struct IndexBuildContext {
ImbAnimType anim_type;
};
struct IndexBuildContext {};
/* ----------------------------------------------------------------------
* - ffmpeg rebuilder
@ -786,8 +784,7 @@ static void free_proxy_output_ffmpeg(proxy_output_ctx *ctx, int rollback)
MEM_freeN(ctx);
}
struct FFmpegIndexBuilderContext {
int anim_type;
struct FFmpegIndexBuilderContext : public IndexBuildContext {
AVFormatContext *iFormatCtx;
AVCodecContext *iCodecCtx;
@ -1248,7 +1245,6 @@ IndexBuildContext *IMB_anim_index_rebuild_context(ImBufAnim *anim,
GSet *file_list,
bool build_only_on_bad_performance)
{
IndexBuildContext *context = nullptr;
int proxy_sizes_to_build = proxy_sizes_in_use;
int i;
@ -1297,24 +1293,16 @@ IndexBuildContext *IMB_anim_index_rebuild_context(ImBufAnim *anim,
return nullptr;
}
switch (anim->curtype) {
IndexBuildContext *context = nullptr;
#ifdef WITH_FFMPEG
case ImbAnimType::Ffmpeg:
context = index_ffmpeg_create_context(
anim, tcs_in_use, proxy_sizes_to_build, quality, build_only_on_bad_performance);
break;
if (anim->state == ImBufAnim::State::Valid) {
context = index_ffmpeg_create_context(
anim, tcs_in_use, proxy_sizes_to_build, quality, build_only_on_bad_performance);
}
#else
UNUSED_VARS(build_only_on_bad_performance);
UNUSED_VARS(build_only_on_bad_performance);
#endif
default:
break;
}
if (context) {
context->anim_type = anim->curtype;
}
return context;
UNUSED_VARS(tcs_in_use, proxy_sizes_in_use, quality);
@ -1328,35 +1316,25 @@ void IMB_anim_index_rebuild(IndexBuildContext *context,
/* NOLINTNEXTLINE: readability-non-const-parameter. */
float *progress)
{
switch (context->anim_type) {
#ifdef WITH_FFMPEG
case ImbAnimType::Ffmpeg:
if (indexer_need_to_build_proxy((FFmpegIndexBuilderContext *)context)) {
index_rebuild_ffmpeg((FFmpegIndexBuilderContext *)context, stop, do_update, progress);
}
break;
#endif
default:
break;
if (context != nullptr) {
if (indexer_need_to_build_proxy((FFmpegIndexBuilderContext *)context)) {
index_rebuild_ffmpeg((FFmpegIndexBuilderContext *)context, stop, do_update, progress);
}
}
UNUSED_VARS(stop, do_update, progress);
#endif
UNUSED_VARS(context, stop, do_update, progress);
}
void IMB_anim_index_rebuild_finish(IndexBuildContext *context, const bool stop)
{
switch (context->anim_type) {
#ifdef WITH_FFMPEG
case ImbAnimType::Ffmpeg:
index_rebuild_ffmpeg_finish((FFmpegIndexBuilderContext *)context, stop);
break;
#endif
default:
break;
if (context != nullptr) {
index_rebuild_ffmpeg_finish((FFmpegIndexBuilderContext *)context, stop);
}
#endif
/* static defined at top of the file */
UNUSED_VARS(stop, proxy_sizes);
UNUSED_VARS(context, stop, proxy_sizes);
}
void IMB_free_indices(ImBufAnim *anim)

View File

@ -299,62 +299,19 @@ static int isffmpeg(const char *filepath)
}
#endif
ImbAnimType imb_get_anim_type(const char *filepath)
bool IMB_isanim(const char *filepath)
{
BLI_stat_t st;
BLI_assert(!BLI_path_is_rel(filepath));
if (UTIL_DEBUG) {
printf("%s: %s\n", __func__, filepath);
}
#ifndef _WIN32
# ifdef WITH_FFMPEG
/* stat test below fails on large files > 4GB */
#ifdef WITH_FFMPEG
if (isffmpeg(filepath)) {
return ImbAnimType::Ffmpeg;
}
# endif
if (BLI_stat(filepath, &st) == -1) {
return ImbAnimType::NotAnim;
}
if (((st.st_mode) & S_IFMT) != S_IFREG) {
return ImbAnimType::NotAnim;
return true;
}
#endif
if (ismovie(filepath)) {
return ImbAnimType::Movie;
}
#else /* !_WIN32 */
if (BLI_stat(filepath, &st) == -1) {
return ImbAnimType::NotAnim;
}
if (((st.st_mode) & S_IFMT) != S_IFREG) {
return ImbAnimType::NotAnim;
}
if (ismovie(filepath)) {
return ImbAnimType::Movie;
}
# ifdef WITH_FFMPEG
if (isffmpeg(filepath)) {
return ImbAnimType::Ffmpeg;
}
# endif
#endif /* !_WIN32 */
/* Assume a single image is part of an image sequence. */
if (IMB_ispic(filepath)) {
return ImbAnimType::Sequence;
}
return ImbAnimType::NotAnim;
}
bool IMB_isanim(const char *filepath)
{
ImbAnimType type = imb_get_anim_type(filepath);
return !ELEM(type, ImbAnimType::NotAnim, ImbAnimType::Sequence);
return false;
}

View File

@ -1112,7 +1112,7 @@ typedef enum IDRecalcFlag {
* have to be copied on every update. */
ID_RECALC_PARAMETERS = (1 << 21),
/* Input has changed and datablock is to be reload from disk.
/* Input has changed and data-block is to be reloaded from disk.
* Applies to movie clips to inform that copy-on-written version is to be refreshed for the new
* input file or for color space changes. */
ID_RECALC_SOURCE = (1 << 23),

View File

@ -57,9 +57,9 @@ typedef struct bMotionPath {
/** The number of cached verts. */
int length;
/** For drawing paths, the start frame number. */
/** For drawing paths, the start frame number. Inclusive. */
int start_frame;
/** For drawing paths, the end frame number. */
/** For drawing paths, the end frame number. Exclusive. */
int end_frame;
/** Optional custom color. */
@ -113,7 +113,7 @@ typedef struct bAnimVizSettings {
short path_bakeflag;
char _pad[4];
/** Start and end frames of path-calculation range. */
/** Start and end frames of path-calculation range. Both are inclusive. */
int path_sf, path_ef;
/** Number of frames before/after current frame to show. */
int path_bc, path_ac;

View File

@ -790,6 +790,7 @@ typedef struct bNodeTree {
bNode *group_output_node();
const bNode *group_output_node() const;
/** Get all input nodes of the node group. */
blender::Span<bNode *> group_input_nodes();
blender::Span<const bNode *> group_input_nodes() const;
/** Zones in the node tree. Currently there are only simulation zones in geometry nodes. */

View File

@ -22,6 +22,7 @@
# include <fmt/format.h>
# include "BKE_attribute.hh"
# include "BKE_grease_pencil.hh"
# include "BLI_span.hh"
@ -135,6 +136,34 @@ static void rna_GreasePencilLayer_name_set(PointerRNA *ptr, const char *value)
grease_pencil->rename_node(layer->wrap().as_node(), value);
}
static int rna_GreasePencilLayer_pass_index_get(PointerRNA *ptr)
{
using namespace blender;
const GreasePencil &grease_pencil = *rna_grease_pencil(ptr);
const bke::greasepencil::Layer &layer =
static_cast<const GreasePencilLayer *>(ptr->data)->wrap();
const int layer_idx = *grease_pencil.get_layer_index(layer);
const VArray layer_passes = *grease_pencil.attributes().lookup_or_default<int>(
"pass_index", bke::AttrDomain::Layer, 0);
return layer_passes[layer_idx];
}
static void rna_GreasePencilLayer_pass_index_set(PointerRNA *ptr, int value)
{
using namespace blender;
GreasePencil &grease_pencil = *rna_grease_pencil(ptr);
const bke::greasepencil::Layer &layer =
static_cast<const GreasePencilLayer *>(ptr->data)->wrap();
const int layer_idx = *grease_pencil.get_layer_index(layer);
bke::SpanAttributeWriter<int> layer_passes =
grease_pencil.attributes_for_write().lookup_or_add_for_write_span<int>(
"pass_index", bke::AttrDomain::Layer);
layer_passes.span[layer_idx] = std::max(0, value);
layer_passes.finish();
}
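
For bulk access, the same attribute can be read once and iterated per layer; a condensed sketch reusing the names above (`target_pass` is hypothetical):

const VArray<int> layer_passes = *grease_pencil.attributes().lookup_or_default<int>(
    "pass_index", bke::AttrDomain::Layer, 0);
for (const int layer_i : layer_passes.index_range()) {
  if (layer_passes[layer_i] == target_pass) {
    /* Layer matches the pass filter, as in get_filtered_layer_mask(). */
  }
}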
static PointerRNA rna_GreasePencil_active_layer_get(PointerRNA *ptr)
{
GreasePencil *grease_pencil = rna_grease_pencil(ptr);
@ -253,6 +282,15 @@ static void rna_def_grease_pencil_layer(BlenderRNA *brna)
prop, "Onion Skinning", "Display onion skins before and after the current frame");
RNA_def_property_update(prop, NC_GPENCIL | ND_DATA, "rna_grease_pencil_update");
/* pass index for compositing and modifiers */
prop = RNA_def_property(srna, "pass_index", PROP_INT, PROP_UNSIGNED);
RNA_def_property_ui_text(prop, "Pass Index", "Index number for the \"Layer Index\" pass");
RNA_def_property_int_funcs(prop,
"rna_GreasePencilLayer_pass_index_get",
"rna_GreasePencilLayer_pass_index_set",
nullptr);
RNA_def_property_update(prop, NC_GPENCIL | ND_DATA, "rna_grease_pencil_update");
prop = RNA_def_property(srna, "parent", PROP_POINTER, PROP_NONE);
RNA_def_property_struct_type(prop, "Object");
RNA_def_property_flag(prop, PROP_EDITABLE | PROP_ID_SELF_CHECK);

View File

@ -1918,6 +1918,19 @@ static std::optional<std::string> rna_ViewLayerEEVEE_path(const PointerRNA *ptr)
return rna_path;
}
static void rna_SceneEEVEE_gi_cubemap_resolution_update(Main * /*main*/,
Scene *scene,
PointerRNA * /*ptr*/)
{
/* Tag all light probes to recalc transform. This signals EEVEE to update the light probes. */
FOREACH_SCENE_OBJECT_BEGIN (scene, ob) {
if (ob->type == OB_LIGHTPROBE) {
DEG_id_tag_update(&ob->id, ID_RECALC_TRANSFORM);
}
}
FOREACH_SCENE_OBJECT_END;
}
static std::optional<std::string> rna_SceneRenderView_path(const PointerRNA *ptr)
{
const SceneRenderView *srv = (SceneRenderView *)ptr->data;
@ -7799,6 +7812,7 @@ static void rna_def_scene_eevee(BlenderRNA *brna)
RNA_def_property_enum_items(prop, eevee_shadow_size_items);
RNA_def_property_ui_text(prop, "Cubemap Size", "Size of every cubemaps");
RNA_def_property_override_flag(prop, PROPOVERRIDE_OVERRIDABLE_LIBRARY);
RNA_def_property_update(prop, 0, "rna_SceneEEVEE_gi_cubemap_resolution_update");
prop = RNA_def_property(srna, "gi_visibility_resolution", PROP_ENUM, PROP_NONE);
RNA_def_property_enum_items(prop, eevee_gi_visibility_size_items);

View File

@ -87,6 +87,8 @@ static void foreach_ID_link(ModifierData *md, Object *ob, IDWalkFunc walk, void
auto *mmd = reinterpret_cast<GreasePencilHookModifierData *>(md);
modifier::greasepencil::foreach_influence_ID_link(&mmd->influence, ob, walk, user_data);
walk(user_data, ob, (ID **)&mmd->object, IDWALK_CB_NOP);
}
static void blend_write(BlendWriter *writer, const ID * /*id_owner*/, const ModifierData *md)

View File

@ -204,7 +204,7 @@ static IndexMask get_filtered_layer_mask(const GreasePencil &grease_pencil,
bke::AttributeAccessor layer_attributes = grease_pencil.attributes();
const Span<const Layer *> layers = grease_pencil.layers();
const VArray<int> layer_passes =
layer_attributes.lookup_or_default<int>("pass", bke::AttrDomain::Layer, 0).varray;
layer_attributes.lookup_or_default<int>("pass_index", bke::AttrDomain::Layer, 0).varray;
IndexMask result = IndexMask::from_predicate(
full_mask, GrainSize(4096), memory, [&](const int64_t layer_i) {

View File

@ -240,100 +240,22 @@ CryptomatteSession *ntreeCompositCryptomatteSession(const Scene *scene, bNode *n
return session_ptr.release();
}
namespace blender::nodes::node_composite_cryptomatte_cc {
namespace blender::nodes::node_composite_base_cryptomatte_cc {
NODE_STORAGE_FUNCS(NodeCryptomatte)
static bNodeSocketTemplate cmp_node_cryptomatte_out[] = {
{SOCK_RGBA, N_("Image")},
{SOCK_FLOAT, N_("Matte")},
{SOCK_RGBA, N_("Pick")},
{-1, ""},
};
static void cmp_node_cryptomatte_declare(NodeDeclarationBuilder &b)
{
b.add_input<decl::Color>("Image")
.default_value({0.0f, 0.0f, 0.0f, 1.0f})
.compositor_domain_priority(0);
b.add_output<decl::Color>("Image");
b.add_output<decl::Float>("Matte");
b.add_output<decl::Color>("Pick");
}
static void node_init_cryptomatte(bNodeTree * /*ntree*/, bNode *node)
{
NodeCryptomatte *user = MEM_cnew<NodeCryptomatte>(__func__);
node->storage = user;
}
static void node_init_api_cryptomatte(const bContext *C, PointerRNA *ptr)
{
Scene *scene = CTX_data_scene(C);
bNode *node = static_cast<bNode *>(ptr->data);
BLI_assert(node->type == CMP_NODE_CRYPTOMATTE);
node->id = &scene->id;
id_us_plus(node->id);
}
static void node_free_cryptomatte(bNode *node)
{
BLI_assert(ELEM(node->type, CMP_NODE_CRYPTOMATTE, CMP_NODE_CRYPTOMATTE_LEGACY));
NodeCryptomatte *nc = static_cast<NodeCryptomatte *>(node->storage);
if (nc) {
MEM_SAFE_FREE(nc->matte_id);
BLI_freelistN(&nc->runtime.layers);
BLI_freelistN(&nc->entries);
MEM_freeN(nc);
}
}
static void node_copy_cryptomatte(bNodeTree * /*dst_ntree*/,
bNode *dest_node,
const bNode *src_node)
{
NodeCryptomatte *src_nc = static_cast<NodeCryptomatte *>(src_node->storage);
NodeCryptomatte *dest_nc = static_cast<NodeCryptomatte *>(MEM_dupallocN(src_nc));
BLI_duplicatelist(&dest_nc->entries, &src_nc->entries);
BLI_listbase_clear(&dest_nc->runtime.layers);
dest_nc->matte_id = static_cast<char *>(MEM_dupallocN(src_nc->matte_id));
dest_node->storage = dest_nc;
}
static bool node_poll_cryptomatte(const bNodeType * /*ntype*/,
const bNodeTree *ntree,
const char **r_disabled_hint)
{
if (STREQ(ntree->idname, "CompositorNodeTree")) {
Scene *scene;
/* See node_composit_poll_rlayers. */
for (scene = static_cast<Scene *>(G.main->scenes.first); scene;
scene = static_cast<Scene *>(scene->id.next))
{
if (scene->nodetree == ntree) {
break;
}
}
if (scene == nullptr) {
*r_disabled_hint = RPT_(
"The node tree must be the compositing node tree of any scene in the file");
}
return scene != nullptr;
}
*r_disabled_hint = RPT_("Not a compositor node tree");
return false;
}
using namespace blender::realtime_compositor;
class CryptoMatteOperation : public NodeOperation {
class BaseCryptoMatteOperation : public NodeOperation {
public:
using NodeOperation::NodeOperation;
/* Should return the input image result. */
virtual Result &get_input_image() = 0;
/* Should return all the Cryptomatte layers in order. */
virtual Vector<GPUTexture *> get_layers() = 0;
void execute() override
{
Vector<GPUTexture *> layers = get_layers();
@ -462,7 +384,7 @@ class CryptoMatteOperation : public NodeOperation {
GPUShader *shader = context().get_shader("compositor_cryptomatte_image");
GPU_shader_bind(shader);
Result &input_image = get_input("Image");
Result &input_image = get_input_image();
input_image.bind_as_texture(shader, "input_tx");
matte.bind_as_texture(shader, "matte_tx");
@ -480,8 +402,123 @@ class CryptoMatteOperation : public NodeOperation {
image_output.unbind_as_image();
}
/* Get the identifiers of the entities selected by the user to generate a matte from. The
* identifiers are hashes of the names of the entities encoded in floats. See the "ID Generation"
* section of the Cryptomatte specification for more information. */
Vector<float> get_identifiers()
{
Vector<float> identifiers;
LISTBASE_FOREACH (CryptomatteEntry *, cryptomatte_entry, &node_storage(bnode()).entries) {
identifiers.append(cryptomatte_entry->encoded_hash);
}
return identifiers;
}
};
} // namespace blender::nodes::node_composite_base_cryptomatte_cc
namespace blender::nodes::node_composite_cryptomatte_cc {
NODE_STORAGE_FUNCS(NodeCryptomatte)
static bNodeSocketTemplate cmp_node_cryptomatte_out[] = {
{SOCK_RGBA, N_("Image")},
{SOCK_FLOAT, N_("Matte")},
{SOCK_RGBA, N_("Pick")},
{-1, ""},
};
static void cmp_node_cryptomatte_declare(NodeDeclarationBuilder &b)
{
b.add_input<decl::Color>("Image")
.default_value({0.0f, 0.0f, 0.0f, 1.0f})
.compositor_domain_priority(0);
b.add_output<decl::Color>("Image");
b.add_output<decl::Float>("Matte");
b.add_output<decl::Color>("Pick");
}
static void node_init_cryptomatte(bNodeTree * /*ntree*/, bNode *node)
{
NodeCryptomatte *user = MEM_cnew<NodeCryptomatte>(__func__);
node->storage = user;
}
static void node_init_api_cryptomatte(const bContext *C, PointerRNA *ptr)
{
Scene *scene = CTX_data_scene(C);
bNode *node = static_cast<bNode *>(ptr->data);
BLI_assert(node->type == CMP_NODE_CRYPTOMATTE);
node->id = &scene->id;
id_us_plus(node->id);
}
static void node_free_cryptomatte(bNode *node)
{
BLI_assert(ELEM(node->type, CMP_NODE_CRYPTOMATTE, CMP_NODE_CRYPTOMATTE_LEGACY));
NodeCryptomatte *nc = static_cast<NodeCryptomatte *>(node->storage);
if (nc) {
MEM_SAFE_FREE(nc->matte_id);
BLI_freelistN(&nc->runtime.layers);
BLI_freelistN(&nc->entries);
MEM_freeN(nc);
}
}
static void node_copy_cryptomatte(bNodeTree * /*dst_ntree*/,
bNode *dest_node,
const bNode *src_node)
{
NodeCryptomatte *src_nc = static_cast<NodeCryptomatte *>(src_node->storage);
NodeCryptomatte *dest_nc = static_cast<NodeCryptomatte *>(MEM_dupallocN(src_nc));
BLI_duplicatelist(&dest_nc->entries, &src_nc->entries);
BLI_listbase_clear(&dest_nc->runtime.layers);
dest_nc->matte_id = static_cast<char *>(MEM_dupallocN(src_nc->matte_id));
dest_node->storage = dest_nc;
}
static bool node_poll_cryptomatte(const bNodeType * /*ntype*/,
const bNodeTree *ntree,
const char **r_disabled_hint)
{
if (STREQ(ntree->idname, "CompositorNodeTree")) {
Scene *scene;
/* See node_composit_poll_rlayers. */
for (scene = static_cast<Scene *>(G.main->scenes.first); scene;
scene = static_cast<Scene *>(scene->id.next))
{
if (scene->nodetree == ntree) {
break;
}
}
if (scene == nullptr) {
*r_disabled_hint = RPT_(
"The node tree must be the compositing node tree of any scene in the file");
}
return scene != nullptr;
}
*r_disabled_hint = RPT_("Not a compositor node tree");
return false;
}
using namespace blender::realtime_compositor;
using namespace blender::nodes::node_composite_base_cryptomatte_cc;
class CryptoMatteOperation : public BaseCryptoMatteOperation {
public:
using BaseCryptoMatteOperation::BaseCryptoMatteOperation;
Result &get_input_image() override
{
return get_input("Image");
}
/* Returns all the relevant Cryptomatte layers from the selected source. */
Vector<GPUTexture *> get_layers()
Vector<GPUTexture *> get_layers() override
{
switch (get_source()) {
case CMP_NODE_CRYPTOMATTE_SOURCE_RENDER:
@ -631,18 +668,6 @@ class CryptoMatteOperation : public NodeOperation {
return std::string(type_name);
}
/* Get the identifiers of the entities selected by the user to generate a matte from. The
* identifiers are hashes of the names of the entities encoded in floats. See the "ID Generation"
* section of the Cryptomatte specification for more information. */
Vector<float> get_identifiers()
{
Vector<float> identifiers;
LISTBASE_FOREACH (CryptomatteEntry *, cryptomatte_entry, &node_storage(bnode()).entries) {
identifiers.append(cryptomatte_entry->encoded_hash);
}
return identifiers;
}
/* The domain should be centered with the same size as the source. In case of invalid source,
* fallback to the domain inferred from the input. */
Domain compute_domain() override
@ -804,23 +829,31 @@ static void node_init_cryptomatte_legacy(bNodeTree *ntree, bNode *node)
}
using namespace blender::realtime_compositor;
using namespace blender::nodes::node_composite_base_cryptomatte_cc;
class CryptoMatteOperation : public NodeOperation {
class LegacyCryptoMatteOperation : public BaseCryptoMatteOperation {
public:
using NodeOperation::NodeOperation;
using BaseCryptoMatteOperation::BaseCryptoMatteOperation;
void execute() override
Result &get_input_image() override
{
get_input("image").pass_through(get_result("Image"));
get_result("Matte").allocate_invalid();
get_result("Pick").allocate_invalid();
context().set_info_message("Viewport compositor setup not fully supported");
return get_input("image");
}
Vector<GPUTexture *> get_layers() override
{
Vector<GPUTexture *> layers;
/* Add all textures of all inputs except the first input, which is the input image. */
for (const bNodeSocket *socket : bnode().input_sockets().drop_front(1)) {
layers.append(get_input(socket->identifier).texture());
}
return layers;
}
};
static NodeOperation *get_compositor_operation(Context &context, DNode node)
{
return new CryptoMatteOperation(context, node);
return new LegacyCryptoMatteOperation(context, node);
}
} // namespace blender::nodes::node_composite_legacy_cryptomatte_cc
@ -840,8 +873,6 @@ void register_node_type_cmp_cryptomatte_legacy()
&ntype, "NodeCryptomatte", file_ns::node_free_cryptomatte, file_ns::node_copy_cryptomatte);
ntype.gather_link_search_ops = nullptr;
ntype.get_compositor_operation = legacy_file_ns::get_compositor_operation;
ntype.realtime_compositor_unsupported_message = N_(
"Node not supported in the Viewport compositor");
nodeRegisterType(&ntype);
}

View File

@ -2503,6 +2503,7 @@ static eHandlerActionFlag wm_handler_operator_call(bContext *C,
else {
/* Not very common, but modal operators may report before finishing. */
if (!BLI_listbase_is_empty(&op->reports->list)) {
WM_event_add_notifier(C, NC_SPACE | ND_SPACE_INFO_REPORT, nullptr);
WM_reports_from_reports_move(wm, op->reports);
}
}

View File

@ -1703,10 +1703,6 @@ install(
set(ASSET_BUNDLE_DIR ${CMAKE_SOURCE_DIR}/release/datafiles/assets/publish/)
if(NOT EXISTS "${ASSET_BUNDLE_DIR}")
set(ASSET_BUNDLE_DIR ${CMAKE_SOURCE_DIR}/../lib/assets/publish/)
endif()
# TODO temporary change for development only. Remove before merging.
set(ASSET_BUNDLE_TESTING_DIR "${ASSET_BUNDLE_DIR}/../testing/")
if(EXISTS "${ASSET_BUNDLE_TESTING_DIR}")

1
tests/data Submodule

@ -0,0 +1 @@
Subproject commit 74d32aff48a05236adf61a24d8c1b5b4c7c55097

View File

@ -14,7 +14,7 @@ namespace blender::tests {
/* These strings are passed on the CLI with the --test-asset-dir and --test-release-dir arguments.
* The arguments are added automatically when invoking tests via `ctest`. */
const std::string &flags_test_asset_dir(); /* ../lib/tests in the SVN directory. */
const std::string &flags_test_asset_dir(); /* tests/data in the Blender repository. */
const std::string &flags_test_release_dir(); /* bin/{blender version} in the build directory. */
} // namespace blender::tests

View File

@ -6,7 +6,7 @@
#include "MEM_guardedalloc.h"
DEFINE_string(test_assets_dir, "", "lib/tests directory from SVN containing the test assets.");
DEFINE_string(test_assets_dir, "", "tests/data directory containing the test assets.");
DEFINE_string(test_release_dir, "", "bin/{blender version} directory of the current build.");
namespace blender::tests {
@ -14,8 +14,7 @@ namespace blender::tests {
const std::string &flags_test_asset_dir()
{
if (FLAGS_test_assets_dir.empty()) {
ADD_FAILURE()
<< "Pass the flag --test-assets-dir and point to the lib/tests directory from SVN.";
ADD_FAILURE() << "Pass the flag --test-assets-dir and point to the tests/data directory.";
}
return FLAGS_test_assets_dir;
}

View File

@ -26,7 +26,7 @@ class TestEnvironment:
self.build_dir = base_dir / 'build'
self.install_dir = self.build_dir / "bin"
self.lib_dir = base_dir / 'lib'
self.benchmarks_dir = self.blender_git_dir.parent / 'lib' / 'benchmarks'
self.benchmarks_dir = self.blender_git_dir / 'tests' / 'benchmarks'
self.git_executable = 'git'
self.cmake_executable = 'cmake'
self.cmake_options = ['-DWITH_INTERNATIONAL=OFF', '-DWITH_BUILDINFO=OFF']

View File

@ -8,7 +8,7 @@
# and don't give deterministic results
set(USE_EXPERIMENTAL_TESTS FALSE)
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/../lib/tests)
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/tests/data)
set(TEST_PYTHON_DIR ${CMAKE_SOURCE_DIR}/tests/python)
set(TEST_OUT_DIR ${CMAKE_BINARY_DIR}/tests)

View File

@ -3,7 +3,7 @@
# SPDX-License-Identifier: GPL-2.0-or-later
"""
./blender.bin --background --factory-startup --python tests/python/bl_alembic_io_test.py -- --testdir /path/to/lib/tests/alembic
./blender.bin --background --factory-startup --python tests/python/bl_alembic_io_test.py -- --testdir /path/to/tests/data/alembic
"""
import math

View File

@ -9,7 +9,7 @@ import sys
from rna_prop_ui import rna_idprop_quote_path
"""
blender -b --factory-startup --python tests/python/bl_animation_drivers.py -- --testdir /path/to/lib/tests/animation
blender -b --factory-startup --python tests/python/bl_animation_drivers.py -- --testdir /path/to/tests/data/animation
"""

View File

@ -3,7 +3,7 @@
# SPDX-License-Identifier: GPL-2.0-or-later
"""
blender -b --factory-startup --python tests/python/bl_animation_fcurves.py -- --testdir /path/to/lib/tests/animation
blender -b --factory-startup --python tests/python/bl_animation_fcurves.py -- --testdir /path/to/tests/data/animation
"""
import pathlib

View File

@ -9,7 +9,7 @@ import sys
from math import radians
"""
blender -b --factory-startup --python tests/python/bl_animation_keyframing.py -- --testdir /path/to/lib/tests/animation
blender -b --factory-startup --python tests/python/bl_animation_keyframing.py -- --testdir /path/to/tests/data/animation
"""

View File

@ -75,7 +75,7 @@ def argparse_create():
# When --help or no args are given, print this help
description = ("Test basic versioning code by opening all blend files "
"in `lib/tests` directory.")
"in `tests/data` directory.")
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
"--src-test-dir",

View File

@ -3,7 +3,7 @@
# SPDX-License-Identifier: GPL-2.0-or-later
"""
./blender.bin --background --factory-startup --python tests/python/bl_constraints.py -- --testdir /path/to/lib/tests/constraints
./blender.bin --background --factory-startup --python tests/python/bl_constraints.py -- --testdir /path/to/tests/data/constraints
"""
import pathlib

View File

@ -3,7 +3,7 @@
# SPDX-License-Identifier: GPL-2.0-or-later
"""
blender -b --factory-startup --python tests/python/bl_rigging_symmetrize.py -- --testdir /path/to/lib/tests/animation
blender -b --factory-startup --python tests/python/bl_rigging_symmetrize.py -- --testdir /path/to/tests/data/animation
"""
import pathlib

View File

@ -10,7 +10,7 @@
# and don't give deterministic results
set(USE_EXPERIMENTAL_TESTS FALSE)
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/../lib/tests)
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/tests/data)
set(TEST_OUT_DIR ${CMAKE_BINARY_DIR}/tests)
# ugh, any better way to do this on testing only?

View File

@ -5,7 +5,7 @@
"""
Call as follows:
python collada_mesh_simple.py --blender PATH_TO_BLENDER_EXE --testdir PATH_TO_SVN/lib/tests/collada/mesh
python collada_mesh_simple.py --blender PATH_TO_BLENDER_EXE --testdir tests/data/collada/mesh
"""
import sys

View File

@ -5,7 +5,7 @@
"""
Call as follows:
python collada_mesh_simple.py --blender PATH_TO_BLENDER_EXE --testdir PATH_TO_SVN/lib/tests/collada/mesh
python collada_mesh_simple.py --blender PATH_TO_BLENDER_EXE --testdir tests/data/collada/mesh
"""
import sys

View File

@ -3,7 +3,7 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# To run all tests, use
# BLENDER_VERBOSE=1 ./bin/blender ../lib/tests/modeling/curve_to_mesh.blend --python ../blender/tests/python/bl_curve_to_mesh.py -- --run-all-tests
# BLENDER_VERBOSE=1 ./bin/blender ../tests/data/modeling/curve_to_mesh.blend --python ../blender/tests/python/bl_curve_to_mesh.py -- --run-all-tests
# (that assumes the test is run from a build directory in the same directory as the source code)
import bpy
import os

View File

@ -6,7 +6,7 @@
# Use '--write-blend=/tmp/test.blend' to view output
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/../lib/tests)
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/tests/data)
set(TEST_OUT_DIR ${CMAKE_BINARY_DIR}/tests)
# ugh, any better way to do this on testing only?