WIP: Fix #116458: Added decay factor for flattening brushes. #118699
@@ -278,3 +278,7 @@ StatementMacros:

MacroBlockBegin: "^OSL_CLOSURE_STRUCT_BEGIN$"
MacroBlockEnd: "^OSL_CLOSURE_STRUCT_END$"

# Ensure single new line at the end of source files.
InsertNewlineAtEOF: True
KeepEmptyLinesAtEOF: False

@@ -56,6 +56,7 @@ waveletNoiseTile.bin

# External repositories.
/scripts/addons/
/scripts/addons_contrib/
/tests/benchmarks/

# Ignore old submodules directories.
# Eventually need to get rid of those, but for the first time of transition

@@ -0,0 +1,29 @@
[submodule "lib/linux_x64"]
	update = none
	path = lib/linux_x64
	url = https://projects.blender.org/blender/lib-linux_x64.git
	branch = blender-v4.1-release
[submodule "lib/macos_arm64"]
	update = none
	path = lib/macos_arm64
	url = https://projects.blender.org/blender/lib-macos_arm64.git
	branch = blender-v4.1-release
[submodule "lib/macos_x64"]
	update = none
	path = lib/macos_x64
	url = https://projects.blender.org/blender/lib-macos_x64.git
	branch = blender-v4.1-release
[submodule "lib/windows_x64"]
	update = none
	path = lib/windows_x64
	url = https://projects.blender.org/blender/lib-windows_x64.git
	branch = blender-v4.1-release
[submodule "release/datafiles/assets"]
	path = release/datafiles/assets
	url = https://projects.blender.org/blender/blender-assets.git
	branch = blender-v4.1-release
[submodule "tests/data"]
	update = none
	path = tests/data
	url = https://projects.blender.org/blender/blender-test-data.git
	branch = blender-v4.1-release
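
The library and test-data submodules above are registered with "update = none", so a plain "git submodule update" leaves them alone; the build scripts flip the one matching the host platform to "checkout" and then initialize it. A minimal sketch of that flow (illustrative only; the real logic lives in make_utils.py / make_update.py further down):

import subprocess

def fetch_platform_libraries(submodule_dir: str, git_command: str = "git") -> None:
    # Flip the per-repository setting from "none" to "checkout" so the
    # submodule is no longer skipped, then clone/update it via Git LFS.
    subprocess.check_call(
        [git_command, "config", "--local",
         f"submodule.{submodule_dir}.update", "checkout"])
    subprocess.check_call(
        [git_command, "submodule", "update", "--init", "--progress", submodule_dir])

# e.g. fetch_platform_libraries("lib/linux_x64") on an x86_64 Linux host.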
GNUmakefile

@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: GPL-2.0-or-later

# This Makefile does an out-of-source CMake build in ../build_`OS`_`CPU`
# This Makefile does an out-of-source CMake build in ../build_`OS`
# eg:
#   ../build_linux_i386
# This is for users who like to configure & build blender with a single command.

@@ -35,7 +35,7 @@ Other Convenience Targets
* deps: Build library dependencies (intended only for platform maintainers).

  The existence of locally build dependencies overrides the pre-built dependencies from subversion.
  These must be manually removed from '../lib/' to go back to using the pre-compiled libraries.
  These must be manually removed from 'lib/' to go back to using the pre-compiled libraries.

Project Files
  Generate project files for development environments.

@@ -165,6 +165,16 @@ OS:=$(shell uname -s)
OS_NCASE:=$(shell uname -s | tr '[A-Z]' '[a-z]')
CPU:=$(shell uname -m)

# Use our OS and CPU architecture naming conventions.
ifeq ($(CPU),x86_64)
  CPU:=x64
endif
ifeq ($(OS_NCASE),darwin)
  OS_LIBDIR:=macos
else
  OS_LIBDIR:=$(OS_NCASE)
endif

# Source and Build DIR's
BLENDER_DIR:=$(shell pwd -P)

@@ -186,26 +196,13 @@ ifndef DEPS_BUILD_DIR
endif

ifndef DEPS_INSTALL_DIR
  DEPS_INSTALL_DIR:=$(shell dirname "$(BLENDER_DIR)")/lib/$(OS_NCASE)

  # Add processor type to directory name, except for darwin x86_64
  # which by convention does not have it.
  ifeq ($(OS_NCASE),darwin)
    ifneq ($(CPU),x86_64)
      DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
    endif
  else
    DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
  endif
  DEPS_INSTALL_DIR:=$(shell dirname "$(BLENDER_DIR)")/lib/$(OS_LIBDIR)_$(CPU)
endif

# Set the LIBDIR, an empty string when not found.
LIBDIR:=$(wildcard ../lib/${OS_NCASE}_${CPU})
LIBDIR:=$(wildcard $(BLENDER_DIR)/lib/${OS_LIBDIR}_${CPU})
ifeq (, $(LIBDIR))
  LIBDIR:=$(wildcard ../lib/${OS_NCASE}_${CPU}_glibc_228)
endif
ifeq (, $(LIBDIR))
  LIBDIR:=$(wildcard ../lib/${OS_NCASE})
  LIBDIR:=$(wildcard $(BLENDER_DIR)/lib/${OS_LIBDIR})
endif

# Find the newest Python version bundled in `LIBDIR`.
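
The Makefile changes above map the host OS and CPU reported by uname onto the new library directory names under lib/ (linux_x64, macos_arm64, macos_x64, windows_x64). A rough Python equivalent of that mapping, for illustration only:

import platform

def lib_dir_name() -> str:
    # Normalize CPU: x86_64/AMD64 -> x64, aarch64 -> arm64.
    machine = platform.machine().lower()
    cpu = {"x86_64": "x64", "amd64": "x64", "aarch64": "arm64"}.get(machine, machine)
    # Normalize OS: Darwin -> macos, everything else lower-cased as-is.
    system = platform.system().lower()
    os_name = {"darwin": "macos"}.get(system, system)
    return f"lib/{os_name}_{cpu}"

# e.g. "lib/linux_x64" on a typical x86_64 Linux host.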
@@ -134,9 +134,18 @@ BUILD_MANDATORY_SUBPACKAGES = (
            },
            ),
    Package(name="Git",
            sub_packages=(
                Package(name="Git LFS",
                        distro_package_names={DISTRO_ID_DEBIAN: "git-lfs",
                                              DISTRO_ID_FEDORA: "git-lfs",
                                              DISTRO_ID_SUSE: "git-lfs",
                                              DISTRO_ID_ARCH: "git-lfs",
                                              },
                        ),
            ),
            distro_package_names={DISTRO_ID_DEBIAN: "git",
                                  DISTRO_ID_FEDORA: "git",
                                  DISTRO_ID_SUSE: None,
                                  DISTRO_ID_SUSE: "git",
                                  DISTRO_ID_ARCH: "git",
                                  },
            ),

@@ -49,18 +49,17 @@ endif()

if(NOT DEFINED LIBDIR)
  if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64")
    set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin)
    set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/macos_x64)
  else()
    set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin_${CMAKE_OSX_ARCHITECTURES})
  endif()
else()
  if(FIRST_RUN)
    message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
    set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/macos_${CMAKE_OSX_ARCHITECTURES})
  endif()
endif()
if(NOT EXISTS "${LIBDIR}/")
if(NOT EXISTS "${LIBDIR}/.git")
  message(FATAL_ERROR "Mac OSX requires pre-compiled libs at: '${LIBDIR}'")
endif()
if(FIRST_RUN)
  message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
endif()

# Avoid searching for headers since this would otherwise override our lib
# directory as well as PYTHON_ROOT_DIR.

@@ -16,13 +16,13 @@ else()
  set(LIBDIR_NATIVE_ABI ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_NAME})

  # Path to precompiled libraries with known glibc 2.28 ABI.
  set(LIBDIR_GLIBC228_ABI ${CMAKE_SOURCE_DIR}/../lib/linux_x86_64_glibc_228)
  set(LIBDIR_GLIBC228_ABI ${CMAKE_SOURCE_DIR}/lib/linux_x64)

  # Choose the best suitable libraries.
  if(EXISTS ${LIBDIR_NATIVE_ABI})
    set(LIBDIR ${LIBDIR_NATIVE_ABI})
    set(WITH_LIBC_MALLOC_HOOK_WORKAROUND TRUE)
  elseif(EXISTS ${LIBDIR_GLIBC228_ABI})
  elseif(EXISTS "${LIBDIR_GLIBC228_ABI}/.git")
    set(LIBDIR ${LIBDIR_GLIBC228_ABI})
    if(WITH_MEM_JEMALLOC)
      # jemalloc provides malloc hooks.

@@ -266,23 +266,23 @@ if(NOT DEFINED LIBDIR)
  # Setup 64bit and 64bit windows systems
  if(CMAKE_CL_64)
    message(STATUS "64 bit compiler detected.")
    set(LIBDIR_BASE "win64")
    set(LIBDIR_BASE "windows_x64")
  else()
    message(FATAL_ERROR "32 bit compiler detected, blender no longer provides pre-build libraries for 32 bit windows, please set the LIBDIR cmake variable to your own library folder")
  endif()
  if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 19.30.30423)
    message(STATUS "Visual Studio 2022 detected.")
    set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
    set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/${LIBDIR_BASE})
  elseif(MSVC_VERSION GREATER 1919)
    message(STATUS "Visual Studio 2019 detected.")
    set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
    set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/${LIBDIR_BASE})
  endif()
else()
  if(FIRST_RUN)
    message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
  endif()
endif()
if(NOT EXISTS "${LIBDIR}/")
if(NOT EXISTS "${LIBDIR}/.git")
  message(FATAL_ERROR "\n\nWindows requires pre-compiled libs at: '${LIBDIR}'. Please run `make update` in the blender source folder to obtain them.")
endif()

@@ -163,14 +163,14 @@ function(blender_add_ctests)
      TEST_PREFIX ${ARGS_SUITE_NAME}
      WORKING_DIRECTORY "${TEST_INSTALL_DIR}"
      EXTRA_ARGS
        --test-assets-dir "${CMAKE_SOURCE_DIR}/../lib/tests"
        --test-assets-dir "${CMAKE_SOURCE_DIR}/tests/data"
        --test-release-dir "${_test_release_dir}"
    )
  else()
    add_test(
      NAME ${ARGS_SUITE_NAME}
      COMMAND ${ARGS_TARGET}
        --test-assets-dir "${CMAKE_SOURCE_DIR}/../lib/tests"
        --test-assets-dir "${CMAKE_SOURCE_DIR}/tests/data"
        --test-release-dir "${_test_release_dir}"
      WORKING_DIRECTORY ${TEST_INSTALL_DIR}
    )

@@ -13,6 +13,7 @@ import sys

import make_utils
from make_utils import call
from pathlib import Path

# Parse arguments.

@@ -21,7 +22,6 @@ def parse_arguments() -> argparse.Namespace:
    parser = argparse.ArgumentParser()
    parser.add_argument("--ctest-command", default="ctest")
    parser.add_argument("--cmake-command", default="cmake")
    parser.add_argument("--svn-command", default="svn")
    parser.add_argument("--git-command", default="git")
    parser.add_argument("--config", default="")
    parser.add_argument("build_directory")

@@ -30,7 +30,6 @@ def parse_arguments() -> argparse.Namespace:

args = parse_arguments()
git_command = args.git_command
svn_command = args.svn_command
ctest_command = args.ctest_command
cmake_command = args.cmake_command
config = args.config

@@ -45,24 +44,18 @@ if make_utils.command_missing(git_command):
    sys.exit(1)

# Test if we are building a specific release version.
branch = make_utils.git_branch(git_command)
tag = make_utils.git_tag(git_command)
release_version = make_utils.git_branch_release_version(branch, tag)
lib_tests_dirpath = os.path.join('..', 'lib', "tests")
lib_tests_dirpath = Path("tests") / "data"

if not os.path.exists(lib_tests_dirpath):
if not (lib_tests_dirpath / ".git").exists():
    print("Tests files not found, downloading...")

    if make_utils.command_missing(svn_command):
        sys.stderr.write("svn not found, can't checkout test files\n")
        sys.exit(1)

    if make_utils.command_missing(cmake_command):
        sys.stderr.write("cmake not found, can't checkout test files\n")
        sys.exit(1)

    svn_url = make_utils.svn_libraries_base_url(release_version) + "/tests"
    call([svn_command, "checkout", svn_url, lib_tests_dirpath])
    # Ensure the test data files sub-module is configured and present.
    make_utils.git_enable_submodule(git_command, "tests/data")
    make_utils.git_update_submodule(args.git_command, lib_tests_dirpath)

    # Run cmake again to detect tests files.
    os.chdir(build_dir)

@@ -4,11 +4,11 @@
# SPDX-License-Identifier: GPL-2.0-or-later

"""
"make update" for all platforms, updating svn libraries and tests and Blender
git repository and sub-modules.
"make update" for all platforms, updating Git LFS submodules for libraries and
tests, and Blender git repository.

For release branches, this will check out the appropriate branches of
sub-modules and libraries.
submodules and libraries.
"""

import argparse

@@ -20,168 +20,169 @@ import sys
import make_utils
from pathlib import Path
from make_utils import call, check_output
from urllib.parse import urljoin
from urllib.parse import urljoin, urlsplit

from typing import (
    Optional,
)


class Submodule:
    path: str
    branch: str
    branch_fallback: str

    def __init__(self, path: str, branch: str, branch_fallback: str) -> None:
        self.path = path
        self.branch = branch
        self.branch_fallback = branch_fallback
from typing import Optional


def print_stage(text: str) -> None:
    print("")
    print(text)
    print("=" * len(text))
    print("")

# Parse arguments


def parse_arguments() -> argparse.Namespace:
    """
    Parse command line line arguments.

    Returns parsed object from which the command line arguments can be accessed
    as properties. The name of the properties matches the command line argument,
    but with the leading dashed omitted and all remaining dashes replaced with
    underscore.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--no-libraries", action="store_true")
    parser.add_argument("--no-blender", action="store_true")
    parser.add_argument("--no-submodules", action="store_true")
    parser.add_argument("--use-tests", action="store_true")
    parser.add_argument("--svn-command", default="svn")
    parser.add_argument("--svn-branch", default=None)
    parser.add_argument("--git-command", default="git")
    parser.add_argument("--use-linux-libraries", action="store_true")
    parser.add_argument("--architecture", type=str, choices=("x86_64", "amd64", "arm64",))
    parser.add_argument("--architecture", type=str,
                        choices=("x86_64", "amd64", "arm64",))
    return parser.parse_args()


def get_blender_git_root() -> str:
    return check_output([args.git_command, "rev-parse", "--show-toplevel"])
def get_blender_git_root() -> Path:
    """
    Get root directory of the current Git directory.
    """
    return Path(
        check_output([args.git_command, "rev-parse", "--show-toplevel"]))


# Setup for precompiled libraries and tests from svn.

def get_effective_platform(args: argparse.Namespace) -> str:
    """
    Get platform of the host.

    The result string is normalized to the name used by Blender releases and
    library repository name prefixes: linux, macos, windows.
    """

    if sys.platform == "darwin":
        platform = "macos"
    elif sys.platform == "win32":
        platform = "windows"
    else:
        platform = sys.platform

    assert (platform in ("linux", "macos", "windows"))

    return platform


def get_effective_architecture(args: argparse.Namespace) -> str:
    """
    Get architecture of the host.

    The result string is normalized to the architecture name used by the Blender
    releases and library repository name suffixes: x64, arm64.

    NOTE: When cross-compiling the architecture is coming from the command line
    argument.
    """
    architecture = args.architecture
    if architecture:
        assert isinstance(architecture, str)
        return architecture

    # Check platform.version to detect arm64 with x86_64 python binary.
    if "ARM64" in platform.version():
        return "arm64"

    return platform.machine().lower()


def svn_update(args: argparse.Namespace, release_version: Optional[str]) -> None:
    svn_non_interactive = [args.svn_command, '--non-interactive']

    lib_dirpath = os.path.join(get_blender_git_root(), '..', 'lib')
    svn_url = make_utils.svn_libraries_base_url(release_version, args.svn_branch)

    # Checkout precompiled libraries
    architecture = get_effective_architecture(args)
    if sys.platform == 'darwin':
        if architecture == 'arm64':
            lib_platform = "darwin_arm64"
        elif architecture == 'x86_64':
            lib_platform = "darwin"
        else:
            lib_platform = None
    elif sys.platform == 'win32':
        # Windows checkout is usually handled by bat scripts since python3 to run
        # this script is bundled as part of the precompiled libraries. However it
        # is used by the buildbot.
        lib_platform = "win64_vc15"
    elif args.use_linux_libraries:
        lib_platform = "linux_x86_64_glibc_228"
    elif "ARM64" in platform.version():
        # Check platform.version to detect arm64 with x86_64 python binary.
        architecture = "arm64"
    else:
        # No precompiled libraries for Linux.
        lib_platform = None
        architecture = platform.machine().lower()

    if lib_platform:
        lib_platform_dirpath = os.path.join(lib_dirpath, lib_platform)
    # Normalize the architecture name.
    if architecture in ("x86_64", "amd64"):
        architecture = "x64"

        if not os.path.exists(lib_platform_dirpath):
            print_stage("Checking out Precompiled Libraries")
    assert (architecture in ("x64", "arm64"))

            if make_utils.command_missing(args.svn_command):
                sys.stderr.write("svn not found, can't checkout libraries\n")
                sys.exit(1)

            svn_url_platform = svn_url + lib_platform
            call(svn_non_interactive + ["checkout", svn_url_platform, lib_platform_dirpath])

    if args.use_tests:
        lib_tests = "tests"
        lib_tests_dirpath = os.path.join(lib_dirpath, lib_tests)

        if not os.path.exists(lib_tests_dirpath):
            print_stage("Checking out Tests")

            if make_utils.command_missing(args.svn_command):
                sys.stderr.write("svn not found, can't checkout tests\n")
                sys.exit(1)

            svn_url_tests = svn_url + lib_tests
            call(svn_non_interactive + ["checkout", svn_url_tests, lib_tests_dirpath])

    lib_assets = "assets"
    lib_assets_dirpath = os.path.join(lib_dirpath, lib_assets)

    if not os.path.exists(lib_assets_dirpath):
        print_stage("Checking out Assets")

        if make_utils.command_missing(args.svn_command):
            sys.stderr.write("svn not found, can't checkout assets\n")
            sys.exit(1)

        svn_url_assets = svn_url + lib_assets
        call(svn_non_interactive + ["checkout", svn_url_assets, lib_assets_dirpath])

    # Update precompiled libraries, assets and tests

    if not os.path.isdir(lib_dirpath):
        print("Library path: %r, not found, skipping" % lib_dirpath)
    else:
        paths_local_and_remote = []
        if os.path.exists(os.path.join(lib_dirpath, ".svn")):
            print_stage("Updating Precompiled Libraries, Assets and Tests (one repository)")
            paths_local_and_remote.append((lib_dirpath, svn_url))
        else:
            print_stage("Updating Precompiled Libraries, Assets and Tests (multiple repositories)")
            # Separate paths checked out.
            for dirname in os.listdir(lib_dirpath):
                if dirname.startswith("."):
                    # Temporary paths such as ".mypy_cache" will report a warning, skip hidden directories.
                    continue

                dirpath = os.path.join(lib_dirpath, dirname)
                if not (os.path.isdir(dirpath) and os.path.exists(os.path.join(dirpath, ".svn"))):
                    continue

                paths_local_and_remote.append((dirpath, svn_url + dirname))

        if paths_local_and_remote:
            if make_utils.command_missing(args.svn_command):
                sys.stderr.write("svn not found, can't update libraries\n")
                sys.exit(1)

            for dirpath, svn_url_full in paths_local_and_remote:
                call(svn_non_interactive + ["cleanup", dirpath])
                # Switch to appropriate branch and update.
                call(svn_non_interactive + ["switch", svn_url_full, dirpath], exit_on_error=False)
                call(svn_non_interactive + ["update", dirpath])
    return architecture


def get_submodule_directories(args: argparse.Namespace):
    """
    Get list of all configured submodule directories.
    """

    blender_git_root = get_blender_git_root()
    dot_modules = blender_git_root / ".gitmodules"

    if not dot_modules.exists():
        return ()

    submodule_directories_output = check_output(
        [args.git_command, "config", "--file", dot_modules, "--get-regexp", "path"])
    return (Path(line.split(' ', 1)[1]) for line in submodule_directories_output.strip().splitlines())

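get_submodule_directories() above reads the submodule paths straight out of .gitmodules. For reference, the underlying command and the shape of its output look roughly like this (paths taken from the .gitmodules hunk earlier; the exact listing depends on the checkout):

import subprocess

out = subprocess.check_output(
    ["git", "config", "--file", ".gitmodules", "--get-regexp", "path"],
    universal_newlines=True)
# Each line is "<key> <value>"; the value after the first space is the path, e.g.:
#   submodule.lib/linux_x64.path lib/linux_x64
#   submodule.tests/data.path tests/data
paths = [line.split(' ', 1)[1] for line in out.strip().splitlines()]
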
def ensure_git_lfs(args: argparse.Namespace) -> None:
    # Use `--skip-repo` to avoid creating git hooks.
    # This is called from the `blender.git` checkout, so we don't need to install hooks there.
    call((args.git_command, "lfs", "install", "--skip-repo"), exit_on_error=True)


def update_precompiled_libraries(args: argparse.Namespace) -> None:
    """
    Configure and update submodule for precompiled libraries

    This function detects the current host architecture and enables
    corresponding submodule, and updates the submodule.

    NOTE: When cross-compiling the architecture is coming from the command line
    argument.
    """

    print_stage("Configuring Precompiled Libraries")

    platform = get_effective_platform(args)
    arch = get_effective_architecture(args)

    print(f"Detected platform     : {platform}")
    print(f"Detected architecture : {arch}")
    print()

    if sys.platform == "linux" and not args.use_linux_libraries:
        print("Skipping Linux libraries configuration")
        return

    submodule_dir = f"lib/{platform}_{arch}"

    submodule_directories = get_submodule_directories(args)

    if Path(submodule_dir) not in submodule_directories:
        print("Skipping libraries update: no configured submodule")
        return

    make_utils.git_enable_submodule(args.git_command, submodule_dir)
    make_utils.git_update_submodule(args.git_command, submodule_dir)


def update_tests_data_files(args: argparse.Namespace) -> None:
    """
    Configure and update submodule with files used by regression tests
    """

    print_stage("Configuring Tests Data Files")

    submodule_dir = "tests/data"

    make_utils.git_enable_submodule(args.git_command, submodule_dir)
    make_utils.git_update_submodule(args.git_command, submodule_dir)


# Test if git repo can be updated.
def git_update_skip(args: argparse.Namespace, check_remote_exists: bool = True) -> str:
    """Test if git repo can be updated."""

    if make_utils.command_missing(args.git_command):
        sys.stderr.write("git not found, can't update code\n")
        sys.exit(1)

@@ -274,23 +275,22 @@ def resolve_external_url(blender_url: str, repo_name: str) -> str:
    return urljoin(blender_url + "/", "../" + repo_name)


def external_script_copy_old_submodule_over(args: argparse.Namespace, directory_name: str) -> None:
    blender_git_root = Path(get_blender_git_root())
    scripts_dir = blender_git_root / "scripts"
    external_dir = scripts_dir / directory_name
def external_script_copy_old_submodule_over(
        args: argparse.Namespace,
        directory: Path,
        old_submodules_dir: Path) -> None:
    blender_git_root = get_blender_git_root()
    external_dir = blender_git_root / directory

    old_submodule_relative_dir = Path("release") / "scripts" / directory_name
    print(f"Moving {old_submodule_relative_dir} to scripts/{directory_name} ...")

    old_submodule_dir = blender_git_root / old_submodule_relative_dir
    shutil.move(old_submodule_dir, external_dir)
    print(f"Moving {old_submodules_dir} to {directory} ...")
    shutil.move(blender_git_root / old_submodules_dir, external_dir)

    # Remove old ".git" which is a file with path to a submodule bare repo inside of main
    # repo .git/modules directory.
    (external_dir / ".git").unlink()

    bare_repo_relative_dir = Path(".git") / "modules" / "release" / "scripts" / directory_name
    print(f"Copying {bare_repo_relative_dir} to scripts/{directory_name}/.git ...")
    bare_repo_relative_dir = Path(".git") / "modules" / old_submodules_dir
    print(f"Copying {bare_repo_relative_dir} to {directory}/.git ...")
    bare_repo_dir = blender_git_root / bare_repo_relative_dir
    shutil.copytree(bare_repo_dir, external_dir / ".git")

@@ -298,25 +298,26 @@ def external_script_copy_old_submodule_over(args: argparse.Namespace, directory_
    call((args.git_command, "config", "--file", str(git_config), "--unset", "core.worktree"))


def external_script_initialize_if_needed(args: argparse.Namespace,
                                          repo_name: str,
                                          directory_name: str) -> None:
    """Initialize checkout of an external repository scripts directory"""
def floating_checkout_initialize_if_needed(args: argparse.Namespace,
                                           repo_name: str,
                                           directory: Path,
                                           old_submodules_dir: Path = None) -> None:
    """Initialize checkout of an external repository"""

    blender_git_root = Path(get_blender_git_root())
    blender_git_root = get_blender_git_root()
    blender_dot_git = blender_git_root / ".git"
    scripts_dir = blender_git_root / "scripts"
    external_dir = scripts_dir / directory_name
    external_dir = blender_git_root / directory

    if external_dir.exists():
        return

    print(f"Initializing scripts/{directory_name} ...")
    print(f"Initializing {directory} ...")

    old_submodule_dot_git = blender_git_root / "release" / "scripts" / directory_name / ".git"
    if old_submodule_dot_git.exists() and blender_dot_git.is_dir():
        external_script_copy_old_submodule_over(args, directory_name)
        return
    if old_submodules_dir is not None:
        old_submodule_dot_git = blender_git_root / old_submodules_dir / ".git"
        if old_submodule_dot_git.exists() and blender_dot_git.is_dir():
            external_script_copy_old_submodule_over(args, directory, old_submodules_dir)
            return

    origin_name = "upstream" if use_upstream_workflow(args) else "origin"
    blender_url = make_utils.git_get_remote_url(args.git_command, origin_name)

@@ -330,9 +331,9 @@ def external_script_initialize_if_needed(args: argparse.Namespace,
    call((args.git_command, "clone", "--origin", origin_name, external_url, str(external_dir)))


def external_script_add_origin_if_needed(args: argparse.Namespace,
                                         repo_name: str,
                                         directory_name: str) -> None:
def floating_checkout_add_origin_if_needed(args: argparse.Namespace,
                                           repo_name: str,
                                           directory: Path) -> None:
    """
    Add remote called 'origin' if there is a fork of the external repository available

@@ -344,9 +345,8 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,

    cwd = os.getcwd()

    blender_git_root = Path(get_blender_git_root())
    scripts_dir = blender_git_root / "scripts"
    external_dir = scripts_dir / directory_name
    blender_git_root = get_blender_git_root()
    external_dir = blender_git_root / directory

    origin_blender_url = make_utils.git_get_remote_url(args.git_command, "origin")
    origin_external_url = resolve_external_url(origin_blender_url, repo_name)

@@ -361,7 +361,7 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
    if not make_utils.git_is_remote_repository(args.git_command, origin_external_url):
        return

    print(f"Adding origin remote to {directory_name} pointing to fork ...")
    print(f"Adding origin remote to {directory} pointing to fork ...")

    # Non-obvious tricks to introduce the new remote called "origin" to the existing
    # submodule configuration.

@@ -390,23 +390,30 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
    return


def external_scripts_update(args: argparse.Namespace,
                            repo_name: str,
                            directory_name: str,
                            branch: Optional[str]) -> str:
def floating_checkout_update(args: argparse.Namespace,
                             repo_name: str,
                             directory: Path,
                             branch: Optional[str],
                             old_submodules_dir: Path = None,
                             only_update=False) -> str:
    """Update a single external checkout with the given name in the scripts folder"""

    external_script_initialize_if_needed(args, repo_name, directory_name)
    external_script_add_origin_if_needed(args, repo_name, directory_name)
    blender_git_root = get_blender_git_root()
    external_dir = blender_git_root / directory

    print(f"Updating scripts/{directory_name} ...")
    if only_update and not external_dir.exists():
        return ""

    floating_checkout_initialize_if_needed(args, repo_name, directory, old_submodules_dir)
    floating_checkout_add_origin_if_needed(args, repo_name, directory)

    blender_git_root = get_blender_git_root()
    external_dir = blender_git_root / directory

    print(f"* Updating {directory} ...")

    cwd = os.getcwd()

    blender_git_root = Path(get_blender_git_root())
    scripts_dir = blender_git_root / "scripts"
    external_dir = scripts_dir / directory_name

    # Update externals to appropriate given branch, falling back to main if none is given and/or
    # found in a sub-repository.
    branch_fallback = "main"

@@ -419,7 +426,7 @@ def external_scripts_update(args: argparse.Namespace,
    os.chdir(external_dir)
    msg = git_update_skip(args, check_remote_exists=False)
    if msg:
        skip_msg += directory_name + " skipped: " + msg + "\n"
        skip_msg += str(directory) + " skipped: " + msg + "\n"
    else:
        # Find a matching branch that exists.
        for remote in ("origin", "upstream"):

@@ -465,6 +472,17 @@ def external_scripts_update(args: argparse.Namespace,
    return skip_msg


def external_scripts_update(args: argparse.Namespace,
                            repo_name: str,
                            directory_name: str,
                            branch: Optional[str]) -> str:
    return floating_checkout_update(args,
                                    repo_name,
                                    Path("scripts") / directory_name,
                                    branch,
                                    old_submodules_dir=Path("release") / "scripts" / directory_name)


def scripts_submodules_update(args: argparse.Namespace, branch: Optional[str]) -> str:
    """Update working trees of addons and addons_contrib within the scripts/ directory"""
    msg = ""

@@ -475,12 +493,74 @@ def scripts_submodules_update(args: argparse.Namespace, branch: Optional[str]) -
    return msg


def floating_libraries_update(args: argparse.Namespace, branch: Optional[str]) -> str:
    """Update libraries checkouts which are floating (not attached as Git submodules)"""
    msg = ""

    msg += floating_checkout_update(args,
                                    "benchmarks",
                                    Path("tests") / "benchmarks",
                                    branch,
                                    only_update=True)

    return msg


def add_submodule_push_url(args: argparse.Namespace):
    """
    Add pushURL configuration for all locally activated submodules, pointing to SSH protocol.
    """

    blender_git_root = get_blender_git_root()
    modules = blender_git_root / ".git" / "modules"

    submodule_directories = get_submodule_directories(args)

    for submodule_path in submodule_directories:
        module_path = modules / submodule_path
        config = module_path / "config"

        if not config.exists():
            # Ignore modules which are not initialized
            continue

        push_url = check_output((args.git_command, "config", "--file", str(config),
                                 "--get", "remote.origin.pushURL"), exit_on_error=False)
        if push_url and push_url != "git@projects.blender.org:blender/lib-darwin_arm64.git":
            # Ignore modules which have pushURL configured.
            # Keep special exception, as some debug code sneaked into the production for a short
            # while.
            continue

        url = make_utils.git_get_config(args.git_command, "remote.origin.url", str(config))
        if not url.startswith("https:"):
            # Ignore non-URL URLs.
            continue

        url_parts = urlsplit(url)
        push_url = f"git@{url_parts.netloc}:{url_parts.path[1:]}"

        print(f"Setting pushURL to {push_url} for {submodule_path}")
        make_utils.git_set_config(args.git_command, "remote.origin.pushURL", push_url, str(config))

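add_submodule_push_url() above rewrites an HTTPS clone URL into an SSH pushURL. A standalone sketch of that urlsplit-based conversion, using one of the library URLs from the .gitmodules hunk (illustrative only, not part of the diff):

from urllib.parse import urlsplit

def https_to_ssh_push_url(url: str) -> str:
    # "https://projects.blender.org/blender/lib-linux_x64.git"
    # -> "git@projects.blender.org:blender/lib-linux_x64.git"
    parts = urlsplit(url)
    return f"git@{parts.netloc}:{parts.path[1:]}"

assert https_to_ssh_push_url(
    "https://projects.blender.org/blender/lib-linux_x64.git"
) == "git@projects.blender.org:blender/lib-linux_x64.git"
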
def submodules_update(args: argparse.Namespace, branch: Optional[str]) -> str:
    """Update submodules or other externally tracked source trees"""
    print_stage("Updating Submodules")

    msg = ""

    msg += scripts_submodules_update(args, branch)

    msg += floating_libraries_update(args, branch)

    print("* Updating Git submodules")
    exitcode = call((args.git_command, "submodule", "update", "--init"), exit_on_error=False)
    if exitcode != 0:
        msg += "Error updating Git submodules\n"

    add_submodule_push_url(args)

    return msg


@@ -494,26 +574,33 @@ if __name__ == "__main__":
        major = blender_version.version // 100
        minor = blender_version.version % 100
        branch = f"blender-v{major}.{minor}-release"
        release_version: Optional[str] = f"{major}.{minor}"
    else:
        branch = 'main'
        release_version = None

    if not args.no_libraries:
        svn_update(args, release_version)
    # Submodules and precompiled libraries require Git LFS.
    ensure_git_lfs(args)

    if not args.no_blender:
        blender_skip_msg = git_update_skip(args)
        if not blender_skip_msg:
            blender_skip_msg = blender_update(args)
        if blender_skip_msg:
            blender_skip_msg = "Blender repository skipped: " + blender_skip_msg + "\n"

    if not args.no_libraries:
        update_precompiled_libraries(args)
        if args.use_tests:
            update_tests_data_files(args)

    if not args.no_submodules:
        submodules_skip_msg = submodules_update(args, branch)

    # Report any skipped repositories at the end, so it's not as easy to miss.
    skip_msg = blender_skip_msg + submodules_skip_msg
    if skip_msg:
        print_stage(skip_msg.strip())
        print()
        print(skip_msg.strip())
        print()

    # For failed submodule update we throw an error, since not having correct
    # submodules can make Blender throw errors.

@@ -11,9 +11,7 @@ import re
import shutil
import subprocess
import sys
import os
from pathlib import Path
from urllib.parse import urljoin

from typing import (
    Sequence,

@@ -48,7 +46,7 @@ def check_output(cmd: Sequence[str], exit_on_error: bool = True) -> str:
        output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True)
    except subprocess.CalledProcessError as e:
        if exit_on_error:
            sys.stderr.write(" ".join(cmd))
            sys.stderr.write(" ".join(cmd) + "\n")
            sys.stderr.write(e.output + "\n")
            sys.exit(e.returncode)
        output = ""

@@ -87,25 +85,6 @@ def git_remote_exist(git_command: str, remote_name: str) -> bool:
    return remote_url != remote_name


def git_get_resolved_submodule_url(git_command: str, blender_url: str, submodule_path: str) -> str:
    git_root = check_output([git_command, "rev-parse", "--show-toplevel"])
    dot_gitmodules = os.path.join(git_root, ".gitmodules")

    submodule_key_prefix = f"submodule.{submodule_path}"
    submodule_key_url = f"{submodule_key_prefix}.url"

    gitmodule_url = git_get_config(
        git_command, submodule_key_url, file=dot_gitmodules)

    # A bit of a trickery to construct final URL.
    # Only works for the relative submodule URLs.
    #
    # Note that unless the LHS URL ends up with a slash urljoin treats the last component as a
    # file.
    assert gitmodule_url.startswith('..')
    return urljoin(blender_url + "/", gitmodule_url)


def git_is_remote_repository(git_command: str, repo: str) -> bool:
    """Returns true if the given repository is a valid/clonable git repo"""
    exit_code = call((git_command, "ls-remote", repo, "HEAD"), exit_on_error=False, silent=True)

@@ -113,7 +92,8 @@ def git_is_remote_repository(git_command: str, repo: str) -> bool:


def git_branch(git_command: str) -> str:
    # Get current branch name.
    """Get current branch name."""

    try:
        branch = subprocess.check_output([git_command, "rev-parse", "--abbrev-ref", "HEAD"])
    except subprocess.CalledProcessError as e:

@@ -137,44 +117,32 @@ def git_set_config(git_command: str, key: str, value: str, file: Optional[str] =
    return check_output([git_command, "config", key, value])


def git_tag(git_command: str) -> Optional[str]:
    # Get current tag name.
    try:
        tag = subprocess.check_output([git_command, "describe", "--exact-match"], stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        return None
def git_enable_submodule(git_command: str, submodule_dir: str):
    """Enable submodule denoted by its directory within the repository"""

    return tag.strip().decode('utf8')
    command = (git_command,
               "config",
               "--local",
               f"submodule.{submodule_dir}.update", "checkout")
    call(command, exit_on_error=True, silent=False)


def git_branch_release_version(branch: str, tag: Optional[str]) -> Optional[str]:
    re_match = re.search("^blender-v(.*)-release$", branch)
    release_version = None
    if re_match:
        release_version = re_match.group(1)
    elif tag:
        re_match = re.search(r"^v([0-9]*\.[0-9]*).*", tag)
        if re_match:
            release_version = re_match.group(1)
    return release_version
def git_update_submodule(git_command: str, submodule_dir: str):
    """
    Update the given submodule.

    The submodule is denoted by its path within the repository.
    This function will initialize the submodule if it has not been initialized.
    """

def svn_libraries_base_url(release_version: Optional[str], branch: Optional[str] = None) -> str:
    if release_version:
        svn_branch = "tags/blender-" + release_version + "-release"
    elif branch:
        svn_branch = "branches/" + branch
    else:
        svn_branch = "trunk"
    return "https://svn.blender.org/svnroot/bf-blender/" + svn_branch + "/lib/"
    call((git_command, "submodule", "update", "--init", submodule_dir))


def command_missing(command: str) -> bool:
    # Support running with Python 2 for macOS
    if sys.version_info >= (3, 0):
        return shutil.which(command) is None
    else:
        return False
    return False


class BlenderVersion:

@@ -1,49 +1,55 @@
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15

set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"
set BUILD_VS_LIBDIR=lib/windows_x64

if NOT "%verbose%" == "" (
    echo Library Directory = "%BUILD_VS_LIBDIR%"
)
if NOT EXIST %BUILD_VS_LIBDIR% (
    rem libs not found, but svn is on the system
    if not "%SVN%"=="" (
if NOT EXIST "%BUILD_VS_LIBDIR%\.git" (
    rem libs not found, but git is on the system
    if not "%GIT%"=="" (
        echo.
        echo The required external libraries in %BUILD_VS_LIBDIR% are missing
        echo.
        set /p GetLibs= "Would you like to download them? (y/n)"
        if /I "!GetLibs!"=="Y" (
            echo.
            echo Downloading %BUILD_VS_SVNDIR% libraries, please wait.
            echo Downloading %BUILD_VS_LIBDIR% libraries, please wait.
            echo.
:RETRY
            "%SVN%" checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/%BUILD_VS_SVNDIR% %BUILD_VS_LIBDIR%
            echo *********************************************************
            echo *                                                       *
            echo * Note: Once the initial download finishes and you see *
            echo * "Resolving deltas: 100%% (nnn/nnn) done"               *
            echo * a second, much larger, update will occur with        *
            echo * no visible updates. Please do not interrupt          *
            echo * this process. It may take over an hour to            *
            echo * complete depending on your internet connection.      *
            echo *                                                       *
            echo *********************************************************
:RETRY
            "%GIT%" -C "%BLENDER_DIR%\" config --local "submodule.%BUILD_VS_LIBDIR%.update" "checkout"
            "%GIT%" -C "%BLENDER_DIR%\" submodule update --progress --init "%BUILD_VS_LIBDIR%"
            if errorlevel 1 (
                set /p LibRetry= "Error during download, retry? y/n"
                if /I "!LibRetry!"=="Y" (
                    cd %BUILD_VS_LIBDIR%
                    "%SVN%" cleanup
                    cd %BLENDER_DIR%
                    goto RETRY
                )
                echo.
                echo Error: Download of external libraries failed.
                echo This is needed for building, please manually run 'svn cleanup' and 'svn update' in
                echo %BUILD_VS_LIBDIR% , until this is resolved you CANNOT make a successful blender build
                echo Until this is resolved you CANNOT make a successful blender build.
                echo.
                exit /b 1
            )
        )
    ) else (
        echo Not downloading libraries, until this is resolved you CANNOT make a successful blender build.
        exit /b 1
    )
)
) else (
    if NOT EXIST %PYTHON% (
        if not "%SVN%"=="" (
        if not "%GIT%"=="" (
            echo.
            echo Python not found in external libraries, updating to latest version
            echo.
            "%SVN%" update %BUILD_VS_LIBDIR%
            "%GIT%" -C "%BLENDER_DIR%" submodule update "%BUILD_VS_LIBDIR%"
        )
    )
)

@@ -53,8 +59,8 @@ if NOT EXIST %BUILD_VS_LIBDIR% (
    echo Error: Required libraries not found at "%BUILD_VS_LIBDIR%"
    echo This is needed for building, aborting!
    echo.
    if "%SVN%"=="" (
        echo This is most likely caused by svn.exe not being available.
    if "%GIT%"=="" (
        echo This is most likely caused by git.exe not being available.
    )
    exit /b 1
)

@@ -1,5 +1,4 @@
REM find all dependencies and set the corresponding environment variables.
for %%X in (svn.exe) do (set SVN=%%~$PATH:X)
for %%X in (cmake.exe) do (set CMAKE=%%~$PATH:X)
for %%X in (ctest.exe) do (set CTEST=%%~$PATH:X)
for %%X in (git.exe) do (set GIT=%%~$PATH:X)

@@ -7,31 +6,32 @@ REM For python, default on 310 but if that does not exist also check
REM the 311, 312 and finally 39 folders to see if those are there, it checks
REM this far ahead to ensure good lib folder compatibility in the future
REM it falls back to 3.9 just incase it is a very old lib folder.
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\310\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\310\bin\python.exe
if EXIST %PYTHON% (
    goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\311\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\311\bin\python.exe
if EXIST %PYTHON% (
    goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\312\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\312\bin\python.exe
if EXIST %PYTHON% (
    goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\39\bin\python.exe
set PYTHON=%BLENDER_DIR%\lib\windows_x64\python\39\bin\python.exe
if EXIST %PYTHON% (
    goto detect_python_done
)

if NOT EXIST %PYTHON% (
    echo Warning: Python not found, there is likely an issue with the library folder
if EXIST %BLENDER_DIR%\lib\windows_x64\ (
    echo Warning: Python not found, there is likely an issue with the library folder
)
set PYTHON=""
)

:detect_python_done
if NOT "%verbose%" == "" (
    echo svn   : "%SVN%"
    echo cmake : "%CMAKE%"
    echo ctest : "%CTEST%"
    echo git   : "%GIT%"

@@ -1,5 +1,5 @@
if EXIST %BLENDER_DIR%\..\lib\win64_vc15\llvm\bin\clang-format.exe (
    set CF_PATH=..\lib\win64_vc15\llvm\bin
if EXIST %BLENDER_DIR%\lib\windows_x64\llvm\bin\clang-format.exe (
    set CF_PATH=lib\windows_x64\llvm\bin
    goto detect_done
)

@@ -0,0 +1,19 @@
set BUILD_VS_LIBDIR=lib/windows_x64

:RETRY
"%GIT%" -C "%BLENDER_DIR%\" config --local "submodule.%BUILD_VS_LIBDIR%.update" "checkout"
"%GIT%" -C "%BLENDER_DIR%\" submodule update --progress --init "%BUILD_VS_LIBDIR%"
if errorlevel 1 (
    set /p LibRetry= "Error during update, retry? y/n"
    if /I "!LibRetry!"=="Y" (
        goto RETRY
    )
    echo.
    echo Error: Download of external libraries failed.
    echo Until this is resolved you CANNOT make a successful blender build.
    echo.
    exit /b 1
)
REM re-detect the dependencies after updating the libraries so any python version
REM changes are accounted for.
call "%~dp0\find_dependencies.cmd"

@@ -110,9 +110,6 @@ if NOT "%1" == "" (
    ) else if "%1" == "doc_py" (
        set DOC_PY=1
        goto EOF
    ) else if "%1" == "svnfix" (
        set SVN_FIX=1
        goto EOF
    ) else (
        echo Command "%1" unknown, aborting!
        goto ERR

@@ -4,9 +4,7 @@ set BUILD_CMAKE_ARGS=
set BUILD_ARCH=
set BUILD_VS_VER=
set BUILD_VS_YEAR=
set BUILD_VS_LIBDIRPOST=
set BUILD_VS_LIBDIR=
set BUILD_VS_SVNDIR=
set KEY_NAME=
set MSBUILD_PLATFORM=
set MUST_CLEAN=

@@ -8,13 +8,10 @@ for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Branch_hash=%%i
cd "%BLENDER_DIR%/scripts/addons"
for /f "delims=" %%i in ('"%GIT%" rev-parse --abbrev-ref HEAD') do echo Addons_Branch_name=%%i
for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Addons_Branch_hash=%%i
if "%SVN%" == "" (
    echo SVN not found, cannot library information.
    goto EOF
)
set BUILD_VS_LIBDIR=%BLENDER_DIR%..\lib\win64_vc15
for /f "delims=" %%i in ('"%SVN%" info --show-item=url --no-newline %BUILD_VS_LIBDIR% ') do echo Libs_URL=%%i
for /f "delims=" %%i in ('"%SVN%" info --show-item=revision --no-newline %BUILD_VS_LIBDIR% ') do echo Libs_Revision=%%i
for /f "delims=" %%i in ('"%SVN%" info --show-item=last-changed-date --no-newline %BUILD_VS_LIBDIR% ') do echo Libs_LastChange=%%i

cd "%BLENDER_DIR%/lib/windows_x64"
for /f "delims=" %%i in ('"%GIT%" rev-parse --abbrev-ref HEAD') do echo Libs_Branch_name=%%i
for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Libs_Branch_hash=%%i

cd "%BLENDER_DIR%"
:EOF

@@ -1,25 +0,0 @@
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15

set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"

echo Starting cleanup in %BUILD_VS_LIBDIR%.
cd %BUILD_VS_LIBDIR%
:RETRY
"%SVN%" cleanup
"%SVN%" update
if errorlevel 1 (
    set /p LibRetry= "Error during update, retry? y/n"
    if /I "!LibRetry!"=="Y" (
        goto RETRY
    )
    echo.
    echo Error: Download of external libraries failed.
    echo This is needed for building, please manually run 'svn cleanup' and 'svn update' in
    echo %BUILD_VS_LIBDIR% , until this is resolved you CANNOT make a successful blender build
    echo.
    exit /b 1
)
echo Cleanup complete

@@ -1,24 +0,0 @@
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15

set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"

cd %BUILD_VS_LIBDIR%
:RETRY
"%SVN%" update
if errorlevel 1 (
    set /p LibRetry= "Error during update, retry? y/n"
    if /I "!LibRetry!"=="Y" (
        "%SVN%" cleanup
        goto RETRY
    )
    echo.
    echo Error: Download of external libraries failed.
    echo This is needed for building, please manually run 'svn cleanup' and 'svn update' in
    echo %BUILD_VS_LIBDIR% , until this is resolved you CANNOT make a successful blender build
    echo.
    exit /b 1
)

cd %BLENDER_DIR%

@@ -8,6 +8,6 @@ exit /b 1
:detect_python_done

REM Use -B to avoid writing __pycache__ in lib directory and causing update conflicts.
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_test.py --git-command "%GIT%" --svn-command "%SVN%" --cmake-command="%CMAKE%" --ctest-command="%CTEST%" --config="%BUILD_TYPE%" %BUILD_DIR%
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_test.py --git-command "%GIT%" --cmake-command="%CMAKE%" --ctest-command="%CTEST%" --config="%BUILD_TYPE%" %BUILD_DIR%

:EOF

@@ -5,6 +5,6 @@ if NOT EXIST %PYTHON% (
:detect_python_done

REM Use -B to avoid writing __pycache__ in lib directory and causing update conflicts.
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_update.py --git-command "%GIT%" --svn-command "%SVN%" %BUILD_UPDATE_ARGS%
%PYTHON% -B %BLENDER_DIR%\build_files\utils\make_update.py --git-command "%GIT%" %BUILD_UPDATE_ARGS%

:EOF

@@ -135,18 +135,26 @@ ccl_device_forceinline bool triangle_light_sample(KernelGlobals kg,
  const float3 e1 = V[2] - V[0];
  const float3 e2 = V[2] - V[1];
  const float longest_edge_squared = max(len_squared(e0), max(len_squared(e1), len_squared(e2)));
  const float3 N0 = cross(e0, e1);
  float3 N0 = cross(e0, e1);
  /* Flip normal if necessary. */
  const int object_flag = kernel_data_fetch(object_flag, object);
  if (object_flag & SD_OBJECT_NEGATIVE_SCALE) {
    N0 = -N0;
  }

  /* Do not draw samples from the side without MIS. */
  ls->shader = kernel_data_fetch(tri_shader, prim);
  const float distance_to_plane = dot(N0, V[0] - P) / dot(N0, N0);
  const int ls_shader_flag = kernel_data_fetch(shaders, ls->shader & SHADER_MASK).flags;
  if (!(ls_shader_flag & (distance_to_plane > 0 ? SD_MIS_BACK : SD_MIS_FRONT))) {
    return false;
  }

  float Nl = 0.0f;
  ls->Ng = safe_normalize_len(N0, &Nl);
  const float area = 0.5f * Nl;

  /* flip normal if necessary */
  const int object_flag = kernel_data_fetch(object_flag, object);
  if (object_flag & SD_OBJECT_NEGATIVE_SCALE) {
    ls->Ng = -ls->Ng;
  }
  ls->eval_fac = 1.0f;
  ls->shader = kernel_data_fetch(tri_shader, prim);
  ls->object = object;
  ls->prim = prim;
  ls->lamp = LAMP_NONE;

@@ -154,8 +162,6 @@ ccl_device_forceinline bool triangle_light_sample(KernelGlobals kg,
  ls->type = LIGHT_TRIANGLE;
  ls->group = object_lightgroup(kg, object);

  float distance_to_plane = fabsf(dot(N0, V[0] - P) / dot(N0, N0));

  if (!in_volume_segment && (longest_edge_squared > distance_to_plane * distance_to_plane)) {
    /* A modified version of James Arvo, "Stratified Sampling of Spherical Triangles"
     * http://www.graphics.cornell.edu/pubs/1995/Arv95c.pdf */
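
The hunk above now decides, before sampling, which side of the triangle's plane the shading point sits on and rejects the sample when the emission shader has MIS disabled for that side. A rough scalar sketch of that test (plain Python with tuples standing in for float3, names illustrative only):

def reject_one_sided_sample(n0, v0, p, mis_front: bool, mis_back: bool) -> bool:
    # Signed distance from P to the triangle plane, scaled by |N0|^2;
    # only the sign matters for choosing which side P is on.
    dot_nn = sum(a * a for a in n0)
    distance_to_plane = sum(n * (v - q) for n, v, q in zip(n0, v0, p)) / dot_nn
    # Positive distance means P sees the back side; skip the sample when the
    # shader does not allow MIS samples from that side.
    needed = mis_back if distance_to_plane > 0 else mis_front
    return not needed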

@@ -108,7 +108,9 @@ LightTreeEmitter::LightTreeEmitter(Scene *scene,

    /* TODO: need a better way to handle this when textures are used. */
    float area = triangle_area(vertices[0], vertices[1], vertices[2]);
    measure.energy = area * average(shader->emission_estimate);
    /* Use absolute value of emission_estimate so lights with negative strength are properly
     * supported in the light tree. */
    measure.energy = area * average(fabs(shader->emission_estimate));

    /* NOTE: the original implementation used the bounding box centroid, but triangle centroid
     * seems to work fine */

@@ -220,7 +222,7 @@ LightTreeEmitter::LightTreeEmitter(Scene *scene,

    /* Use absolute value of energy so lights with negative strength are properly supported in the
     * light tree. */
    measure.energy = fabsf(average(strength));
    measure.energy = average(fabs(strength));

    light_set_membership = lamp->get_light_set_membership();
  }

@@ -264,7 +264,7 @@ void Shader::estimate_emission()
  }

  ShaderInput *surf = graph->output()->input("Surface");
  emission_estimate = fabs(output_estimate_emission(surf->link, emission_is_constant));
  emission_estimate = output_estimate_emission(surf->link, emission_is_constant);

  if (is_zero(emission_estimate)) {
    emission_sampling = EMISSION_SAMPLING_NONE;

@@ -274,8 +274,9 @@ void Shader::estimate_emission()
     * using a lot of memory in the light tree and potentially wasting samples
     * where indirect light samples are sufficient.
     * Possible optimization: estimate front and back emission separately. */
    emission_sampling = (reduce_max(emission_estimate) > 0.5f) ? EMISSION_SAMPLING_FRONT_BACK :
                                                                 EMISSION_SAMPLING_NONE;
    emission_sampling = (reduce_max(fabs(emission_estimate)) > 0.5f) ?
                            EMISSION_SAMPLING_FRONT_BACK :
                            EMISSION_SAMPLING_NONE;
  }
  else {
    emission_sampling = emission_sampling_method;
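
These hunks switch from taking the absolute value of the averaged emission to averaging the per-channel absolute values, so a light whose RGB strength mixes positive and negative channels still gets a non-zero importance in the light tree. A small worked example (plain Python, illustrative only):

def energy_old(strength):
    # fabsf(average(strength)): mixed-sign channels can cancel out.
    return abs(sum(strength) / len(strength))

def energy_new(strength):
    # average(fabs(strength)): each channel contributes its magnitude.
    return sum(abs(c) for c in strength) / len(strength)

strength = (5.0, -5.0, 0.0)
print(energy_old(strength))  # 0.0   -> the light would be treated as unimportant
print(energy_new(strength))  # ~3.33 -> the light keeps a sensible importance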
@ -111,6 +111,19 @@ static const char *get_egl_error_message_string(EGLint error)
|
|||
}
|
||||
}
|
||||
|
||||
static void egl_print_error(const char *message, const EGLint error)
|
||||
{
|
||||
const char *code = get_egl_error_enum_string(error);
|
||||
const char *msg = get_egl_error_message_string(error);
|
||||
|
||||
fprintf(stderr,
|
||||
"%sEGL Error (0x%04X): %s: %s\n",
|
||||
message,
|
||||
uint(error),
|
||||
code ? code : "<Unknown>",
|
||||
msg ? msg : "<Unknown>");
|
||||
}
|
||||
|
||||
static bool egl_chk(bool result,
|
||||
const char *file = nullptr,
|
||||
int line = 0,
|
||||
|
@ -132,11 +145,7 @@ static bool egl_chk(bool result,
|
|||
code ? code : "<Unknown>",
|
||||
msg ? msg : "<Unknown>");
|
||||
#else
|
||||
fprintf(stderr,
|
||||
"EGL Error (0x%04X): %s: %s\n",
|
||||
uint(error),
|
||||
code ? code : "<Unknown>",
|
||||
msg ? msg : "<Unknown>");
|
||||
egl_print_error("", error);
|
||||
(void)(file);
|
||||
(void)(line);
|
||||
(void)(text);
|
||||
|
@ -343,33 +352,43 @@ GHOST_TSuccess GHOST_ContextEGL::initializeDrawingContext()
goto error;
}

if (!EGL_CHK(::eglInitialize(m_display, &egl_major, &egl_minor)) ||
(egl_major == 0 && egl_minor == 0))
{
/* We failed to create a regular render window, retry and see if we can create a headless
* render context. */
::eglTerminate(m_display);
const EGLBoolean init_display_result = ::eglInitialize(m_display, &egl_major, &egl_minor);
const EGLint init_display_error = (init_display_result) ? 0 : eglGetError();

const char *egl_extension_st = eglQueryString(EGL_NO_DISPLAY, EGL_EXTENSIONS);
assert(egl_extension_st != nullptr);
assert(egl_extension_st == nullptr ||
strstr(egl_extension_st, "EGL_MESA_platform_surfaceless") != nullptr);
if (egl_extension_st == nullptr ||
strstr(egl_extension_st, "EGL_MESA_platform_surfaceless") == nullptr)
{
goto error;
if (!init_display_result || (egl_major == 0 && egl_minor == 0)) {
/* We failed to create a regular render window, retry and see if we can create a headless
* render context. */
::eglTerminate(m_display);

const char *egl_extension_st = eglQueryString(EGL_NO_DISPLAY, EGL_EXTENSIONS);
assert(egl_extension_st != nullptr);
assert(egl_extension_st == nullptr ||
strstr(egl_extension_st, "EGL_MESA_platform_surfaceless") != nullptr);
if (egl_extension_st == nullptr ||
strstr(egl_extension_st, "EGL_MESA_platform_surfaceless") == nullptr)
{
egl_print_error("Failed to create display GPU context: ", init_display_error);
fprintf(
stderr,
"Failed to create headless GPU context: No EGL_MESA_platform_surfaceless extension");
goto error;
}

m_display = eglGetPlatformDisplayEXT(
EGL_PLATFORM_SURFACELESS_MESA, EGL_DEFAULT_DISPLAY, nullptr);

const EGLBoolean headless_result = ::eglInitialize(m_display, &egl_major, &egl_minor);
const EGLint init_headless_error = (headless_result) ? 0 : eglGetError();

if (!headless_result) {
egl_print_error("Failed to create display GPU context: ", init_display_error);
egl_print_error("Failed to create headless GPU context: ", init_headless_error);
goto error;
}
}

m_display = eglGetPlatformDisplayEXT(
EGL_PLATFORM_SURFACELESS_MESA, EGL_DEFAULT_DISPLAY, nullptr);

if (!EGL_CHK(::eglInitialize(m_display, &egl_major, &egl_minor))) {
goto error;
}
/* Because the first eglInitialize will print an error to the terminal, print a "success"
* message here to let the user know that we successfully recovered from the error. */
fprintf(stderr, "\nManaged to successfully fallback to surfaceless EGL rendering!\n\n");
}

#ifdef WITH_GHOST_DEBUG
fprintf(stderr, "EGL Version %d.%d\n", egl_major, egl_minor);
#endif

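For readers less familiar with EGL, the reworked block above follows one pattern: initialize the default display, and only when that fails (capturing the error code immediately) look for EGL_MESA_platform_surfaceless and retry with a surfaceless display, reporting both errors if the retry also fails. A condensed, hypothetical sketch of that pattern, not the GHOST code itself (eglGetPlatformDisplayEXT has to be resolved via eglGetProcAddress in practice, and error reporting is elided):

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <cstring>

static bool egl_init_with_surfaceless_fallback(EGLDisplay &display, EGLint &major, EGLint &minor)
{
  if (eglInitialize(display, &major, &minor)) {
    return true; /* The regular display works, nothing else to do. */
  }
  const EGLint init_error = eglGetError(); /* Capture before any further EGL call. */
  eglTerminate(display);

  const char *exts = eglQueryString(EGL_NO_DISPLAY, EGL_EXTENSIONS);
  if (exts == nullptr || strstr(exts, "EGL_MESA_platform_surfaceless") == nullptr) {
    /* No headless fallback available; report `init_error` and give up. */
    return false;
  }

  /* Assumed resolved via eglGetProcAddress() beforehand. */
  display = eglGetPlatformDisplayEXT(EGL_PLATFORM_SURFACELESS_MESA, EGL_DEFAULT_DISPLAY, nullptr);
  if (!eglInitialize(display, &major, &minor)) {
    /* Report both `init_error` and the new eglGetError() result here. */
    return false;
  }
  return true;
}
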
@ -135,6 +135,12 @@ static int gwl_registry_handler_interface_slot_max();
|
|||
static int gwl_registry_handler_interface_slot_from_string(const char *interface);
|
||||
static const GWL_RegistryHandler *gwl_registry_handler_from_interface_slot(int interface_slot);
|
||||
|
||||
static bool xkb_compose_state_feed_and_get_utf8(
|
||||
xkb_compose_state *compose_state,
|
||||
xkb_state *state,
|
||||
const xkb_keycode_t key,
|
||||
char r_utf8_buf[sizeof(GHOST_TEventKeyData::utf8_buf)]);
|
||||
|
||||
#ifdef USE_EVENT_BACKGROUND_THREAD
|
||||
static void gwl_display_event_thread_destroy(GWL_Display *display);
|
||||
|
||||
|
@ -1103,6 +1109,37 @@ static void gwl_seat_key_layout_active_state_update_mask(GWL_Seat *seat)
}
}

/** Callback that runs from GHOST's timer. */
static void gwl_seat_key_repeat_timer_fn(GHOST_ITimerTask *task, uint64_t time_ms)
{
GWL_KeyRepeatPlayload *payload = static_cast<GWL_KeyRepeatPlayload *>(task->getUserData());

GWL_Seat *seat = payload->seat;
wl_surface *wl_surface_focus = seat->keyboard.wl.surface_window;
if (UNLIKELY(wl_surface_focus == nullptr)) {
return;
}

GHOST_IWindow *win = ghost_wl_surface_user_data(wl_surface_focus);
GHOST_SystemWayland *system = seat->system;
const uint64_t event_ms = payload->time_ms_init + time_ms;
/* Calculate this value every time in case modifier keys are pressed. */

char utf8_buf[sizeof(GHOST_TEventKeyData::utf8_buf)] = {'\0'};
if (seat->xkb.compose_state &&
xkb_compose_state_feed_and_get_utf8(
seat->xkb.compose_state, seat->xkb.state, payload->key_code, utf8_buf))
{
/* `utf8_buf` has been filled by a compose action. */
}
else {
xkb_state_key_get_utf8(seat->xkb.state, payload->key_code, utf8_buf, sizeof(utf8_buf));
}

system->pushEvent_maybe_pending(new GHOST_EventKey(
event_ms, GHOST_kEventKeyDown, win, payload->key_data.gkey, true, utf8_buf));
}

/**
* \note Caller must lock `timer_mutex`.
*/

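This extracted callback replaces the lambda previously defined inside keyboard_handle_key, so keyboard_handle_enter can reuse it in two ways: arm the repeat timer with it, and also invoke it once by hand with a zero elapsed time so a key held while entering the surface registers as pressed immediately (see the later hunk and #117896). A generic, hypothetical sketch of that "arm and fire once" shape, not GHOST's actual timer API:

#include <cstdint>
#include <functional>
#include <vector>

/* Hypothetical minimal stand-in for a timer registry. */
struct RepeatTimer {
  std::function<void(uint64_t)> callback;
};
static std::vector<RepeatTimer> g_timers;

static void repeat_start_and_fire(std::function<void(uint64_t)> callback)
{
  g_timers.push_back({callback}); /* Arm: an event loop would call this periodically. */
  callback(0);                    /* Fire once now, so the key-down is visible immediately. */
}
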
@ -1743,6 +1780,8 @@ static void ghost_wayland_log_handler(const char *msg, va_list arg)
|
|||
__attribute__((format(printf, 1, 0)));
|
||||
#endif
|
||||
|
||||
static bool ghost_wayland_log_handler_is_background = false;
|
||||
|
||||
/**
|
||||
* Callback for WAYLAND to run when there is an error.
|
||||
*
|
||||
|
@ -1751,6 +1790,15 @@ static void ghost_wayland_log_handler(const char *msg, va_list arg)
|
|||
*/
|
||||
static void ghost_wayland_log_handler(const char *msg, va_list arg)
|
||||
{
|
||||
/* This is fine in background mode, we will try to fall back to headless GPU context.
|
||||
* Happens when render farm process runs without user login session. */
|
||||
if (ghost_wayland_log_handler_is_background &&
|
||||
(strstr(msg, "error: XDG_RUNTIME_DIR not set in the environment") ||
|
||||
strstr(msg, "error: XDG_RUNTIME_DIR is invalid or not set in the environment")))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
fprintf(stderr, "GHOST/Wayland: ");
|
||||
vfprintf(stderr, msg, arg); /* Includes newline. */
|
||||
|
||||
|
@ -2761,13 +2809,11 @@ static void keyboard_depressed_state_key_event(GWL_Seat *seat,
|
|||
}
|
||||
|
||||
static void keyboard_depressed_state_push_events_from_change(
|
||||
GWL_Seat *seat, const GWL_KeyboardDepressedState &key_depressed_prev)
|
||||
GWL_Seat *seat,
|
||||
GHOST_IWindow *win,
|
||||
const uint64_t event_ms,
|
||||
const GWL_KeyboardDepressedState &key_depressed_prev)
|
||||
{
|
||||
GHOST_IWindow *win = ghost_wl_surface_user_data(seat->keyboard.wl.surface_window);
|
||||
const GHOST_SystemWayland *system = seat->system;
|
||||
/* Caller has no time-stamp, set from system. */
|
||||
const uint64_t event_ms = system->getMilliSeconds();
|
||||
|
||||
/* Separate key up and down into separate passes so key down events always come after key up.
|
||||
* Do this so users of GHOST can use the last pressed or released modifier to check
|
||||
* if the modifier is held instead of counting modifiers pressed as is done here,
|
||||
|
@ -4688,6 +4734,8 @@ static void keyboard_handle_enter(void *data,
|
|||
CLOG_INFO(LOG, 2, "enter");
|
||||
|
||||
GWL_Seat *seat = static_cast<GWL_Seat *>(data);
|
||||
GHOST_IWindow *win = ghost_wl_surface_user_data(wl_surface);
|
||||
|
||||
seat->keyboard.serial = serial;
|
||||
seat->keyboard.wl.surface_window = wl_surface;
|
||||
|
||||
|
@ -4699,6 +4747,12 @@ static void keyboard_handle_enter(void *data,
|
|||
GWL_KeyboardDepressedState key_depressed_prev = seat->key_depressed;
|
||||
keyboard_depressed_state_reset(seat);
|
||||
|
||||
/* Keep track of the last held repeating key, start the repeat timer if one exists. */
|
||||
struct {
|
||||
uint32_t key = std::numeric_limits<uint32_t>::max();
|
||||
xkb_keysym_t sym = 0;
|
||||
} repeat;
|
||||
|
||||
uint32_t *key;
|
||||
WL_ARRAY_FOR_EACH (key, keys) {
|
||||
const xkb_keycode_t key_code = *key + EVDEV_OFFSET;
|
||||
|
@ -4708,9 +4762,41 @@ static void keyboard_handle_enter(void *data,
|
|||
if (gkey != GHOST_kKeyUnknown) {
|
||||
keyboard_depressed_state_key_event(seat, gkey, GHOST_kEventKeyDown);
|
||||
}
|
||||
|
||||
if (xkb_keymap_key_repeats(xkb_state_get_keymap(seat->xkb.state), key_code)) {
|
||||
repeat.key = *key;
|
||||
repeat.sym = sym;
|
||||
}
|
||||
}
|
||||
|
||||
keyboard_depressed_state_push_events_from_change(seat, key_depressed_prev);
|
||||
/* Caller has no time-stamp, set from system. */
|
||||
const uint64_t event_ms = seat->system->getMilliSeconds();
|
||||
keyboard_depressed_state_push_events_from_change(seat, win, event_ms, key_depressed_prev);
|
||||
|
||||
if ((repeat.key != std::numeric_limits<uint32_t>::max()) && (seat->key_repeat.rate > 0)) {
|
||||
/* Since the key has been held, immediately send a press event.
|
||||
* This also ensures the key will be registered as pressed, see #117896. */
|
||||
#ifdef USE_EVENT_BACKGROUND_THREAD
|
||||
std::lock_guard lock_timer_guard{*seat->system->timer_mutex};
|
||||
#endif
|
||||
/* Should have been cleared on leave, set here just in case. */
|
||||
if (UNLIKELY(seat->key_repeat.timer)) {
|
||||
keyboard_handle_key_repeat_cancel(seat);
|
||||
}
|
||||
|
||||
const xkb_keycode_t key_code = repeat.key + EVDEV_OFFSET;
|
||||
const GHOST_TKey gkey = xkb_map_gkey_or_scan_code(repeat.sym, repeat.key);
|
||||
|
||||
GWL_KeyRepeatPlayload *key_repeat_payload = new GWL_KeyRepeatPlayload();
|
||||
key_repeat_payload->seat = seat;
|
||||
key_repeat_payload->key_code = key_code;
|
||||
key_repeat_payload->key_data.gkey = gkey;
|
||||
|
||||
gwl_seat_key_repeat_timer_add(seat, gwl_seat_key_repeat_timer_fn, key_repeat_payload, false);
|
||||
/* Ensure there is a press event on enter so this is known to be held before any mouse
|
||||
* button events which may use a key-binding that depends on this key being held. */
|
||||
gwl_seat_key_repeat_timer_fn(seat->key_repeat.timer, 0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -5001,33 +5087,7 @@ static void keyboard_handle_key(void *data,
|
|||
}
|
||||
|
||||
if (key_repeat_payload) {
|
||||
auto key_repeat_fn = [](GHOST_ITimerTask *task, uint64_t time_ms) {
|
||||
GWL_KeyRepeatPlayload *payload = static_cast<GWL_KeyRepeatPlayload *>(task->getUserData());
|
||||
|
||||
GWL_Seat *seat = payload->seat;
|
||||
if (wl_surface *wl_surface_focus = seat->keyboard.wl.surface_window) {
|
||||
GHOST_IWindow *win = ghost_wl_surface_user_data(wl_surface_focus);
|
||||
GHOST_SystemWayland *system = seat->system;
|
||||
const uint64_t event_ms = payload->time_ms_init + time_ms;
|
||||
/* Calculate this value every time in case modifier keys are pressed. */
|
||||
|
||||
char utf8_buf[sizeof(GHOST_TEventKeyData::utf8_buf)] = {'\0'};
|
||||
if (seat->xkb.compose_state &&
|
||||
xkb_compose_state_feed_and_get_utf8(
|
||||
seat->xkb.compose_state, seat->xkb.state, payload->key_code, utf8_buf))
|
||||
{
|
||||
/* `utf8_buf` has been filled by a compose action. */
|
||||
}
|
||||
else {
|
||||
xkb_state_key_get_utf8(seat->xkb.state, payload->key_code, utf8_buf, sizeof(utf8_buf));
|
||||
}
|
||||
|
||||
system->pushEvent_maybe_pending(new GHOST_EventKey(
|
||||
event_ms, GHOST_kEventKeyDown, win, payload->key_data.gkey, true, utf8_buf));
|
||||
}
|
||||
};
|
||||
|
||||
gwl_seat_key_repeat_timer_add(seat, key_repeat_fn, key_repeat_payload, true);
|
||||
gwl_seat_key_repeat_timer_add(seat, gwl_seat_key_repeat_timer_fn, key_repeat_payload, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -6823,6 +6883,7 @@ GHOST_SystemWayland::GHOST_SystemWayland(bool background)
|
|||
#endif
|
||||
display_(new GWL_Display)
|
||||
{
|
||||
ghost_wayland_log_handler_is_background = background;
|
||||
wl_log_set_handler_client(ghost_wayland_log_handler);
|
||||
|
||||
display_->system = this;
|
||||
|
|
|
@ -339,7 +339,11 @@ template<typename T> inline T *MEM_cnew(const char *allocation_name, const T &ot
public: \
void *operator new(size_t num_bytes) \
{ \
return MEM_mallocN(num_bytes, _id); \
return MEM_mallocN_aligned(num_bytes, __STDCPP_DEFAULT_NEW_ALIGNMENT__, _id); \
} \
void *operator new(size_t num_bytes, std::align_val_t alignment) \
{ \
return MEM_mallocN_aligned(num_bytes, size_t(alignment), _id); \
} \
void operator delete(void *mem) \
{ \

@ -349,7 +353,11 @@ template<typename T> inline T *MEM_cnew(const char *allocation_name, const T &ot
} \
void *operator new[](size_t num_bytes) \
{ \
return MEM_mallocN(num_bytes, _id "[]"); \
return MEM_mallocN_aligned(num_bytes, __STDCPP_DEFAULT_NEW_ALIGNMENT__, _id "[]"); \
} \
void *operator new[](size_t num_bytes, std::align_val_t alignment) \
{ \
return MEM_mallocN_aligned(num_bytes, size_t(alignment), _id "[]"); \
} \
void operator delete[](void *mem) \
{ \

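The change above adds align_val_t overloads and routes the plain operator new through MEM_mallocN_aligned with __STDCPP_DEFAULT_NEW_ALIGNMENT__, so both paths behave consistently. For context, this mirrors how C++17 itself dispatches allocation when a type is over-aligned. A small standalone sketch (standard C++ only, using std::aligned_alloc as a stand-in for the guarded allocator, so it will not build with MSVC):

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <new>

struct alignas(64) OverAligned {
  float values[16];

  void *operator new(std::size_t num_bytes)
  {
    /* Used when alignof(T) <= __STDCPP_DEFAULT_NEW_ALIGNMENT__. */
    return std::aligned_alloc(__STDCPP_DEFAULT_NEW_ALIGNMENT__, num_bytes);
  }
  void *operator new(std::size_t num_bytes, std::align_val_t alignment)
  {
    /* Selected automatically here because alignof(OverAligned) == 64 > default alignment. */
    return std::aligned_alloc(std::size_t(alignment), num_bytes);
  }
  void operator delete(void *mem)
  {
    std::free(mem);
  }
  void operator delete(void *mem, std::align_val_t /*alignment*/)
  {
    std::free(mem);
  }
};

int main()
{
  OverAligned *data = new OverAligned(); /* Calls the align_val_t overload. */
  std::printf("64-byte aligned: %d\n", int(reinterpret_cast<std::uintptr_t>(data) % 64 == 0));
  delete data;
  return 0;
}
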
@ -0,0 +1 @@
Subproject commit b3ce9f92ca40ab491aa1644ea7a85d1e6116bf5a

@ -0,0 +1 @@
Subproject commit 57e1be05cf7d4ee0fe53980f93336892c65e148a

@ -0,0 +1 @@
Subproject commit 8d7216600248a1f79836bc97b23b49cf1cb0509e

@ -0,0 +1 @@
Subproject commit 71e6832c48d25b4929bcdfc1637ababab843e551

make.bat

@ -56,11 +56,6 @@ if "%BUILD_VS_YEAR%" == "" (
)
)

if "%SVN_FIX%" == "1" (
call "%BLENDER_DIR%\build_files\windows\svn_fix.cmd"
goto EOF
)

if "%BUILD_UPDATE%" == "1" (
REM First see if the SVN libs are there and check them out if they are not.
call "%BLENDER_DIR%\build_files\windows\check_libraries.cmd"

@ -70,7 +65,7 @@ if "%BUILD_UPDATE%" == "1" (
REM running tends to be problematic. The python script that update_sources
REM calls later on may still try to switch branches and run into trouble,
REM but for *most* people this will side step the problem.
call "%BLENDER_DIR%\build_files\windows\svn_update.cmd"
call "%BLENDER_DIR%\build_files\windows\lib_update.cmd"
)
REM Finally call the python script shared between all platforms that updates git
REM and does any other SVN work like update the tests or branch switches

@ -0,0 +1 @@
Subproject commit 6e43ac4de4474de1c3b00baceaaa85b79d884222

|
@ -57,7 +57,7 @@ if not os.path.exists(blender_bin):
|
|||
blender_bin = blender_app_path
|
||||
|
||||
icons_blend = (
|
||||
os.path.join(ROOTDIR, "..", "lib", "resources", "icon_geom.blend"),
|
||||
os.path.join(ROOTDIR, "release", "datafiles", "assets", "icons", "toolbar.blend"),
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
import bpy
|
||||
from bpy.types import Panel
|
||||
from rna_prop_ui import PropertyPanel
|
||||
import rna_prop_ui
|
||||
|
||||
from bpy.app.translations import contexts as i18n_contexts
|
||||
|
||||
|
@ -260,6 +260,16 @@ class BONE_PT_collections(BoneButtonsPanel, Panel):
|
|||
layout.use_property_split = False
|
||||
|
||||
bone = context.bone or context.edit_bone
|
||||
object = context.pose_object or context.edit_object or context.object
|
||||
if not object:
|
||||
layout.active = False
|
||||
sub = layout.column(align=True)
|
||||
sub.label(text="Cannot figure out which object this bone belongs to.")
|
||||
sub.label(text="Please file a bug report.")
|
||||
return
|
||||
|
||||
armature = object.data
|
||||
is_solo_active = armature.collections.is_solo_active
|
||||
|
||||
if not bone.collections:
|
||||
layout.active = False
|
||||
|
@ -275,8 +285,15 @@ class BONE_PT_collections(BoneButtonsPanel, Panel):
|
|||
# Name & visibility of bcoll. Safe things, so aligned together.
|
||||
row = bcoll_row.row(align=True)
|
||||
row.label(text=bcoll.name)
|
||||
row.prop(bcoll, "is_visible", text="",
|
||||
icon='HIDE_OFF' if bcoll.is_visible else 'HIDE_ON')
|
||||
|
||||
# Sub-layout that's dimmed when the bone collection's own visibility flag doesn't matter.
|
||||
sub_visible = row.row(align=True)
|
||||
sub_visible.active = (not is_solo_active) and bcoll.is_visible_ancestors
|
||||
sub_visible.prop(bcoll, "is_visible", text="",
|
||||
icon='HIDE_OFF' if bcoll.is_visible else 'HIDE_ON')
|
||||
|
||||
row.prop(bcoll, "is_solo", text="",
|
||||
icon='SOLO_ON' if bcoll.is_solo else 'SOLO_OFF')
|
||||
|
||||
# Unassignment operator, less safe so with a bit of spacing.
|
||||
props = bcoll_row.operator("armature.collection_unassign_named",
|
||||
|
@ -530,7 +547,7 @@ class BONE_PT_deform(BoneButtonsPanel, Panel):
|
|||
col.prop(bone, "tail_radius", text="Tail")
|
||||
|
||||
|
||||
class BONE_PT_custom_props(BoneButtonsPanel, PropertyPanel, Panel):
|
||||
class BONE_PT_custom_props(BoneButtonsPanel, rna_prop_ui.PropertyPanel, Panel):
|
||||
COMPAT_ENGINES = {
|
||||
'BLENDER_RENDER',
|
||||
'BLENDER_EEVEE',
|
||||
|
@ -539,14 +556,45 @@ class BONE_PT_custom_props(BoneButtonsPanel, PropertyPanel, Panel):
|
|||
}
|
||||
_property_type = bpy.types.Bone, bpy.types.EditBone, bpy.types.PoseBone
|
||||
|
||||
@property
|
||||
def _context_path(self):
|
||||
obj = bpy.context.object
|
||||
if obj and obj.mode == 'POSE':
|
||||
return "active_pose_bone"
|
||||
else:
|
||||
@classmethod
|
||||
def _poll(cls, context):
|
||||
context_path = cls._get_context_path(context)
|
||||
rna_item, _context_member = rna_prop_ui.rna_idprop_context_value(
|
||||
context, context_path, cls._property_type)
|
||||
return bool(rna_item)
|
||||
|
||||
def draw(self, context):
|
||||
context_path = self._get_context_path(context)
|
||||
rna_prop_ui.draw(self.layout, context, context_path, self._property_type)
|
||||
|
||||
@classmethod
|
||||
def _get_context_path(self, context):
|
||||
obj = context.object
|
||||
if not obj:
|
||||
# We have to return _something_. If there is some bone by some
|
||||
# miracle, just use it.
|
||||
return "bone"
|
||||
|
||||
if obj.mode != 'POSE':
|
||||
# Outside of pose mode, active_bone is the one to use. It's either a
|
||||
# Bone or an EditBone, depending on the mode.
|
||||
return "active_bone"
|
||||
|
||||
if context.active_pose_bone is not None:
|
||||
# There is an active pose bone, so use it.
|
||||
return "active_pose_bone"
|
||||
|
||||
# When the active bone is hidden, `context.active_pose_bone` is None, but
|
||||
# `context.bone` still points to it. Use that to still get the pose bone.
|
||||
if context.bone is None:
|
||||
# If there is no active bone, let the rest of the code refer to the
|
||||
# also-None active pose bone, as that's more appropriate given we're
|
||||
# currently in pose mode.
|
||||
return "active_pose_bone"
|
||||
|
||||
bone_path = obj.pose.bones[context.bone.name].path_from_id()
|
||||
return f"object.{bone_path}"
|
||||
|
||||
|
||||
classes = (
|
||||
BONE_PT_context_bone,
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
namespace blender::asset_system::tests {
|
||||
|
||||
/* UUIDs from lib/tests/asset_library/blender_assets.cats.txt */
|
||||
/* UUIDs from tests/data/asset_library/blender_assets.cats.txt */
|
||||
const bUUID UUID_ID_WITHOUT_PATH("e34dd2c5-5d2e-4668-9794-1db5de2a4f71");
|
||||
const bUUID UUID_POSES_ELLIE("df60e1f6-2259-475b-93d9-69a1b4a8db78");
|
||||
const bUUID UUID_POSES_ELLIE_WHITESPACE("b06132f6-5687-4751-a6dd-392740eb3c46");
|
||||
|
@ -31,7 +31,7 @@ const bUUID UUID_POSES_RUZENA_FACE("82162c1f-06cc-4d91-a9bf-4f72c104e348");
|
|||
const bUUID UUID_WITHOUT_SIMPLENAME("d7916a31-6ca9-4909-955f-182ca2b81fa3");
|
||||
const bUUID UUID_ANOTHER_RUZENA("00000000-d9fa-4b91-b704-e6af1f1339ef");
|
||||
|
||||
/* UUIDs from lib/tests/asset_library/modified_assets.cats.txt */
|
||||
/* UUIDs from tests/data/asset_library/modified_assets.cats.txt */
|
||||
const bUUID UUID_AGENT_47("c5744ba5-43f5-4f73-8e52-010ad4a61b34");
|
||||
|
||||
/* Subclass that adds accessors such that protected fields can be used in tests. */
|
||||
|
|
|
@ -15,3 +15,9 @@ struct Library;
struct Main;

void BKE_library_filepath_set(Main *bmain, Library *lib, const char *filepath);

/**
* Rebuild the hierarchy of libraries after e.g. deleting or relocating one. Otherwise some
* indirectly linked libraries lose their 'parent' pointer, wrongly marking them as directly used.
*/
void BKE_library_main_rebuild_hierarchy(Main *bmain);

|
@ -479,6 +479,11 @@ inline const bNode *bNodeTree::group_output_node() const
return this->runtime->group_output_node;
}

inline blender::Span<bNode *> bNodeTree::group_input_nodes()
{
return this->nodes_by_type("NodeGroupInput");
}

inline blender::Span<const bNode *> bNodeTree::group_input_nodes() const
{
return this->nodes_by_type("NodeGroupInput");

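These inline accessors give bNodeTree a group_input_nodes() counterpart to the existing group_output_node(), returning every "NodeGroupInput" node at once. A brief usage sketch, mirroring how the node-tree update code further below iterates them (the caller and its purpose are hypothetical):

/* `ntree` is a bNodeTree reference. */
for (bNode *input_node : ntree.group_input_nodes()) {
  /* Inspect or update the node's output sockets here. */
}
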
|
@ -141,8 +141,9 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,
return nullptr;
}

const int expected_length = avs->path_ef - avs->path_sf;
BLI_assert(expected_length > 0); /* Because the `if` above. */
/* Adding 1 because the avs range is inclusive on both ends. */
const int expected_length = (avs->path_ef - avs->path_sf) + 1;
BLI_assert(expected_length > 1); /* Because the `if` above. */

/* If there is already a motionpath, just return that, provided its settings
* are ok (saves extra free+alloc). */

@ -159,7 +160,7 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,
/* Only reuse a path if it was already a valid path, and of the expected length. */
if (mpath->start_frame != mpath->end_frame && mpath->length == expected_length) {
mpath->start_frame = avs->path_sf;
mpath->end_frame = avs->path_ef;
mpath->end_frame = avs->path_ef + 1;
return mpath;
}

@ -173,7 +174,7 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,

/* Copy mpath settings from the viz settings. */
mpath->start_frame = avs->path_sf;
mpath->end_frame = avs->path_ef;
mpath->end_frame = avs->path_ef + 1;
mpath->length = expected_length;

if (avs->path_bakeflag & MOTIONPATH_BAKE_HEADS) {

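A quick worked example of the off-by-one fixed here, with hypothetical settings: a motion path from frame 1 to frame 250 is inclusive on both ends, so it needs 250 cached points, not 249.

/* Hypothetical values. */
const int path_sf = 1, path_ef = 250;                 /* User-facing range, both inclusive. */
const int expected_length = (path_ef - path_sf) + 1;  /* 250 points. */
/* Stored on the path with an exclusive end, matching the updated DNA comments:
 * mpath->start_frame = 1, mpath->end_frame = 251, mpath->length = 250. */
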
|
|
@ -43,6 +43,7 @@
|
|||
#include "BKE_lib_override.hh"
|
||||
#include "BKE_lib_query.hh"
|
||||
#include "BKE_lib_remap.hh"
|
||||
#include "BKE_library.hh"
|
||||
#include "BKE_main.hh"
|
||||
#include "BKE_main_namemap.hh"
|
||||
#include "BKE_material.h"
|
||||
|
@ -1894,7 +1895,7 @@ void BKE_blendfile_library_relocate(BlendfileLinkAppendContext *lapp_context,
|
|||
if (lib->id.tag & LIB_TAG_DOIT) {
|
||||
id_us_clear_real(&lib->id);
|
||||
if (lib->id.us == 0) {
|
||||
BKE_id_free(bmain, (ID *)lib);
|
||||
BKE_id_delete(bmain, lib);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1916,6 +1917,8 @@ void BKE_blendfile_library_relocate(BlendfileLinkAppendContext *lapp_context,
|
|||
}
|
||||
FOREACH_MAIN_ID_END;
|
||||
|
||||
BKE_library_main_rebuild_hierarchy(bmain);
|
||||
|
||||
/* Resync overrides if needed. */
|
||||
if (!USER_EXPERIMENTAL_TEST(&U, no_override_auto_resync)) {
|
||||
BlendFileReadReport report{};
|
||||
|
|
|
@ -963,6 +963,7 @@ void BKE_gpencil_modifier_blend_read_data(BlendDataReader *reader, ListBase *lb,
|
|||
BLO_read_data_address(reader, &hmd->curfalloff);
|
||||
if (hmd->curfalloff) {
|
||||
BKE_curvemapping_blend_read(reader, hmd->curfalloff);
|
||||
BKE_curvemapping_init(hmd->curfalloff);
|
||||
}
|
||||
}
|
||||
else if (md->type == eGpencilModifierType_Noise) {
|
||||
|
|
|
@ -31,6 +31,7 @@
|
|||
#include "BKE_lib_id.hh"
|
||||
#include "BKE_lib_override.hh"
|
||||
#include "BKE_lib_remap.hh"
|
||||
#include "BKE_library.hh"
|
||||
#include "BKE_main.hh"
|
||||
#include "BKE_main_namemap.hh"
|
||||
|
||||
|
@ -213,9 +214,15 @@ void BKE_id_free_us(Main *bmain, void *idv) /* test users */
|
|||
}
|
||||
|
||||
if (id->us == 0) {
|
||||
const bool is_lib = GS(id->name) == ID_LI;
|
||||
|
||||
BKE_libblock_unlink(bmain, id, false, false);
|
||||
|
||||
BKE_id_free(bmain, id);
|
||||
|
||||
if (is_lib) {
|
||||
BKE_library_main_rebuild_hierarchy(bmain);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -226,6 +233,7 @@ static size_t id_delete(Main *bmain,
|
|||
const int tag = LIB_TAG_DOIT;
|
||||
ListBase *lbarray[INDEX_ID_MAX];
|
||||
int base_count, i;
|
||||
bool has_deleted_library = false;
|
||||
|
||||
/* Used by batch tagged deletion, when we call BKE_id_free then, id is no more in Main database,
|
||||
* and has already properly unlinked its other IDs usages.
|
||||
|
@ -400,6 +408,9 @@ static size_t id_delete(Main *bmain,
|
|||
ID_REAL_USERS(id),
|
||||
(id->tag & LIB_TAG_EXTRAUSER_SET) != 0 ? 1 : 0);
|
||||
}
|
||||
if (!has_deleted_library && GS(id->name) == ID_LI) {
|
||||
has_deleted_library = true;
|
||||
}
|
||||
id_free(bmain, id, free_flag, !do_tagged_deletion);
|
||||
++num_datablocks_deleted;
|
||||
}
|
||||
|
@ -409,6 +420,10 @@ static size_t id_delete(Main *bmain,
|
|||
BKE_layer_collection_resync_allow();
|
||||
BKE_main_collection_sync_remap(bmain);
|
||||
|
||||
if (has_deleted_library) {
|
||||
BKE_library_main_rebuild_hierarchy(bmain);
|
||||
}
|
||||
|
||||
bmain->is_memfile_undo_written = false;
|
||||
return num_datablocks_deleted;
|
||||
}
|
||||
|
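Worth noting in the hunk above: the hierarchy rebuild is deliberately deferred. Deleting a Library ID inside the loop only sets has_deleted_library, and BKE_library_main_rebuild_hierarchy() runs once after the batch, since it recreates and walks all of Main's ID relations. A hypothetical sketch of that "flag now, fix up once" shape (everything except the BKE call is made up for illustration):

bool needs_hierarchy_rebuild = false;
for (ID *id : ids_to_delete) {       /* Hypothetical container of tagged IDs. */
  if (GS(id->name) == ID_LI) {
    needs_hierarchy_rebuild = true;  /* Remember it, but do not rebuild inside the loop. */
  }
  id_free(bmain, id, free_flag, false);
}
if (needs_hierarchy_rebuild) {
  BKE_library_main_rebuild_hierarchy(bmain);  /* Relations-wide work, paid once. */
}
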
|
|
@ -18,6 +18,8 @@
|
|||
#include "BLI_utildefines.h"
|
||||
|
||||
#include "BLI_blenlib.h"
|
||||
#include "BLI_ghash.h"
|
||||
#include "BLI_set.hh"
|
||||
|
||||
#include "BLT_translation.h"
|
||||
|
||||
|
@ -136,3 +138,158 @@ void BKE_library_filepath_set(Main *bmain, Library *lib, const char *filepath)
|
|||
BLI_path_abs(lib->filepath_abs, blendfile_path);
|
||||
}
|
||||
}
|
||||
|
||||
static void rebuild_hierarchy_best_parent_find(Main *bmain,
|
||||
blender::Set<Library *> &directly_used_libs,
|
||||
Library *lib)
|
||||
{
|
||||
BLI_assert(!directly_used_libs.contains(lib));
|
||||
|
||||
Library *best_parent_lib = nullptr;
|
||||
bool do_break = false;
|
||||
ListBase *lb;
|
||||
ID *id_iter;
|
||||
FOREACH_MAIN_LISTBASE_BEGIN (bmain, lb) {
|
||||
FOREACH_MAIN_LISTBASE_ID_BEGIN (lb, id_iter) {
|
||||
if (!ID_IS_LINKED(id_iter) || id_iter->lib != lib) {
|
||||
continue;
|
||||
}
|
||||
MainIDRelationsEntry *entry = static_cast<MainIDRelationsEntry *>(
|
||||
BLI_ghash_lookup(bmain->relations->relations_from_pointers, id_iter));
|
||||
for (MainIDRelationsEntryItem *item = entry->from_ids; item; item = item->next) {
|
||||
ID *from_id = item->id_pointer.from;
|
||||
if (!ID_IS_LINKED(from_id)) {
|
||||
BLI_assert_unreachable();
|
||||
continue;
|
||||
}
|
||||
Library *from_id_lib = from_id->lib;
|
||||
if (from_id_lib == lib) {
|
||||
continue;
|
||||
}
|
||||
if (directly_used_libs.contains(from_id_lib)) {
|
||||
/* Found the first best possible candidate, no need to search further. */
|
||||
BLI_assert(best_parent_lib == nullptr || best_parent_lib->temp_index > 0);
|
||||
best_parent_lib = from_id_lib;
|
||||
do_break = true;
|
||||
break;
|
||||
}
|
||||
if (!from_id_lib->parent) {
|
||||
rebuild_hierarchy_best_parent_find(bmain, directly_used_libs, from_id_lib);
|
||||
}
|
||||
if (!best_parent_lib || best_parent_lib->temp_index > from_id_lib->temp_index) {
|
||||
best_parent_lib = from_id_lib;
|
||||
if (best_parent_lib->temp_index == 0) {
|
||||
/* Found the first best possible candidate, no need to search further. */
|
||||
BLI_assert(directly_used_libs.contains(best_parent_lib));
|
||||
do_break = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (do_break) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
FOREACH_MAIN_LISTBASE_ID_END;
|
||||
if (do_break) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
FOREACH_MAIN_LISTBASE_END;
|
||||
|
||||
/* NOTE: It may happen that no parent library is found, e.g. if after deleting a directly used
|
||||
* library, its indirect dependency is still around, but none of its linked IDs are used by local
|
||||
* data. */
|
||||
if (best_parent_lib) {
|
||||
lib->parent = best_parent_lib;
|
||||
lib->temp_index = best_parent_lib->temp_index + 1;
|
||||
}
|
||||
else {
|
||||
lib->parent = nullptr;
|
||||
lib->temp_index = 0;
|
||||
directly_used_libs.add(lib);
|
||||
}
|
||||
}
|
||||
|
||||
void BKE_library_main_rebuild_hierarchy(Main *bmain)
|
||||
{
|
||||
BKE_main_relations_create(bmain, 0);
|
||||
|
||||
/* Find all libraries with directly linked IDs (i.e. IDs used by local data). */
|
||||
blender::Set<Library *> directly_used_libs;
|
||||
ID *id_iter;
|
||||
FOREACH_MAIN_ID_BEGIN (bmain, id_iter) {
|
||||
if (!ID_IS_LINKED(id_iter)) {
|
||||
continue;
|
||||
}
|
||||
id_iter->lib->temp_index = 0;
|
||||
if (directly_used_libs.contains(id_iter->lib)) {
|
||||
continue;
|
||||
}
|
||||
MainIDRelationsEntry *entry = static_cast<MainIDRelationsEntry *>(
|
||||
BLI_ghash_lookup(bmain->relations->relations_from_pointers, id_iter));
|
||||
for (MainIDRelationsEntryItem *item = entry->from_ids; item; item = item->next) {
|
||||
if (!ID_IS_LINKED(item->id_pointer.from)) {
|
||||
directly_used_libs.add(id_iter->lib);
|
||||
id_iter->lib->parent = nullptr;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
FOREACH_MAIN_ID_END;
|
||||
|
||||
LISTBASE_FOREACH (Library *, lib_iter, &bmain->libraries) {
|
||||
/* A directly used library. */
|
||||
if (directly_used_libs.contains(lib_iter)) {
|
||||
BLI_assert(lib_iter->temp_index == 0);
|
||||
continue;
|
||||
}
|
||||
|
||||
/* Assume existing parent is still valid, since it was not cleared in previous loop above.
|
||||
* Just compute 'hierarchy value' in temp index, if needed. */
|
||||
if (lib_iter->parent) {
|
||||
if (lib_iter->temp_index > 0) {
|
||||
continue;
|
||||
}
|
||||
blender::Vector<Library *> parent_libraries;
|
||||
for (Library *parent_lib_iter = lib_iter;
|
||||
parent_lib_iter && parent_lib_iter->temp_index == 0;
|
||||
parent_lib_iter = parent_lib_iter->parent)
|
||||
{
|
||||
parent_libraries.append(parent_lib_iter);
|
||||
}
|
||||
int parent_temp_index = parent_libraries.last()->temp_index + int(parent_libraries.size()) -
|
||||
1;
|
||||
for (Library *parent_lib_iter : parent_libraries) {
|
||||
BLI_assert(parent_lib_iter != parent_libraries.last() ||
|
||||
parent_lib_iter->temp_index == parent_temp_index);
|
||||
parent_lib_iter->temp_index = parent_temp_index--;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
/* Otherwise, it's an indirectly used library with no known parent. Another loop is needed to
* ensure all known hierarchy has valid indices when trying to find the best valid parent
* library. */
|
||||
}
|
||||
|
||||
/* For all libraries known to be indirect, but without a known parent, find a best valid parent
|
||||
* (i.e. a 'most directly used' library). */
|
||||
LISTBASE_FOREACH (Library *, lib_iter, &bmain->libraries) {
|
||||
/* A directly used library. */
|
||||
if (directly_used_libs.contains(lib_iter)) {
|
||||
BLI_assert(lib_iter->temp_index == 0);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (lib_iter->parent) {
|
||||
BLI_assert(lib_iter->temp_index > 0);
|
||||
}
|
||||
else {
|
||||
BLI_assert(lib_iter->temp_index == 0);
|
||||
rebuild_hierarchy_best_parent_find(bmain, directly_used_libs, lib_iter);
|
||||
}
|
||||
}
|
||||
|
||||
BKE_main_relations_free(bmain);
|
||||
}
|
||||
|
|
|
@ -905,6 +905,58 @@ class NodeTreeMainUpdater {
|
|||
}
|
||||
}
|
||||
|
||||
/* Find conflicts between corresponding menu sockets on different group input nodes. */
|
||||
const Span<bNode *> group_input_nodes = ntree.group_input_nodes();
|
||||
for (const int interface_input_i : ntree.interface_inputs().index_range()) {
|
||||
const bNodeTreeInterfaceSocket &interface_socket =
|
||||
*ntree.interface_inputs()[interface_input_i];
|
||||
if (interface_socket.socket_type != StringRef("NodeSocketMenu")) {
|
||||
continue;
|
||||
}
|
||||
const RuntimeNodeEnumItems *found_enum_items = nullptr;
|
||||
bool found_conflict = false;
|
||||
for (bNode *input_node : group_input_nodes) {
|
||||
const bNodeSocket &socket = input_node->output_socket(interface_input_i);
|
||||
const auto &socket_value = *socket.default_value_typed<bNodeSocketValueMenu>();
|
||||
if (socket_value.has_conflict()) {
|
||||
found_conflict = true;
|
||||
break;
|
||||
}
|
||||
if (found_enum_items == nullptr) {
|
||||
found_enum_items = socket_value.enum_items;
|
||||
}
|
||||
else if (socket_value.enum_items != nullptr) {
|
||||
if (found_enum_items != socket_value.enum_items) {
|
||||
found_conflict = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (found_conflict) {
|
||||
/* Make sure that all group input sockets know that there is a conflict. */
|
||||
for (bNode *input_node : group_input_nodes) {
|
||||
bNodeSocket &socket = input_node->output_socket(interface_input_i);
|
||||
auto &socket_value = *socket.default_value_typed<bNodeSocketValueMenu>();
|
||||
if (socket_value.enum_items) {
|
||||
socket_value.enum_items->remove_user_and_delete_if_last();
|
||||
socket_value.enum_items = nullptr;
|
||||
}
|
||||
socket_value.runtime_flag |= NodeSocketValueMenuRuntimeFlag::NODE_MENU_ITEMS_CONFLICT;
|
||||
}
|
||||
}
|
||||
else if (found_enum_items != nullptr) {
|
||||
/* Make sure all corresponding menu sockets have the same menu reference. */
|
||||
for (bNode *input_node : group_input_nodes) {
|
||||
bNodeSocket &socket = input_node->output_socket(interface_input_i);
|
||||
auto &socket_value = *socket.default_value_typed<bNodeSocketValueMenu>();
|
||||
if (socket_value.enum_items == nullptr) {
|
||||
found_enum_items->add_user();
|
||||
socket_value.enum_items = found_enum_items;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Build list of new enum items for the node tree interface. */
|
||||
Vector<bNodeSocketValueMenu> interface_enum_items(ntree.interface_inputs().size(), {0});
|
||||
for (const bNode *group_input_node : ntree.group_input_nodes()) {
|
||||
|
|
|
@ -480,7 +480,18 @@ int BLI_rename(const char *from, const char *to)
#elif defined(__GLIBC_PREREQ)
# if __GLIBC_PREREQ(2, 28)
/* Most common Linux cases. */
return renameat2(AT_FDCWD, from, AT_FDCWD, to, RENAME_NOREPLACE);
int ret = renameat2(AT_FDCWD, from, AT_FDCWD, to, RENAME_NOREPLACE);
if (ret < 0 && errno == EINVAL) {
/* Most likely a filesystem that doesn't support RENAME_NOREPLACE.
* (For example NFS, Samba, exFAT, NTFS, etc)
* Retry with a non atomic operation.
*/
if (BLI_exists(to)) {
return 1;
}
return rename(from, to);
}
return ret;
# endif
#else
/* At least all BSD's currently. */

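Outside of Blender's wrappers, the same syscall pattern looks like the sketch below: renameat2() with RENAME_NOREPLACE fails with EINVAL on filesystems that lack support for the flag, and the caller then falls back to an existence check plus a plain rename(), accepting the small race window. This assumes Linux with glibc 2.28+; the header that provides the RENAME_* constants varies between glibc versions (older setups need <linux/fs.h>):

#define _GNU_SOURCE
#include <cerrno>
#include <cstdio>
#include <fcntl.h>  /* AT_FDCWD */
#include <unistd.h> /* access() */

static int rename_without_replace(const char *from, const char *to)
{
  const int ret = renameat2(AT_FDCWD, from, AT_FDCWD, to, RENAME_NOREPLACE);
  if (ret < 0 && errno == EINVAL) {
    /* RENAME_NOREPLACE unsupported (NFS, Samba, exFAT, ...): emulate non-atomically. */
    if (access(to, F_OK) == 0) {
      return 1; /* Refuse to overwrite an existing target. */
    }
    return rename(from, to);
  }
  return ret;
}
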
|
|
@ -9,7 +9,7 @@ class BlendfileLoadingTest : public BlendfileLoadingBaseTest {};
|
|||
|
||||
TEST_F(BlendfileLoadingTest, CanaryTest)
|
||||
{
|
||||
/* Load the smallest blend file we have in the SVN lib/tests directory. */
|
||||
/* Load the smallest blend file we have in the tests/data directory. */
|
||||
if (!blendfile_load("modifier_stack" SEP_STR "array_test.blend")) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -25,10 +25,10 @@ class BlendfileLoadingBaseTest : public testing::Test {
|
|||
/* Frees the depsgraph & blendfile. */
|
||||
virtual void TearDown();
|
||||
|
||||
/* Loads a blend file from the lib/tests directory from SVN.
|
||||
/* Loads a blend file from the tests/data directory from SVN.
|
||||
* Returns 'ok' flag (true=good, false=bad) and sets this->bfile.
|
||||
* Fails the test if the file cannot be loaded (still returns though).
|
||||
* Requires the CLI argument --test-asset-dir to point to ../../lib/tests.
|
||||
* Requires the CLI argument --test-asset-dir to point to ../tests/data.
|
||||
*
|
||||
* WARNING: only files saved with Blender 2.80+ can be loaded. Since Blender
|
||||
* is only partially initialized (most importantly, without window manager),
|
||||
|
|
|
@ -93,7 +93,7 @@ static void motion_path_get_frame_range_to_draw(bAnimVizSettings *avs,
|
|||
}
|
||||
else {
|
||||
start = avs->path_sf;
|
||||
end = avs->path_ef;
|
||||
end = avs->path_ef + 1;
|
||||
}
|
||||
|
||||
if (start > end) {
|
||||
|
|
|
@ -4568,7 +4568,7 @@ static bool achannel_is_broken(const bAnimListElem *ale)
|
|||
const FCurve *fcu = static_cast<const FCurve *>(ale->data);
|
||||
|
||||
/* The channel is disabled (has a bad rna path), or it's a driver that failed to evaluate. */
|
||||
return (ale->flag & FCURVE_DISABLED) ||
|
||||
return (fcu->flag & FCURVE_DISABLED) ||
|
||||
(fcu->driver != nullptr && (fcu->driver->flag & DRIVER_FLAG_INVALID));
|
||||
}
|
||||
default:
|
||||
|
|
|
@ -3271,6 +3271,9 @@ static void ui_draw_but_HSVCUBE(uiBut *but, const rcti *rect)
|
|||
float *hsv = cpicker->hsv_perceptual;
|
||||
float hsv_n[3];
|
||||
|
||||
/* Is this the larger color canvas or narrow color slider? */
|
||||
bool is_canvas = ELEM(hsv_but->gradient_type, UI_GRAD_SV, UI_GRAD_HV, UI_GRAD_HS);
|
||||
|
||||
/* Initialize for compatibility. */
|
||||
copy_v3_v3(hsv_n, hsv);
|
||||
|
||||
|
@ -3292,15 +3295,15 @@ static void ui_draw_but_HSVCUBE(uiBut *but, const rcti *rect)
|
|||
imm_draw_box_wire_2d(pos, (rect->xmin), (rect->ymin), (rect->xmax), (rect->ymax));
|
||||
immUnbindProgram();
|
||||
|
||||
if (BLI_rcti_size_x(rect) / BLI_rcti_size_y(rect) < 3) {
|
||||
/* This is for the full square HSV cube. */
|
||||
if (is_canvas) {
|
||||
/* Round cursor in the large square area. */
|
||||
float margin = (4.0f * UI_SCALE_FAC);
|
||||
CLAMP(x, rect->xmin + margin, rect->xmax - margin);
|
||||
CLAMP(y, rect->ymin + margin, rect->ymax - margin);
|
||||
ui_hsv_cursor(x, y, zoom, rgb, hsv, but->flag & UI_SELECT);
|
||||
}
|
||||
else {
|
||||
/* This is for the narrow horizontal gradient. */
|
||||
/* Square indicator in the narrow area. */
|
||||
rctf rectf;
|
||||
BLI_rctf_rcti_copy(&rectf, rect);
|
||||
const float margin = (2.0f * UI_SCALE_FAC);
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
#include "WM_api.hh"
|
||||
#include "WM_types.hh"
|
||||
|
||||
#include "ED_object.hh"
|
||||
#include "ED_screen.hh"
|
||||
|
||||
#include "DNA_array_utils.hh"
|
||||
|
@ -216,6 +217,12 @@ static bool bake_simulation_poll(bContext *C)
|
|||
CTX_wm_operator_poll_msg_set(C, "File must be saved before baking");
|
||||
return false;
|
||||
}
|
||||
Object *ob = ED_object_active_context(C);
|
||||
const bool use_frame_cache = ob->flag & OB_FLAG_USE_SIMULATION_CACHE;
|
||||
if (!use_frame_cache) {
|
||||
CTX_wm_operator_poll_msg_set(C, "Cache has to be enabled");
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
|
|
@ -821,26 +821,28 @@ static void renamebutton_cb(bContext *C, void * /*arg1*/, char *oldname)
|
|||
BLI_path_join(newname, sizeof(newname), params->dir, filename);
|
||||
|
||||
if (!STREQ(orgname, newname)) {
|
||||
if (!BLI_exists(newname)) {
|
||||
errno = 0;
|
||||
if ((BLI_rename(orgname, newname) != 0) || !BLI_exists(newname)) {
|
||||
WM_reportf(RPT_ERROR, "Could not rename: %s", errno ? strerror(errno) : "unknown error");
|
||||
WM_report_banner_show(wm, win);
|
||||
}
|
||||
else {
|
||||
/* If rename is successful, scroll to newly renamed entry. */
|
||||
STRNCPY(params->renamefile, filename);
|
||||
file_params_invoke_rename_postscroll(wm, win, sfile);
|
||||
}
|
||||
|
||||
/* to make sure we show what is on disk */
|
||||
ED_fileselect_clear(wm, sfile);
|
||||
}
|
||||
else {
|
||||
errno = 0;
|
||||
if ((BLI_rename(orgname, newname) != 0) || !BLI_exists(newname)) {
|
||||
WM_reportf(RPT_ERROR, "Could not rename: %s", errno ? strerror(errno) : "unknown error");
|
||||
WM_report_banner_show(wm, win);
|
||||
/* Renaming failed, reset the name for further renaming handling. */
|
||||
STRNCPY(params->renamefile, oldname);
|
||||
}
|
||||
else {
|
||||
/* If rename is successful, set renamefile to newly renamed entry.
|
||||
* This is used later to select and scroll to the file.
|
||||
*/
|
||||
STRNCPY(params->renamefile, filename);
|
||||
}
|
||||
|
||||
/* Ensure we select and scroll to the renamed file.
* This is done even if the rename fails, as we want to make sure that the file we tried to
* rename is still selected and in view (it can move if something added files/folders to the
* directory while we were renaming).
*/
|
||||
file_params_invoke_rename_postscroll(wm, win, sfile);
|
||||
/* to make sure we show what is on disk */
|
||||
ED_fileselect_clear(wm, sfile);
|
||||
ED_region_tag_redraw(region);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -279,7 +279,7 @@ eSnapMode SnapData::snap_edge_points_impl(SnapObjectContext *sctx,
|
|||
if (lambda < (range) || (1.0f - range) < lambda) {
|
||||
int v_id = lambda < 0.5f ? 0 : 1;
|
||||
|
||||
if (this->snap_point(v_pair[v_id], v_id)) {
|
||||
if (this->snap_point(v_pair[v_id], vindex[v_id])) {
|
||||
elem = SCE_SNAP_TO_EDGE_ENDPOINT;
|
||||
this->copy_vert_no(vindex[v_id], this->nearest_point.no);
|
||||
}
|
||||
|
|
|
@ -215,7 +215,7 @@ static const std::optional<bke::AttrDomain> convert_usd_varying_to_blender(
|
|||
blender::Map<pxr::TfToken, bke::AttrDomain> map;
|
||||
map.add_new(pxr::UsdGeomTokens->faceVarying, bke::AttrDomain::Corner);
|
||||
map.add_new(pxr::UsdGeomTokens->vertex, bke::AttrDomain::Point);
|
||||
map.add_new(pxr::UsdGeomTokens->varying, bke::AttrDomain::Corner);
|
||||
map.add_new(pxr::UsdGeomTokens->varying, bke::AttrDomain::Point);
|
||||
map.add_new(pxr::UsdGeomTokens->face, bke::AttrDomain::Face);
|
||||
/* As there's no "constant" type in Blender, for now we're
|
||||
* translating into a point Attribute. */
|
||||
|
@ -402,7 +402,7 @@ void USDMeshReader::read_color_data_primvar(Mesh *mesh,
|
|||
pxr::TfToken interp = primvar.GetInterpolation();
|
||||
|
||||
if ((interp == pxr::UsdGeomTokens->faceVarying && usd_colors.size() != mesh->corners_num) ||
|
||||
(interp == pxr::UsdGeomTokens->varying && usd_colors.size() != mesh->corners_num) ||
|
||||
(interp == pxr::UsdGeomTokens->varying && usd_colors.size() != mesh->verts_num) ||
|
||||
(interp == pxr::UsdGeomTokens->vertex && usd_colors.size() != mesh->verts_num) ||
|
||||
(interp == pxr::UsdGeomTokens->constant && usd_colors.size() != 1) ||
|
||||
(interp == pxr::UsdGeomTokens->uniform && usd_colors.size() != mesh->faces_num))
|
||||
|
@ -420,11 +420,7 @@ void USDMeshReader::read_color_data_primvar(Mesh *mesh,
|
|||
|
||||
bke::AttrDomain color_domain = bke::AttrDomain::Point;
|
||||
|
||||
if (ELEM(interp,
|
||||
pxr::UsdGeomTokens->varying,
|
||||
pxr::UsdGeomTokens->faceVarying,
|
||||
pxr::UsdGeomTokens->uniform))
|
||||
{
|
||||
if (ELEM(interp, pxr::UsdGeomTokens->faceVarying, pxr::UsdGeomTokens->uniform)) {
|
||||
color_domain = bke::AttrDomain::Corner;
|
||||
}
|
||||
|
||||
|
@ -445,7 +441,7 @@ void USDMeshReader::read_color_data_primvar(Mesh *mesh,
|
|||
ColorGeometry4f(usd_colors[0][0], usd_colors[0][1], usd_colors[0][2], 1.0f));
|
||||
}
|
||||
/* Check for situations that allow for a straight-forward copy by index. */
|
||||
else if (interp == pxr::UsdGeomTokens->vertex ||
|
||||
else if (interp == pxr::UsdGeomTokens->vertex || interp == pxr::UsdGeomTokens->varying ||
|
||||
(interp == pxr::UsdGeomTokens->faceVarying && !is_left_handed_))
|
||||
{
|
||||
for (int i = 0; i < usd_colors.size(); i++) {
|
||||
|
@ -524,7 +520,7 @@ void USDMeshReader::read_uv_data_primvar(Mesh *mesh,
|
|||
|
||||
if ((varying_type == pxr::UsdGeomTokens->faceVarying && usd_uvs.size() != mesh->corners_num) ||
|
||||
(varying_type == pxr::UsdGeomTokens->vertex && usd_uvs.size() != mesh->verts_num) ||
|
||||
(varying_type == pxr::UsdGeomTokens->varying && usd_uvs.size() != mesh->corners_num))
|
||||
(varying_type == pxr::UsdGeomTokens->varying && usd_uvs.size() != mesh->verts_num))
|
||||
{
|
||||
BKE_reportf(reports(),
|
||||
RPT_WARNING,
|
||||
|
@ -545,7 +541,7 @@ void USDMeshReader::read_uv_data_primvar(Mesh *mesh,
|
|||
return;
|
||||
}
|
||||
|
||||
if (ELEM(varying_type, pxr::UsdGeomTokens->faceVarying, pxr::UsdGeomTokens->varying)) {
|
||||
if (varying_type == pxr::UsdGeomTokens->faceVarying) {
|
||||
if (is_left_handed_) {
|
||||
/* Reverse the index order. */
|
||||
const OffsetIndices faces = mesh->faces();
|
||||
|
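The recurring theme in the USD hunks above is that a "varying" primvar on a polygonal mesh carries one value per point, not per face corner; only its interpolation rule (linear) differs from "vertex". A summary of the expected counts, written as a comment-style sketch based on the USD primvar interpolation rules rather than on additional Blender code:

/* Expected value counts per primvar interpolation on a UsdGeomMesh:
 *   constant    -> 1
 *   uniform     -> faces_num
 *   vertex      -> verts_num   (interpolated with the subdivision scheme)
 *   varying     -> verts_num   (interpolated linearly; same count as vertex)
 *   faceVarying -> corners_num
 * Treating "varying" like "faceVarying" is what the changes above correct. */
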
|
|
@ -4,6 +4,8 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "DEG_depsgraph.hh"
|
||||
|
||||
#include "RNA_types.hh"
|
||||
|
|
|
@ -57,9 +57,9 @@ typedef struct bMotionPath {
|
|||
/** The number of cached verts. */
|
||||
int length;
|
||||
|
||||
/** For drawing paths, the start frame number. */
|
||||
/** For drawing paths, the start frame number. Inclusive. */
|
||||
int start_frame;
|
||||
/** For drawing paths, the end frame number. */
|
||||
/** For drawing paths, the end frame number. Exclusive. */
|
||||
int end_frame;
|
||||
|
||||
/** Optional custom color. */
|
||||
|
@ -113,7 +113,7 @@ typedef struct bAnimVizSettings {
|
|||
short path_bakeflag;
|
||||
char _pad[4];
|
||||
|
||||
/** Start and end frames of path-calculation range. */
|
||||
/** Start and end frames of path-calculation range. Both are inclusive. */
|
||||
int path_sf, path_ef;
|
||||
/** Number of frames before/after current frame to show. */
|
||||
int path_bc, path_ac;
|
||||
|
|
|
@ -787,6 +787,7 @@ typedef struct bNodeTree {
|
|||
bNode *group_output_node();
|
||||
const bNode *group_output_node() const;
|
||||
/** Get all input nodes of the node group. */
|
||||
blender::Span<bNode *> group_input_nodes();
|
||||
blender::Span<const bNode *> group_input_nodes() const;
|
||||
|
||||
/** Zones in the node tree. Currently there are only simulation zones in geometry nodes. */
|
||||
|
|
|
@ -904,12 +904,6 @@ const EnumPropertyItem *RNA_node_tree_interface_socket_menu_itemf(bContext * /*C
|
|||
return RNA_node_enum_definition_itemf(*data->enum_items, r_free);
|
||||
}
|
||||
|
||||
static void rna_NodeTreeInterfaceSocket_idname_set(PointerRNA *ptr, const char *value)
|
||||
{
|
||||
bNodeTreeInterfaceSocket &socket = *static_cast<bNodeTreeInterfaceSocket *>(ptr->data);
|
||||
socket.set_socket_type(value);
|
||||
}
|
||||
|
||||
#else
|
||||
|
||||
static void rna_def_node_interface_item(BlenderRNA *brna)
|
||||
|
@ -1063,8 +1057,6 @@ static void rna_def_node_interface_socket(BlenderRNA *brna)
|
|||
RNA_def_property_string_sdna(prop, nullptr, "socket_type");
|
||||
RNA_def_property_flag(prop, PROP_REGISTER);
|
||||
RNA_def_property_ui_text(prop, "Socket Type Name", "Name of the socket type");
|
||||
RNA_def_property_string_funcs(prop, nullptr, nullptr, "rna_NodeTreeInterfaceSocket_idname_set");
|
||||
RNA_def_property_update(prop, NC_NODE | NA_EDITED, "rna_NodeTreeInterfaceItem_update");
|
||||
|
||||
func = RNA_def_function(srna, "draw", nullptr);
|
||||
RNA_def_function_flag(func, FUNC_REGISTER_OPTIONAL);
|
||||
|
|
|
@ -2502,6 +2502,7 @@ static eHandlerActionFlag wm_handler_operator_call(bContext *C,
|
|||
else {
|
||||
/* Not very common, but modal operators may report before finishing. */
|
||||
if (!BLI_listbase_is_empty(&op->reports->list)) {
|
||||
WM_event_add_notifier(C, NC_SPACE | ND_SPACE_INFO_REPORT, nullptr);
|
||||
WM_reports_from_reports_move(wm, op->reports);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1672,10 +1672,6 @@ install(
|
|||
|
||||
set(ASSET_BUNDLE_DIR ${CMAKE_SOURCE_DIR}/release/datafiles/assets/publish/)
|
||||
|
||||
if(NOT EXISTS "${ASSET_BUNDLE_DIR}")
|
||||
set(ASSET_BUNDLE_DIR ${CMAKE_SOURCE_DIR}/../lib/assets/publish/)
|
||||
endif()
|
||||
|
||||
if(EXISTS "${ASSET_BUNDLE_DIR}")
|
||||
install(
|
||||
DIRECTORY ${ASSET_BUNDLE_DIR}
|
||||
|
|
|
@ -0,0 +1 @@
Subproject commit 48773362f12222a81288a29f1e474b89dd32b3ca

|
@ -14,7 +14,7 @@ namespace blender::tests {
|
|||
|
||||
/* These strings are passed on the CLI with the --test-asset-dir and --test-release-dir arguments.
|
||||
* The arguments are added automatically when invoking tests via `ctest`. */
|
||||
const std::string &flags_test_asset_dir(); /* ../lib/tests in the SVN directory. */
|
||||
const std::string &flags_test_asset_dir(); /* tests/data in the Blender repository. */
|
||||
const std::string &flags_test_release_dir(); /* bin/{blender version} in the build directory. */
|
||||
|
||||
} // namespace blender::tests
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
|
||||
#include "MEM_guardedalloc.h"
|
||||
|
||||
DEFINE_string(test_assets_dir, "", "lib/tests directory from SVN containing the test assets.");
|
||||
DEFINE_string(test_assets_dir, "", "tests/data directory containing the test assets.");
|
||||
DEFINE_string(test_release_dir, "", "bin/{blender version} directory of the current build.");
|
||||
|
||||
namespace blender::tests {
|
||||
|
@ -14,8 +14,7 @@ namespace blender::tests {
|
|||
const std::string &flags_test_asset_dir()
|
||||
{
|
||||
if (FLAGS_test_assets_dir.empty()) {
|
||||
ADD_FAILURE()
|
||||
<< "Pass the flag --test-assets-dir and point to the lib/tests directory from SVN.";
|
||||
ADD_FAILURE() << "Pass the flag --test-assets-dir and point to the tests/data directory.";
|
||||
}
|
||||
return FLAGS_test_assets_dir;
|
||||
}
|
||||
|
|
|
@ -26,7 +26,7 @@ class TestEnvironment:
|
|||
self.build_dir = base_dir / 'build'
|
||||
self.install_dir = self.build_dir / "bin"
|
||||
self.lib_dir = base_dir / 'lib'
|
||||
self.benchmarks_dir = self.blender_git_dir.parent / 'lib' / 'benchmarks'
|
||||
self.benchmarks_dir = self.blender_git_dir / 'tests' / 'benchmarks'
|
||||
self.git_executable = 'git'
|
||||
self.cmake_executable = 'cmake'
|
||||
self.cmake_options = ['-DWITH_INTERNATIONAL=OFF', '-DWITH_BUILDINFO=OFF']
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
# and don't give deterministic results
|
||||
set(USE_EXPERIMENTAL_TESTS FALSE)
|
||||
|
||||
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/../lib/tests)
|
||||
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/tests/data)
|
||||
set(TEST_PYTHON_DIR ${CMAKE_SOURCE_DIR}/tests/python)
|
||||
set(TEST_OUT_DIR ${CMAKE_BINARY_DIR}/tests)
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
"""
|
||||
./blender.bin --background -noaudio --factory-startup --python tests/python/bl_alembic_io_test.py -- --testdir /path/to/lib/tests/alembic
|
||||
./blender.bin --background -noaudio --factory-startup --python tests/python/bl_alembic_io_test.py -- --testdir /path/to/tests/data/alembic
|
||||
"""
|
||||
|
||||
import math
|
||||
|
|
|
@ -9,7 +9,7 @@ import sys
|
|||
from rna_prop_ui import rna_idprop_quote_path
|
||||
|
||||
"""
|
||||
blender -b -noaudio --factory-startup --python tests/python/bl_animation_drivers.py -- --testdir /path/to/lib/tests/animation
|
||||
blender -b -noaudio --factory-startup --python tests/python/bl_animation_drivers.py -- --testdir /path/to/tests/data/animation
|
||||
"""
|
||||
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
"""
|
||||
blender -b -noaudio --factory-startup --python tests/python/bl_animation_fcurves.py -- --testdir /path/to/lib/tests/animation
|
||||
blender -b -noaudio --factory-startup --python tests/python/bl_animation_fcurves.py -- --testdir /path/to/tests/data/animation
|
||||
"""
|
||||
|
||||
import pathlib
|
||||
|
|
|
@ -9,7 +9,7 @@ import sys
|
|||
from math import radians
|
||||
|
||||
"""
|
||||
blender -b -noaudio --factory-startup --python tests/python/bl_animation_keyframing.py -- --testdir /path/to/lib/tests/animation
|
||||
blender -b -noaudio --factory-startup --python tests/python/bl_animation_keyframing.py -- --testdir /path/to/tests/data/animation
|
||||
"""
|
||||
|
||||
|
||||
|
|
|
@ -75,7 +75,7 @@ def argparse_create():
|
|||
|
||||
# When --help or no args are given, print this help
|
||||
description = ("Test basic versioning code by opening all blend files "
|
||||
"in `lib/tests` directory.")
|
||||
"in `tests/data` directory.")
|
||||
parser = argparse.ArgumentParser(description=description)
|
||||
parser.add_argument(
|
||||
"--src-test-dir",
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
"""
|
||||
./blender.bin --background -noaudio --factory-startup --python tests/python/bl_constraints.py -- --testdir /path/to/lib/tests/constraints
|
||||
./blender.bin --background -noaudio --factory-startup --python tests/python/bl_constraints.py -- --testdir /path/to/tests/data/constraints
|
||||
"""
|
||||
|
||||
import pathlib
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
"""
|
||||
blender -b -noaudio --factory-startup --python tests/python/bl_rigging_symmetrize.py -- --testdir /path/to/lib/tests/animation
|
||||
blender -b -noaudio --factory-startup --python tests/python/bl_rigging_symmetrize.py -- --testdir /path/to/tests/data/animation
|
||||
"""
|
||||
|
||||
import pathlib
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
# and don't give deterministic results
|
||||
set(USE_EXPERIMENTAL_TESTS FALSE)
|
||||
|
||||
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/../lib/tests)
|
||||
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/tests/data)
|
||||
set(TEST_OUT_DIR ${CMAKE_BINARY_DIR}/tests)
|
||||
|
||||
# ugh, any better way to do this on testing only?
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
"""
|
||||
Call as follows:
|
||||
python collada_mesh_simple.py --blender PATH_TO_BLENDER_EXE --testdir PATH_TO_SVN/lib/tests/collada/mesh
|
||||
python collada_mesh_simple.py --blender PATH_TO_BLENDER_EXE --testdir tests/data/collada/mesh
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
"""
|
||||
Call as follows:
|
||||
python collada_mesh_simple.py --blender PATH_TO_BLENDER_EXE --testdir PATH_TO_SVN/lib/tests/collada/mesh
|
||||
python collada_mesh_simple.py --blender PATH_TO_BLENDER_EXE --testdir tests/data/collada/mesh
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# To run all tests, use
|
||||
# BLENDER_VERBOSE=1 ./bin/blender ../lib/tests/modeling/curve_to_mesh.blend --python ../blender/tests/python/bl_curve_to_mesh.py -- --run-all-tests
|
||||
# BLENDER_VERBOSE=1 ./bin/blender ../tests/data/modeling/curve_to_mesh.blend --python ../blender/tests/python/bl_curve_to_mesh.py -- --run-all-tests
|
||||
# (that assumes the test is run from a build directory in the same directory as the source code)
|
||||
import bpy
|
||||
import os
|
||||
|
|
|
@ -429,7 +429,7 @@ class Report:
|
|||
"-o", diff_color_img,
|
||||
)
|
||||
try:
|
||||
subprocess.check_output(command)
|
||||
subprocess.check_output(command, stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
if self.verbose:
|
||||
print_message(e.output.decode("utf-8", 'ignore'))
|
||||
|
@ -447,7 +447,7 @@ class Report:
|
|||
"-o", diff_alpha_img,
|
||||
)
|
||||
try:
|
||||
subprocess.check_output(command)
|
||||
subprocess.check_output(command, stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
if self.verbose:
|
||||
msg = e.output.decode("utf-8", 'ignore')
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
|
||||
# Use '--write-blend=/tmp/test.blend' to view output
|
||||
|
||||
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/../lib/tests)
|
||||
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/tests/data)
|
||||
set(TEST_OUT_DIR ${CMAKE_BINARY_DIR}/tests)
|
||||
|
||||
# ugh, any better way to do this on testing only?
|
||||
|
|