Compare commits


1 Commit

Author SHA1 Message Date
2e4e3adb3e Mesh performance tests 2021-05-04 18:02:25 +10:00
2228 changed files with 35956 additions and 72760 deletions

View File

@@ -255,7 +255,6 @@ ForEachMacros:
- SCULPT_VERTEX_DUPLICATES_AND_NEIGHBORS_ITER_BEGIN
- SCULPT_VERTEX_NEIGHBORS_ITER_BEGIN
- SEQ_ALL_BEGIN
- SEQ_ITERATOR_FOREACH
- SURFACE_QUAD_ITER_BEGIN
- foreach
- ED_screen_areas_iter
@@ -265,5 +264,4 @@ ForEachMacros:
- VECTOR_SET_SLOT_PROBING_BEGIN
StatementMacros:
- PyObject_HEAD
- PyObject_VAR_HEAD

View File

@@ -836,7 +836,7 @@ if(WITH_PYTHON)
# because UNIX will search for the old Python paths which may not exist.
# giving errors about missing paths before this case is met.
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.9")
message(FATAL_ERROR "At least Python 3.9 is required to build, but found Python ${PYTHON_VERSION}")
message(FATAL_ERROR "At least Python 3.9 is required to build")
endif()
file(GLOB RESULT "${CMAKE_SOURCE_DIR}/release/scripts/addons")

View File

@@ -29,7 +29,7 @@ set(BLOSC_EXTRA_ARGS
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
)
# Prevent blosc from including its own local copy of zlib in the object file
# Prevent blosc from including it's own local copy of zlib in the object file
# and cause linker errors with everybody else.
set(BLOSC_EXTRA_ARGS ${BLOSC_EXTRA_ARGS}
-DPREFER_EXTERNAL_ZLIB=ON

View File

@@ -43,12 +43,6 @@ endif()
if(WIN32)
set(EMBREE_BUILD_DIR ${BUILD_MODE}/)
if(BUILD_MODE STREQUAL Debug)
list(APPEND EMBREE_EXTRA_ARGS
-DEMBREE_TBBMALLOC_LIBRARY_NAME=tbbmalloc_debug
-DEMBREE_TBB_LIBRARY_NAME=tbb_debug
)
endif()
else()
set(EMBREE_BUILD_DIR)
endif()

View File

@@ -22,7 +22,6 @@ if(WIN32)
-DTBB_BUILD_TBBMALLOC_PROXY=On
-DTBB_BUILD_STATIC=Off
-DTBB_BUILD_TESTS=Off
-DCMAKE_DEBUG_POSTFIX=_debug
)
set(TBB_LIBRARY tbb)
set(TBB_STATIC_LIBRARY Off)
@@ -56,17 +55,17 @@ if(WIN32)
ExternalProject_Add_Step(external_tbb after_install
# findtbb.cmake in some deps *NEEDS* to find tbb_debug.lib even if they are not going to use it
# to make that test pass, we place a copy with the right name in the lib folder.
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${LIBDIR}/tbb/lib/tbb_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb.dll ${LIBDIR}/tbb/bin/tbb_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc.dll ${LIBDIR}/tbb/bin/tbbmalloc_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${HARVEST_TARGET}/tbb/lib/tbb_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.dll ${HARVEST_TARGET}/tbb/lib/tbb_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.dll
# Normal collection of build artifacts
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${HARVEST_TARGET}/tbb/lib/tbb.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb.dll ${HARVEST_TARGET}/tbb/bin/tbb.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.dll ${HARVEST_TARGET}/tbb/lib/tbb.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_proxy.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy.dll
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/tbb/include/ ${HARVEST_TARGET}/tbb/include/
DEPENDEES install
)
@@ -77,12 +76,11 @@ if(WIN32)
# to make that test pass, we place a copy with the right name in the lib folder.
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${LIBDIR}/tbb/lib/tbb.lib
# Normal collection of build artifacts
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${HARVEST_TARGET}/tbb/lib/tbb_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb_debug.dll ${HARVEST_TARGET}/tbb/bin/tbb_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy_debug.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_debug.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_proxy_debug.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_proxy_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${HARVEST_TARGET}/tbb/lib/debug/tbb_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.dll ${HARVEST_TARGET}/tbb/lib/debug/tbb_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/debug/tbbmalloc.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/lib/debug/tbbmalloc_proxy.dll
DEPENDEES install
)
endif()

View File

@@ -43,7 +43,7 @@ set(JPEG_FILE libjpeg-turbo-${JPEG_VERSION}.tar.gz)
set(BOOST_VERSION 1.73.0)
set(BOOST_VERSION_NODOTS 1_73_0)
set(BOOST_VERSION_NODOTS_SHORT 1_73)
set(BOOST_URI https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz)
set(BOOST_URI https://dl.bintray.com/boostorg/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz)
set(BOOST_HASH 4036cd27ef7548b8d29c30ea10956196)
set(BOOST_HASH_TYPE MD5)
set(BOOST_FILE boost_${BOOST_VERSION_NODOTS}.tar.gz)
@@ -297,10 +297,10 @@ set(OPENJPEG_HASH 63f5a4713ecafc86de51bfad89cc07bb788e9bba24ebbf0c4ca637621aadb6
set(OPENJPEG_HASH_TYPE SHA256)
set(OPENJPEG_FILE openjpeg-v${OPENJPEG_VERSION}.tar.gz)
set(FFMPEG_VERSION 4.4)
set(FFMPEG_VERSION 4.2.3)
set(FFMPEG_URI http://ffmpeg.org/releases/ffmpeg-${FFMPEG_VERSION}.tar.bz2)
set(FFMPEG_HASH 42093549751b582cf0f338a21a3664f52e0a9fbe0d238d3c992005e493607d0e)
set(FFMPEG_HASH_TYPE SHA256)
set(FFMPEG_HASH 695fad11f3baf27784e24cb0e977b65a)
set(FFMPEG_HASH_TYPE MD5)
set(FFMPEG_FILE ffmpeg-${FFMPEG_VERSION}.tar.bz2)
set(FFTW_VERSION 3.3.8)
@@ -432,9 +432,9 @@ set(USD_HASH 1dd1e2092d085ed393c1f7c450a4155a)
set(USD_HASH_TYPE MD5)
set(USD_FILE usd-v${USD_VERSION}.tar.gz)
set(OIDN_VERSION 1.4.0)
set(OIDN_VERSION 1.3.0)
set(OIDN_URI https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz)
set(OIDN_HASH 421824019becc5b664a22a2b98332bc5)
set(OIDN_HASH 301a5a0958d375a942014df0679b9270)
set(OIDN_HASH_TYPE MD5)
set(OIDN_FILE oidn-${OIDN_VERSION}.src.tar.gz)

View File

@@ -37,7 +37,7 @@ if [ $USE_DEBUG_TRAP -ne 0 ]; then
trap 'err_report $LINENO' ERR
fi
# Noisy, show every line that runs with its line number.
# Noisy, show every line that runs with it's line number.
if [ $USE_DEBUG_LOG -ne 0 ]; then
PS4='\e[0;33m$(printf %4d ${LINENO}):\e\033[0m '
set -x
@@ -553,18 +553,18 @@ EMBREE_FORCE_BUILD=false
EMBREE_FORCE_REBUILD=false
EMBREE_SKIP=false
OIDN_VERSION="1.4.0"
OIDN_VERSION_SHORT="1.4"
OIDN_VERSION_MIN="1.4.0"
OIDN_VERSION_MAX="1.5"
OIDN_VERSION="1.3.0"
OIDN_VERSION_SHORT="1.3"
OIDN_VERSION_MIN="1.3.0"
OIDN_VERSION_MAX="1.4"
OIDN_FORCE_BUILD=false
OIDN_FORCE_REBUILD=false
OIDN_SKIP=false
ISPC_VERSION="1.14.1"
FFMPEG_VERSION="4.4"
FFMPEG_VERSION_SHORT="4.4"
FFMPEG_VERSION="4.2.3"
FFMPEG_VERSION_SHORT="4.2"
FFMPEG_VERSION_MIN="3.0"
FFMPEG_VERSION_MAX="5.0"
FFMPEG_FORCE_BUILD=false

View File

@@ -20,7 +20,7 @@
# ILMBASE_LIBRARIES - list of libraries to link against when using IlmBase.
# ILMBASE_FOUND - True if IlmBase was found.
# Other standard issue macros
# Other standarnd issue macros
include(FindPackageHandleStandardArgs)
include(FindPackageMessage)
include(SelectLibraryConfigurations)

View File

@@ -22,7 +22,7 @@
# These are defined by the FindIlmBase module.
# OPENEXR_FOUND - True if OpenEXR was found.
# Other standard issue macros
# Other standarnd issue macros
include(SelectLibraryConfigurations)
include(FindPackageHandleStandardArgs)
include(FindPackageMessage)

View File

@@ -1,3 +1,33 @@
diff -Naur oidn-1.3.0/cmake/FindTBB.cmake external_openimagedenoise/cmake/FindTBB.cmake
--- oidn-1.3.0/cmake/FindTBB.cmake 2021-02-04 16:20:26 -0700
+++ external_openimagedenoise/cmake/FindTBB.cmake 2021-02-12 09:35:53 -0700
@@ -332,20 +332,22 @@
${TBB_ROOT}/lib/${TBB_ARCH}/${TBB_VCVER}
${TBB_ROOT}/lib
)
-
# On Windows, also search the DLL so that the client may install it.
file(GLOB DLL_NAMES
${TBB_ROOT}/bin/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/bin/${LIB_NAME}.dll
+ ${TBB_ROOT}/lib/${LIB_NAME}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB1}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB2}.dll
${TBB_ROOT}/../redist/${TBB_ARCH}/tbb/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/../redist/${TBB_ARCH}_win/tbb/${TBB_VCVER}/${LIB_NAME}.dll
)
- list(GET DLL_NAMES 0 DLL_NAME)
- get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
- set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ if (DLL_NAMES)
+ list(GET DLL_NAMES 0 DLL_NAME)
+ get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
+ set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ endif()
elseif(APPLE)
set(LIB_PATHS ${TBB_ROOT}/lib)
else()
--- external_openimagedenoise/cmake/oidn_ispc.cmake 2021-02-15 17:29:34.000000000 +0100
+++ external_openimagedenoise/cmake/oidn_ispc.cmake2 2021-02-15 17:29:28.000000000 +0100
@@ -98,7 +98,7 @@

View File

@@ -1,4 +1,70 @@
Buildbot Configuration
=====================
Blender Buildbot
================
Files used by Buildbot's `compile-code` step.
Code signing
------------
Code signing is done as part of the INSTALL target, which makes it possible to sign
files which are aimed at a bundle and come from a non-signed source (such as
the libraries SVN).
This is achieved by specifying `worker_codesign.cmake` as a post-install script
run by CMake. This CMake script simply invokes a utility script written in
Python which takes care of the actual signing.
### Configuration
Client configuration doesn't need anything special, other than the variable
`SHARED_STORAGE_DIR` pointing to a location which is watched by the server.
This is done in the `config_builder.py` file and is stored in Git (which makes it
possible to have almost zero-configuration buildbot machines).
Server configuration requires copying `config_server_template.py` to
`config_server.py` and tweaking the platform-specific values.
#### Windows configuration
Two things are needed on Windows in order for code signing to work:
- `TIMESTAMP_AUTHORITY_URL`, which is most likely set to http://timestamp.digicert.com
- `CERTIFICATE_FILEPATH` which is a full file path to a PKCS #12 key (.pfx).
## Tips
### Self-signed certificate on Windows
It is easiest to test the configuration using a self-signed certificate.
The certificate manipulation utilities come with the Windows SDK.
Unfortunately, they are not added to PATH. Here is an example of how to make
sure they are easily available:
```
set PATH=C:\Program Files (x86)\Windows Kits\10\App Certification Kit;%PATH%
set PATH=C:\Program Files (x86)\Windows Kits\10\bin\10.0.18362.0\x64;%PATH%
```
Generate CA:
```
makecert -r -pe -n "CN=Blender Test CA" -ss CA -sr CurrentUser -a sha256 ^
-cy authority -sky signature -sv BlenderTestCA.pvk BlenderTestCA.cer
```
Import the generated CA:
```
certutil -user -addstore Root BlenderTestCA.cer
```
Create self-signed certificate and pack it into PKCS #12:
```
makecert -pe -n "CN=Blender Test SPC" -a sha256 -cy end ^
-sky signature ^
-ic BlenderTestCA.cer -iv BlenderTestCA.pvk ^
-sv BlenderTestSPC.pvk BlenderTestSPC.cer
pvk2pfx -pvk BlenderTestSPC.pvk -spc BlenderTestSPC.cer -pfx BlenderTestSPC.pfx
```

View File

@@ -0,0 +1,127 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import argparse
import os
import re
import subprocess
import sys
def is_tool(name):
"""Check whether `name` is on PATH and marked as executable."""
# from whichcraft import which
from shutil import which
return which(name) is not None
class Builder:
def __init__(self, name, branch, codesign):
self.name = name
self.branch = branch
self.is_release_branch = re.match("^blender-v(.*)-release$", branch) is not None
self.codesign = codesign
# Buildbot runs from build/ directory
self.blender_dir = os.path.abspath(os.path.join('..', 'blender.git'))
self.build_dir = os.path.abspath(os.path.join('..', 'build'))
self.install_dir = os.path.abspath(os.path.join('..', 'install'))
self.upload_dir = os.path.abspath(os.path.join('..', 'install'))
# Detect platform
if name.startswith('mac'):
self.platform = 'mac'
self.command_prefix = []
elif name.startswith('linux'):
self.platform = 'linux'
if is_tool('scl'):
self.command_prefix = ['scl', 'enable', 'devtoolset-9', '--']
else:
self.command_prefix = []
elif name.startswith('win'):
self.platform = 'win'
self.command_prefix = []
else:
raise ValueError('Unknown platform for builder ' + name)
# Always 64 bit now
self.bits = 64
def create_builder_from_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('builder_name')
parser.add_argument('branch', default='master', nargs='?')
parser.add_argument("--codesign", action="store_true")
args = parser.parse_args()
return Builder(args.builder_name, args.branch, args.codesign)
class VersionInfo:
def __init__(self, builder):
# Get version information
buildinfo_h = os.path.join(builder.build_dir, "source", "creator", "buildinfo.h")
blender_h = os.path.join(builder.blender_dir, "source", "blender", "blenkernel", "BKE_blender_version.h")
version_number = int(self._parse_header_file(blender_h, 'BLENDER_VERSION'))
version_number_patch = int(self._parse_header_file(blender_h, 'BLENDER_VERSION_PATCH'))
version_numbers = (version_number // 100, version_number % 100, version_number_patch)
self.short_version = "%d.%d" % (version_numbers[0], version_numbers[1])
self.version = "%d.%d.%d" % version_numbers
self.version_cycle = self._parse_header_file(blender_h, 'BLENDER_VERSION_CYCLE')
self.hash = self._parse_header_file(buildinfo_h, 'BUILD_HASH')[1:-1]
if self.version_cycle == "release":
# Final release
self.full_version = self.version
self.is_development_build = False
elif self.version_cycle == "rc":
# Release candidate
self.full_version = self.version + self.version_cycle
self.is_development_build = False
else:
# Development build
self.full_version = self.version + '-' + self.hash
self.is_development_build = True
def _parse_header_file(self, filename, define):
import re
regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define)
with open(filename, "r") as file:
for l in file:
match = regex.match(l)
if match:
return match.group(1)
return None
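For clarity, here is a hypothetical worked example of the version decoding performed in `VersionInfo.__init__` above (the header values are illustrative, not taken from a real build):

```
# Assumed example values as they might be parsed from BKE_blender_version.h.
version_number = 293        # e.g. `#define BLENDER_VERSION 293`
version_number_patch = 0    # e.g. `#define BLENDER_VERSION_PATCH 0`

# Same decoding as in VersionInfo.__init__: major = n // 100, minor = n % 100.
version_numbers = (version_number // 100, version_number % 100, version_number_patch)
assert "%d.%d" % version_numbers[:2] == "2.93"
assert "%d.%d.%d" % version_numbers == "2.93.0"
```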
def call(cmd, env=None, exit_on_error=True):
print(' '.join(cmd))
# Flush to ensure correct order output on Windows.
sys.stdout.flush()
sys.stderr.flush()
retcode = subprocess.call(cmd, env=env)
if exit_on_error and retcode != 0:
sys.exit(retcode)
return retcode

View File

@@ -0,0 +1,81 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
from dataclasses import dataclass
from pathlib import Path
from typing import List
@dataclass
class AbsoluteAndRelativeFileName:
"""
Helper class which keeps track of absolute file path for a direct access and
corresponding relative path against given base.
The relative part is used to construct a file name within an archive which
contains files which are to be signed or which have been signed already
(depending on whether the archive is addressed to the signing server or back
to the buildbot worker).
"""
# Base directory which is where relative_filepath is relative to.
base_dir: Path
# Full absolute path of the corresponding file.
absolute_filepath: Path
# Derived from full file path, contains part of the path which is relative
# to a desired base path.
relative_filepath: Path
def __init__(self, base_dir: Path, filepath: Path):
self.base_dir = base_dir
self.absolute_filepath = filepath.resolve()
self.relative_filepath = self.absolute_filepath.relative_to(
self.base_dir)
@classmethod
def from_path(cls, path: Path) -> 'AbsoluteAndRelativeFileName':
assert path.is_absolute()
assert path.is_file()
base_dir = path.parent
return AbsoluteAndRelativeFileName(base_dir, path)
@classmethod
def recursively_from_directory(cls, base_dir: Path) \
-> List['AbsoluteAndRelativeFileName']:
"""
Create list of AbsoluteAndRelativeFileName for all the files in the
given directory.
NOTE: Result will be pointing to resolved paths.
"""
assert base_dir.is_absolute()
assert base_dir.is_dir()
base_dir = base_dir.resolve()
result = []
for filename in base_dir.glob('**/*'):
if not filename.is_file():
continue
result.append(AbsoluteAndRelativeFileName(base_dir, filename))
return result
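A minimal usage sketch of `AbsoluteAndRelativeFileName` (the directory below is a placeholder assumed to exist, not part of the actual pipeline):

```
from pathlib import Path

# Assumed example directory containing files to be signed.
install_dir = Path('/tmp/blender-install')

# Each entry keeps both the absolute path (for direct access) and the path
# relative to install_dir (for use as the archive member name).
for entry in AbsoluteAndRelativeFileName.recursively_from_directory(install_dir):
    print(entry.relative_filepath, '->', entry.absolute_filepath)
```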

View File

@@ -0,0 +1,245 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import dataclasses
import json
import os
from pathlib import Path
from typing import Optional
import codesign.util as util
class ArchiveStateError(Exception):
message: str
def __init__(self, message):
self.message = message
super().__init__(self.message)
@dataclasses.dataclass
class ArchiveState:
"""
Additional information (state) of the archive
Includes information like the expected file size of the archive file in the
case the archive file is expected to be successfully created.
If the archive can not be created, this state will contain an error message
indicating details of the error.
"""
# Size in bytes of the corresponding archive.
file_size: Optional[int] = None
# Non-empty value indicates that an error has happened.
error_message: str = ''
def has_error(self) -> bool:
"""
Check whether the archive is at error state
"""
return self.error_message
def serialize_to_string(self) -> str:
payload = dataclasses.asdict(self)
return json.dumps(payload, sort_keys=True, indent=4)
def serialize_to_file(self, filepath: Path) -> None:
string = self.serialize_to_string()
filepath.write_text(string)
@classmethod
def deserialize_from_string(cls, string: str) -> 'ArchiveState':
try:
object_as_dict = json.loads(string)
except json.decoder.JSONDecodeError:
raise ArchiveStateError('Error parsing JSON')
return cls(**object_as_dict)
@classmethod
def deserialize_from_file(cls, filepath: Path):
string = filepath.read_text()
return cls.deserialize_from_string(string)
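A small sketch of the JSON round-trip that `ArchiveState` provides (the size is an arbitrary example value):

```
# Serialize a state for a successfully created archive, then restore it.
state = ArchiveState(file_size=1024)
restored = ArchiveState.deserialize_from_string(state.serialize_to_string())
assert restored == state
assert not restored.has_error()

# A failed request carries only an error message and no file size.
failed = ArchiveState(error_message='signing failed')
assert failed.has_error()
```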
class ArchiveWithIndicator:
"""
The idea of this class is to wrap the logic which takes care of keeping
track of the name of an archive and the synchronization routines between the
buildbot worker and the signing server.
The synchronization is done based on creating a special file after the
archive file is known to be ready for access.
"""
# Base directory where the archive is stored (basically, a basename() of
# the absolute archive file name).
#
# For example, 'X:\\TEMP\\'.
base_dir: Path
# Absolute file name of the archive.
#
# For example, 'X:\\TEMP\\FOO.ZIP'.
archive_filepath: Path
# Absolute name of a file which acts as an indication of the fact that the
# archive is ready and is available for access.
#
# This is how synchronization between buildbot worker and signing server is
# done:
# - First, the archive is created under archive_filepath name.
# - Second, the indication file is created under ready_indicator_filepath
# name.
# - Third, the colleague of whoever created the indicator watches for
# the indication file to appear, and once it's there it accesses the
# archive.
ready_indicator_filepath: Path
def __init__(
self, base_dir: Path, archive_name: str, ready_indicator_name: str):
"""
Construct the object from given base directory and name of the archive
file:
ArchiveWithIndicator(Path('X:\\TEMP'), 'FOO.ZIP', 'INPUT_READY')
"""
self.base_dir = base_dir
self.archive_filepath = self.base_dir / archive_name
self.ready_indicator_filepath = self.base_dir / ready_indicator_name
def is_ready_unsafe(self) -> bool:
"""
Check whether the archive is ready for access.
No guarding against possible network failures is done here.
"""
if not self.ready_indicator_filepath.exists():
return False
try:
archive_state = ArchiveState.deserialize_from_file(
self.ready_indicator_filepath)
except ArchiveStateError as error:
print(f'Error deserializing archive state: {error.message}')
return False
if archive_state.has_error():
# If the error did happen during codesign procedure there will be no
# corresponding archive file.
# The caller code will deal with the error check further.
return True
# Sometimes on macOS indicator file appears prior to the actual archive
# despite the order of creation and os.sync() used in tag_ready().
# So consider archive not ready if there is an indicator without an
# actual archive.
if not self.archive_filepath.exists():
print('Found indicator without actual archive, waiting for archive '
f'({self.archive_filepath}) to appear.')
return False
# Wait until the archive is fully stored.
actual_archive_size = self.archive_filepath.stat().st_size
if actual_archive_size != archive_state.file_size:
print('Partial/invalid archive size (expected '
f'{archive_state.file_size} got {actual_archive_size})')
return False
return True
def is_ready(self) -> bool:
"""
Check whether the archive is ready for access.
Will tolerate possible network failures: if there is a network failure
or if there is still no proper permission on a file False is returned.
"""
# There is an intermittent problem happening at random which
# translates to "OSError : [WinError 59] An unexpected network error occurred".
# Some reports suggest it might be due to lack of permissions on the file,
# which might be applicable in our case since it's possible that the file is
# initially created with non-accessible permissions and gets chmod-ed
# after initial creation.
try:
return self.is_ready_unsafe()
except OSError as e:
print(f'Exception checking archive: {e}')
return False
def tag_ready(self, error_message='') -> None:
"""
Tag the archive as ready by creating the corresponding indication file.
NOTE: It is expected that the archive was never tagged as ready before
and that there are no subsequent tags of the same archive.
If it is violated, an assert will fail.
"""
assert not self.is_ready()
# Try the best to make sure everything is synced to the file system,
# to avoid any possibility of stamp appearing on a network share prior to
# an actual file.
if util.get_current_platform() != util.Platform.WINDOWS:
os.sync()
archive_size = -1
if self.archive_filepath.exists():
archive_size = self.archive_filepath.stat().st_size
archive_info = ArchiveState(
file_size=archive_size, error_message=error_message)
self.ready_indicator_filepath.write_text(
archive_info.serialize_to_string())
def get_state(self) -> ArchiveState:
"""
Get state object for this archive
The state is read from the corresponding state file.
"""
try:
return ArchiveState.deserialize_from_file(self.ready_indicator_filepath)
except ArchiveStateError as error:
return ArchiveState(error_message=f'Error in information format: {error}')
def clean(self) -> None:
"""
Remove both archive and the ready indication file.
"""
util.ensure_file_does_not_exist_or_die(self.ready_indicator_filepath)
util.ensure_file_does_not_exist_or_die(self.archive_filepath)
def is_fully_absent(self) -> bool:
"""
Check whether both archive and its ready indicator are absent.
Is used for a sanity check during code signing process by both
buildbot worker and signing server.
"""
return (not self.archive_filepath.exists() and
not self.ready_indicator_filepath.exists())
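A rough sketch of the READY-file handshake described above, with placeholder paths and archive contents (not the real buildbot configuration):

```
from pathlib import Path

shared_dir = Path('/tmp/codesign-shared')       # assumed shared directory
shared_dir.mkdir(parents=True, exist_ok=True)

archive = ArchiveWithIndicator(shared_dir, 'request.tar', 'request.ready')

# Producer side: write the archive first, then tag it as ready so the
# consumer knows the file is complete.
archive.archive_filepath.write_bytes(b'placeholder archive payload')
archive.tag_ready()

# Consumer side: poll is_ready() and, once it returns True, read the state
# and access the archive; clean() removes both files afterwards.
if archive.is_ready():
    state = archive.get_state()
    print('archive size:', state.file_size)
    archive.clean()
```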

View File

@@ -0,0 +1,501 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Signing process overview.
#
# From buildbot worker side:
# - Files which need to be signed are collected from either a directory, to
# sign all signable files in there, or by the filename of a single file to sign.
# - Those files get packed into an archive and stored in a location
# which is watched by the signing server.
# - A marker READY file is created which indicates the archive is ready for
# access.
# - Wait for the server to provide an archive with signed files.
# This is done by watching for the READY file which corresponds to an archive
# coming from the signing server.
# - Unpack the signed files from the archives and replace the original ones.
#
# From code sign server:
# - Watch a special location for a READY file which indicates that there is an
# archive with files which are to be signed.
# - Unpack the archive to a temporary location.
# - Run codesign tool and make sure all the files are signed.
# - Pack the signed files and store them in a location which is watched by
# the buildbot worker.
# - Create a READY file which indicates that the archive with signed files is
# ready.
import abc
import logging
import shutil
import subprocess
import time
import tarfile
import uuid
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Iterable, List
import codesign.util as util
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
from codesign.archive_with_indicator import ArchiveWithIndicator
from codesign.exception import CodeSignException
logger = logging.getLogger(__name__)
logger_builder = logger.getChild('builder')
logger_server = logger.getChild('server')
def pack_files(files: Iterable[AbsoluteAndRelativeFileName],
archive_filepath: Path) -> None:
"""
Create tar archive from given files for the signing pipeline.
Is used by buildbot worker to create an archive of files which are to be
signed, and by signing server to send signed files back to the worker.
"""
with tarfile.TarFile.open(archive_filepath, 'w') as tar_file_handle:
for file_info in files:
tar_file_handle.add(file_info.absolute_filepath,
arcname=file_info.relative_filepath)
def extract_files(archive_filepath: Path,
extraction_dir: Path) -> None:
"""
Extract all files from the given archive into the given directory.
"""
# TODO(sergey): Verify files in the archive have relative path.
with tarfile.TarFile.open(archive_filepath, mode='r') as tar_file_handle:
tar_file_handle.extractall(path=extraction_dir)
class BaseCodeSigner(metaclass=abc.ABCMeta):
"""
Base class for a platform-specific signer of binaries.
Contains all the logic shared across platform-specific implementations, such
as synchronization and notification logic.
Platform specific bits (such as actual command for signing the binary) are
to be implemented as a subclass.
Provides utilities for code signing as a whole, including functionality needed
by a signing server and a buildbot worker.
The signer and builder may run on separate machines; the only requirement is
that they have access to a directory which is shared between them. For
security concerns this is to be done as a separate machine (or as a Shared
Folder configuration in VirtualBox). This directory might be
mounted under different base paths, but its underlying storage is to be
the same.
The code signer is short-lived on a buildbot worker side, and is living
forever on a code signing server side.
"""
# TODO(sergey): Find a neat way to have config annotated.
# config: Config
# Storage directory where builder puts files which are requested to be
# signed.
# Consider this an input of the code signing server.
unsigned_storage_dir: Path
# Storage where signed files are stored.
# Consider this an output of the code signer server.
signed_storage_dir: Path
# Platform the code is currently executing on.
platform: util.Platform
def __init__(self, config):
self.config = config
absolute_shared_storage_dir = config.SHARED_STORAGE_DIR.resolve()
# Unsigned (signing server input) configuration.
self.unsigned_storage_dir = absolute_shared_storage_dir / 'unsigned'
# Signed (signing server output) configuration.
self.signed_storage_dir = absolute_shared_storage_dir / 'signed'
self.platform = util.get_current_platform()
def cleanup_environment_for_builder(self) -> None:
# TODO(sergey): Revisit need of cleaning up the existing files.
# In practice it wasn't so helpful, and with multiple clients
# talking to the same server it becomes even more tricky.
pass
def cleanup_environment_for_signing_server(self) -> None:
# TODO(sergey): Revisit need of cleaning up the existing files.
# In practice it wasn't so helpful, and with multiple clients
# talking to the same server it becomes even more tricky.
pass
def generate_request_id(self) -> str:
"""
Generate a unique identifier for a code signing request.
"""
return str(uuid.uuid4())
def archive_info_for_request_id(
self, path: Path, request_id: str) -> ArchiveWithIndicator:
return ArchiveWithIndicator(
path, f'{request_id}.tar', f'{request_id}.ready')
def signed_archive_info_for_request_id(
self, request_id: str) -> ArchiveWithIndicator:
return self.archive_info_for_request_id(
self.signed_storage_dir, request_id)
def unsigned_archive_info_for_request_id(
self, request_id: str) -> ArchiveWithIndicator:
return self.archive_info_for_request_id(
self.unsigned_storage_dir, request_id)
############################################################################
# Buildbot worker side helpers.
@abc.abstractmethod
def check_file_is_to_be_signed(
self, file: AbsoluteAndRelativeFileName) -> bool:
"""
Check whether file is to be signed.
Is used by both single file signing pipeline and recursive directory
signing pipeline.
This is where the code signer checks whether a file is to be signed or
not. This check might be based on a simple extension test or on an actual
test of whether the file already has a digital signature.
"""
def collect_files_to_sign(self, path: Path) \
-> List[AbsoluteAndRelativeFileName]:
"""
Get all files which need to be signed from the given path.
NOTE: The path might either be a file or directory.
This function is run from the buildbot worker side.
"""
# If there is a single file provided trust the buildbot worker that it
# is eligible for signing.
if path.is_file():
file = AbsoluteAndRelativeFileName.from_path(path)
if not self.check_file_is_to_be_signed(file):
return []
return [file]
all_files = AbsoluteAndRelativeFileName.recursively_from_directory(
path)
files_to_be_signed = [file for file in all_files
if self.check_file_is_to_be_signed(file)]
return files_to_be_signed
def wait_for_signed_archive_or_die(self, request_id) -> None:
"""
Wait until archive with signed files is available.
Will only return if the archive with signed files is available. If there
was an error during the code sign procedure the SystemExit exception is
raised, with the message set to the error reported by the codesign
server.
Will only wait for the configured time. If that time is exceeded and there
is still no response from the signing server the application will exit
with a non-zero exit code.
"""
signed_archive_info = self.signed_archive_info_for_request_id(
request_id)
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
request_id)
timeout_in_seconds = self.config.TIMEOUT_IN_SECONDS
time_start = time.monotonic()
while not signed_archive_info.is_ready():
time.sleep(1)
time_slept_in_seconds = time.monotonic() - time_start
if time_slept_in_seconds > timeout_in_seconds:
signed_archive_info.clean()
unsigned_archive_info.clean()
raise SystemExit("Signing server didn't finish signing in "
f'{timeout_in_seconds} seconds, dying :(')
archive_state = signed_archive_info.get_state()
if archive_state.has_error():
signed_archive_info.clean()
unsigned_archive_info.clean()
raise SystemExit(
f'Error happened during codesign procedure: {archive_state.error_message}')
def copy_signed_files_to_directory(
self, signed_dir: Path, destination_dir: Path) -> None:
"""
Copy all files from signed_dir to destination_dir.
This function will overwrite any existing file. Permissions are copied
from the source files, but other metadata, such as timestamps, are not.
"""
for signed_filepath in signed_dir.glob('**/*'):
if not signed_filepath.is_file():
continue
relative_filepath = signed_filepath.relative_to(signed_dir)
destination_filepath = destination_dir / relative_filepath
destination_filepath.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(signed_filepath, destination_filepath)
def run_buildbot_path_sign_pipeline(self, path: Path) -> None:
"""
Run all steps needed to make given path signed.
Path points to an unsigned file or a directory which contains unsigned
files.
If the path points to a single file then this file will be signed.
This is used to sign a final bundle such as .msi on Windows or .dmg on
macOS.
NOTE: The code signer implementation might actually reject signing the
file, in which case the file will be left unsigned. This isn't anything
to be considered a failure situation, it just might happen when the buildbot
worker can not detect whether signing is really required in a specific
case or not.
If the path points to a directory then code signer will sign all
signable files from it (finding them recursively).
"""
self.cleanup_environment_for_builder()
# Make sure storage directory exists.
self.unsigned_storage_dir.mkdir(parents=True, exist_ok=True)
# Collect all files which need to be signed and pack them into a single
# archive which will be sent to the signing server.
logger_builder.info('Collecting files which are to be signed...')
files = self.collect_files_to_sign(path)
if not files:
logger_builder.info('No files to be signed, ignoring.')
return
logger_builder.info('Found %d files to sign.', len(files))
request_id = self.generate_request_id()
signed_archive_info = self.signed_archive_info_for_request_id(
request_id)
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
request_id)
pack_files(files=files,
archive_filepath=unsigned_archive_info.archive_filepath)
unsigned_archive_info.tag_ready()
# Wait for the signing server to finish signing.
logger_builder.info('Waiting for the signing server to sign the files...')
self.wait_for_signed_archive_or_die(request_id)
# Extract signed files from archive and move files to final location.
with TemporaryDirectory(prefix='blender-buildbot-') as temp_dir_str:
unpacked_signed_files_dir = Path(temp_dir_str)
logger_builder.info('Extracting signed files from archive...')
extract_files(
archive_filepath=signed_archive_info.archive_filepath,
extraction_dir=unpacked_signed_files_dir)
destination_dir = path
if destination_dir.is_file():
destination_dir = destination_dir.parent
self.copy_signed_files_to_directory(
unpacked_signed_files_dir, destination_dir)
logger_builder.info('Removing archive with signed files...')
signed_archive_info.clean()
############################################################################
# Signing server side helpers.
def wait_for_sign_request(self) -> str:
"""
Wait for the buildbot to request signing of an archive.
Returns an identifier of signing request.
"""
# TODO(sergey): Support graceful shutdown on Ctrl-C.
logger_server.info(
f'Waiting for a request directory {self.unsigned_storage_dir} to appear.')
while not self.unsigned_storage_dir.exists():
time.sleep(1)
logger_server.info(
'Waiting for a READY indicator of any signing request.')
request_id = None
while request_id is None:
for file in self.unsigned_storage_dir.iterdir():
if file.suffix != '.ready':
continue
request_id = file.stem
logger_server.info(f'Found READY for request ID {request_id}.')
if request_id is None:
time.sleep(1)
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
request_id)
while not unsigned_archive_info.is_ready():
time.sleep(1)
return request_id
@abc.abstractmethod
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
"""
Sign all files in the given directory.
NOTE: Signing should happen in-place.
"""
def run_signing_pipeline(self, request_id: str):
"""
Run the full signing pipeline starting from the point when the buildbot
worker has requested signing.
"""
# Make sure storage directory exists.
self.signed_storage_dir.mkdir(parents=True, exist_ok=True)
with TemporaryDirectory(prefix='blender-codesign-') as temp_dir_str:
temp_dir = Path(temp_dir_str)
signed_archive_info = self.signed_archive_info_for_request_id(
request_id)
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
request_id)
logger_server.info('Extracting unsigned files from archive...')
extract_files(
archive_filepath=unsigned_archive_info.archive_filepath,
extraction_dir=temp_dir)
logger_server.info('Collecting all files which need signing...')
files = AbsoluteAndRelativeFileName.recursively_from_directory(
temp_dir)
logger_server.info('Signing all requested files...')
try:
self.sign_all_files(files)
except CodeSignException as error:
signed_archive_info.tag_ready(error_message=error.message)
unsigned_archive_info.clean()
logger_server.info('Signing is complete with errors.')
return
logger_server.info('Packing signed files...')
pack_files(files=files,
archive_filepath=signed_archive_info.archive_filepath)
signed_archive_info.tag_ready()
logger_server.info('Removing signing request...')
unsigned_archive_info.clean()
logger_server.info('Signing is complete.')
def run_signing_server(self):
logger_server.info('Starting new code signing server...')
self.cleanup_environment_for_signing_server()
logger_server.info('Code signing server is ready')
while True:
logger_server.info('Waiting for the signing request in %s...',
self.unsigned_storage_dir)
request_id = self.wait_for_sign_request()
logger_server.info(
f'Beginning signing procedure for request ID {request_id}.')
self.run_signing_pipeline(request_id)
############################################################################
# Command executing.
#
# Abstracted to a degree that allows running commands for a foreign
# platform.
# The goal with this is to allow performing dry-run tests of code signer
# server from other platforms (for example, to test that macOS code signer
# does what it is supposed to after doing a refactor on Linux).
# TODO(sergey): What is the type annotation for the command?
def run_command_or_mock(self, command, platform: util.Platform) -> None:
"""
Run given command if current platform matches given one
If the platform is different then the command will only be printed, allowing
one to verify the logic of the code signing process.
"""
if platform != self.platform:
logger_server.info(
f'Will run command for {platform}: {command}')
return
logger_server.info(f'Running command: {command}')
subprocess.run(command)
# TODO(sergey): What is the type annotation for the command?
def check_output_or_mock(self, command,
platform: util.Platform,
allow_nonzero_exit_code=False) -> str:
"""
Run given command if current platform matches given one
If the platform is different then the command will only be printed, allowing
one to verify the logic of the code signing process.
If allow_nonzero_exit_code is truthy then the output will be returned
even if the application quits with a non-zero exit code.
Otherwise a subprocess.CalledProcessError exception will be raised
in that case.
"""
if platform != self.platform:
logger_server.info(
f'Will run command for {platform}: {command}')
return
if allow_nonzero_exit_code:
process = subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output = process.communicate()[0]
return output.decode()
logger_server.info(f'Running command: {command}')
return subprocess.check_output(
command, stderr=subprocess.STDOUT).decode()
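A hypothetical sketch of how the two sides described in the overview comment above might be driven; `SomePlatformCodeSigner`, the config module, and the install path are placeholders for an actual subclass and configuration:

```
from pathlib import Path

# Worker side: sign a single bundle or a whole directory of build artifacts.
signer = SomePlatformCodeSigner(config)          # any BaseCodeSigner subclass
signer.run_buildbot_path_sign_pipeline(Path('/path/to/install'))

# Server side: the same kind of object, running forever and handling requests.
signer.run_signing_server()
```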

View File

@@ -0,0 +1,62 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Configuration of a code signer which is specific to the code running from
# buildbot's worker.
import sys
from pathlib import Path
import codesign.util as util
from codesign.config_common import *
platform = util.get_current_platform()
if platform == util.Platform.LINUX:
SHARED_STORAGE_DIR = Path('/data/codesign')
elif platform == util.Platform.WINDOWS:
SHARED_STORAGE_DIR = Path('Z:\\codesign')
elif platform == util.Platform.MACOS:
SHARED_STORAGE_DIR = Path('/Volumes/codesign_macos/codesign')
# https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
LOGGING = {
'version': 1,
'formatters': {
'default': {'format': '%(asctime)-15s %(levelname)8s %(name)s %(message)s'}
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'default',
'stream': 'ext://sys.stderr',
}
},
'loggers': {
'codesign': {'level': 'INFO'},
},
'root': {
'level': 'WARNING',
'handlers': [
'console',
],
}
}
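A short note on how such a LOGGING dictionary is typically applied, using the standard library's dictConfig (a hedged sketch; the actual worker scripts may wire this up differently):

```
import logging
import logging.config

import codesign.config_builder as config

logging.config.dictConfig(config.LOGGING)
logging.getLogger('codesign').info('logging configured')
```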

View File

@@ -0,0 +1,36 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
from pathlib import Path
# Timeout in seconds for the signing process.
#
# This is how long the buildbot packing step will wait for the signing server to
# perform signing.
#
# NOTE: Notarization could take a long time, hence the rather high value
# here. Might consider using different timeout for different platforms.
TIMEOUT_IN_SECONDS = 45 * 60 * 60
# Directory which is shared across buildbot worker and signing server.
#
# This is where worker puts files requested for signing as well as where
# server puts signed files.
SHARED_STORAGE_DIR: Path

View File

@@ -0,0 +1,101 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Configuration of a code signer which is specific to the code signing server.
#
# NOTE: DO NOT put any sensitive information here, put it in an actual
# configuration on the signing machine.
from pathlib import Path
from codesign.config_common import *
CODESIGN_DIRECTORY = Path(__file__).absolute().parent
BLENDER_GIT_ROOT_DIRECTORY = CODESIGN_DIRECTORY.parent.parent.parent
################################################################################
# Common configuration.
# Directory where folders for codesign requests and signed result are stored.
# For example, /data/codesign
SHARED_STORAGE_DIR: Path
################################################################################
# macOS-specific configuration.
MACOS_ENTITLEMENTS_FILE = \
BLENDER_GIT_ROOT_DIRECTORY / 'release' / 'darwin' / 'entitlements.plist'
# Identity of the Developer ID Application certificate which is to be used for
# codesign tool.
# Use `security find-identity -v -p codesigning` to find the identity.
#
# NOTE: This identity is just an example from release/darwin/README.txt.
MACOS_CODESIGN_IDENTITY = 'AE825E26F12D08B692F360133210AF46F4CF7B97'
# User name (Apple ID) which will be used to request notarization.
MACOS_XCRUN_USERNAME = 'me@example.com'
# One-time application password which will be used to request notarization.
MACOS_XCRUN_PASSWORD = '@keychain:altool-password'
# Timeout in seconds within which the notarial office is supposed to reply.
MACOS_NOTARIZE_TIMEOUT_IN_SECONDS = 60 * 60
################################################################################
# Windows-specific configuration.
# URL to the timestamping authority.
WIN_TIMESTAMP_AUTHORITY_URL = 'http://timestamp.digicert.com'
# Full path to the certificate used for signing.
#
# The path and expected file format might vary depending on a platform.
#
# On Windows it is usually is a PKCS #12 key (.pfx), so the path will look
# like Path('C:\\Secret\\Blender.pfx').
WIN_CERTIFICATE_FILEPATH: Path
################################################################################
# Logging configuration, common for all platforms.
# https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
LOGGING = {
'version': 1,
'formatters': {
'default': {'format': '%(asctime)-15s %(levelname)8s %(name)s %(message)s'}
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'default',
'stream': 'ext://sys.stderr',
}
},
'loggers': {
'codesign': {'level': 'INFO'},
},
'root': {
'level': 'WARNING',
'handlers': [
'console',
],
}
}

View File

@@ -0,0 +1,26 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
class CodeSignException(Exception):
message: str
def __init__(self, message):
self.message = message
super().__init__(self.message)

View File

@@ -0,0 +1,72 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# NOTE: This is a no-op signer (since there isn't really a procedure to sign
# Linux binaries yet). Used to debug and verify the code signing routines on
# a Linux environment.
import logging
from pathlib import Path
from typing import List
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
from codesign.base_code_signer import BaseCodeSigner
logger = logging.getLogger(__name__)
logger_server = logger.getChild('server')
class LinuxCodeSigner(BaseCodeSigner):
def is_active(self) -> bool:
"""
Check whether this signer is active.
If it is inactive, no files will be signed.
Used to be able to debug the code signing pipeline on Linux, where there
is no code signing happening in the actual buildbot and release
environment.
"""
return False
def check_file_is_to_be_signed(
self, file: AbsoluteAndRelativeFileName) -> bool:
if file.relative_filepath == Path('blender'):
return True
if (file.relative_filepath.parts[-3:-1] == ('python', 'bin') and
file.relative_filepath.name.startswith('python')):
return True
if file.relative_filepath.suffix == '.so':
return True
return False
def collect_files_to_sign(self, path: Path) \
-> List[AbsoluteAndRelativeFileName]:
if not self.is_active():
return []
return super().collect_files_to_sign(path)
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
num_files = len(files)
for file_index, file in enumerate(files):
logger.info('Server: Signed file [%d/%d] %s',
file_index + 1, num_files, file.relative_filepath)

View File

@@ -0,0 +1,456 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import logging
import re
import stat
import subprocess
import time
from pathlib import Path
from typing import List
import codesign.util as util
from buildbot_utils import Builder
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
from codesign.base_code_signer import BaseCodeSigner
from codesign.exception import CodeSignException
logger = logging.getLogger(__name__)
logger_server = logger.getChild('server')
# NOTE: Check is done as filename.endswith(), so keep the dot
EXTENSIONS_TO_BE_SIGNED = {'.dylib', '.so', '.dmg'}
# Prefixes of a file (not directory) name which are to be signed.
# Used to sign extra executable files in Contents/Resources.
NAME_PREFIXES_TO_BE_SIGNED = {'python'}
class NotarizationException(CodeSignException):
pass
def is_file_from_bundle(file: AbsoluteAndRelativeFileName) -> bool:
"""
Check whether file is coming from an .app bundle
"""
parts = file.relative_filepath.parts
if not parts:
return False
if not parts[0].endswith('.app'):
return False
return True
def get_bundle_from_file(
file: AbsoluteAndRelativeFileName) -> AbsoluteAndRelativeFileName:
"""
Get AbsoluteAndRelativeFileName descriptor of bundle
"""
assert(is_file_from_bundle(file))
parts = file.relative_filepath.parts
bundle_name = parts[0]
base_dir = file.base_dir
bundle_filepath = file.base_dir / bundle_name
return AbsoluteAndRelativeFileName(base_dir, bundle_filepath)
def is_bundle_executable_file(file: AbsoluteAndRelativeFileName) -> bool:
"""
Check whether given file is an executable within an app bundle
"""
if not is_file_from_bundle(file):
return False
parts = file.relative_filepath.parts
num_parts = len(parts)
if num_parts < 3:
return False
if parts[1:3] != ('Contents', 'MacOS'):
return False
return True
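A quick illustration of the bundle checks above (the paths are made-up examples, not real build output):

```
from pathlib import Path

base = Path('/opt/blender-install')              # assumed base directory
exe = AbsoluteAndRelativeFileName(
    base, base / 'Blender.app' / 'Contents' / 'MacOS' / 'Blender')

assert is_file_from_bundle(exe)                  # path starts with an .app bundle
assert is_bundle_executable_file(exe)            # and sits in Contents/MacOS
assert get_bundle_from_file(exe).relative_filepath == Path('Blender.app')
```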
def xcrun_field_value_from_output(field: str, output: str) -> str:
"""
Get value of a given field from xcrun output.
If the field is not found, an empty string is returned.
"""
field_prefix = field + ': '
for line in output.splitlines():
line = line.strip()
if line.startswith(field_prefix):
return line[len(field_prefix):]
return ''
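For example, given output in the `Field: value` form that this helper assumes (the UUID below is made up):

```
sample_output = (
    'RequestUUID: 123e4567-e89b-12d3-a456-426614174000\n'
    '      Status: success'
)
assert xcrun_field_value_from_output('RequestUUID', sample_output) == \
    '123e4567-e89b-12d3-a456-426614174000'
assert xcrun_field_value_from_output('Status', sample_output) == 'success'
assert xcrun_field_value_from_output('LogFileURL', sample_output) == ''
```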
class MacOSCodeSigner(BaseCodeSigner):
def check_file_is_to_be_signed(
self, file: AbsoluteAndRelativeFileName) -> bool:
if file.relative_filepath.name.startswith('.'):
return False
if is_bundle_executable_file(file):
return True
base_name = file.relative_filepath.name
if any(base_name.startswith(prefix)
for prefix in NAME_PREFIXES_TO_BE_SIGNED):
return True
mode = file.absolute_filepath.lstat().st_mode
if mode & stat.S_IXUSR != 0:
file_output = subprocess.check_output(
("file", file.absolute_filepath)).decode()
if "64-bit executable" in file_output:
return True
return file.relative_filepath.suffix in EXTENSIONS_TO_BE_SIGNED
def collect_files_to_sign(self, path: Path) \
-> List[AbsoluteAndRelativeFileName]:
# Include all files when signing app or dmg bundle: all the files are
# needed to do valid signature of bundle.
if path.name.endswith('.app'):
return AbsoluteAndRelativeFileName.recursively_from_directory(path)
if path.is_dir():
files = []
for child in path.iterdir():
if child.name.endswith('.app'):
current_files = AbsoluteAndRelativeFileName.recursively_from_directory(
child)
else:
current_files = super().collect_files_to_sign(child)
for current_file in current_files:
files.append(AbsoluteAndRelativeFileName(
path, current_file.absolute_filepath))
return files
return super().collect_files_to_sign(path)
############################################################################
# Codesign.
def codesign_remove_signature(
self, file: AbsoluteAndRelativeFileName) -> None:
"""
Make sure the given file does not have a codesign signature
This is needed because codesigning is not possible for a file which
already has a signature.
"""
logger_server.info(
'Removing codesign signature from %s...', file.relative_filepath)
command = ['codesign', '--remove-signature', file.absolute_filepath]
self.run_command_or_mock(command, util.Platform.MACOS)
def codesign_file(
self, file: AbsoluteAndRelativeFileName) -> None:
"""
Sign given file
NOTE: File must not have any signatures.
"""
logger_server.info(
'Codesigning %s...', file.relative_filepath)
entitlements_file = self.config.MACOS_ENTITLEMENTS_FILE
command = ['codesign',
'--timestamp',
'--options', 'runtime',
f'--entitlements={entitlements_file}',
'--sign', self.config.MACOS_CODESIGN_IDENTITY,
file.absolute_filepath]
self.run_command_or_mock(command, util.Platform.MACOS)
def codesign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
"""
Run codesign tool on all eligible files in the given list.
Will ignore all files which are not to be signed. For the rest it will
remove a possible existing signature and add a new one.
"""
num_files = len(files)
have_ignored_files = False
signed_files = []
for file_index, file in enumerate(files):
# Ignore file if it is not to be signed.
# Allows one to manually construct a ZIP of a bundle and get it signed.
if not self.check_file_is_to_be_signed(file):
logger_server.info(
'Ignoring file [%d/%d] %s',
file_index + 1, num_files, file.relative_filepath)
have_ignored_files = True
continue
logger_server.info(
'Running codesigning routines for file [%d/%d] %s...',
file_index + 1, num_files, file.relative_filepath)
self.codesign_remove_signature(file)
self.codesign_file(file)
signed_files.append(file)
if have_ignored_files:
logger_server.info('Signed %d files:', len(signed_files))
num_signed_files = len(signed_files)
for file_index, signed_file in enumerate(signed_files):
logger_server.info(
'- [%d/%d] %s',
file_index + 1, num_signed_files,
signed_file.relative_filepath)
def codesign_bundles(
self, files: List[AbsoluteAndRelativeFileName]) -> None:
"""
Codesign all .app bundles in the given list of files.
The bundle is deduced from the paths of the files, and every bundle is only
signed once.
"""
signed_bundles = set()
extra_files = []
for file in files:
if not is_file_from_bundle(file):
continue
bundle = get_bundle_from_file(file)
bundle_name = bundle.relative_filepath
if bundle_name in signed_bundles:
continue
logger_server.info('Running codesign routines on bundle %s',
bundle_name)
# It is not possible to remove signature from DMG.
if bundle.relative_filepath.name.endswith('.app'):
self.codesign_remove_signature(bundle)
self.codesign_file(bundle)
signed_bundles.add(bundle_name)
# Codesigning a bundle adds an extra folder with information.
# It needs to be copied to the source.
code_signature_directory = \
bundle.absolute_filepath / 'Contents' / '_CodeSignature'
code_signature_files = \
AbsoluteAndRelativeFileName.recursively_from_directory(
code_signature_directory)
for code_signature_file in code_signature_files:
bundle_relative_file = AbsoluteAndRelativeFileName(
bundle.base_dir,
code_signature_directory /
code_signature_file.relative_filepath)
extra_files.append(bundle_relative_file)
files.extend(extra_files)
############################################################################
# Notarization.
def notarize_get_bundle_id(self, file: AbsoluteAndRelativeFileName) -> str:
"""
Get bundle ID which will be used to notarize DMG
"""
name = file.relative_filepath.name
app_name = name.split('-', 2)[0].lower()
app_name_words = app_name.split()
if len(app_name_words) > 1:
app_name_id = ''.join(word.capitalize() for word in app_name_words)
else:
app_name_id = app_name_words[0]
# TODO(sergey): Consider using "alpha" for buildbot builds.
return f'org.blenderfoundation.{app_name_id}.release'
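# Illustrative example (hypothetical file name, not taken from a real build):
#
#   notarize_get_bundle_id() for 'blender-2.83.0-macOS.dmg'
#   -> 'org.blenderfoundation.blender.release'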
def notarize_request(self, file) -> str:
"""
Request notarization of the given file.
Returns the UUID of the notarization request. If an error occurred,
None is returned instead of a UUID.
"""
bundle_id = self.notarize_get_bundle_id(file)
logger_server.info('Bundle ID: %s', bundle_id)
logger_server.info('Submitting file to the notarial office.')
command = [
'xcrun', 'altool', '--notarize-app', '--verbose',
'-f', file.absolute_filepath,
'--primary-bundle-id', bundle_id,
'--username', self.config.MACOS_XCRUN_USERNAME,
'--password', self.config.MACOS_XCRUN_PASSWORD]
output = self.check_output_or_mock(
command, util.Platform.MACOS, allow_nonzero_exit_code=True)
for line in output.splitlines():
line = line.strip()
if line.startswith('RequestUUID = '):
request_uuid = line[14:]
return request_uuid
# Check whether the package has been already submitted.
if 'The software asset has already been uploaded.' in line:
request_uuid = re.sub(
r'.*The upload ID is ([A-Fa-f0-9\-]+).*', '\\1', line)
logger_server.warning(
f'The package has already been submitted under UUID {request_uuid}')
return request_uuid
logger_server.error(output)
logger_server.error('xcrun command did not report RequestUUID')
return None
def notarize_review_status(self, xcrun_output: str) -> bool:
"""
Review status returned by xcrun's notarization info
Returns True if the notarization process has finished.
If there are errors during notarization, a NotarizationException()
exception is thrown with status message from the notarial office.
"""
# Parse status and message
status = xcrun_field_value_from_output('Status', xcrun_output)
status_message = xcrun_field_value_from_output(
'Status Message', xcrun_output)
if status == 'success':
logger_server.info(
'Package successfully notarized: %s', status_message)
return True
if status == 'invalid':
logger_server.error(xcrun_output)
logger_server.error(
'Package notarization has failed: %s', status_message)
raise NotarizationException(status_message)
if status == 'in progress':
return False
logger_server.info(
'Unknown notarization status %s (%s)', status, status_message)
return False
def notarize_wait_result(self, request_uuid: str) -> bool:
"""
Wait until the notarization office has a reply
"""
logger_server.info(
'Waiting for a result from the notarization office.')
command = ['xcrun', 'altool',
'--notarization-info', request_uuid,
'--username', self.config.MACOS_XCRUN_USERNAME,
'--password', self.config.MACOS_XCRUN_PASSWORD]
time_start = time.monotonic()
timeout_in_seconds = self.config.MACOS_NOTARIZE_TIMEOUT_IN_SECONDS
while True:
xcrun_output = self.check_output_or_mock(
command, util.Platform.MACOS, allow_nonzero_exit_code=True)
if self.notarize_review_status(xcrun_output):
return True
logger_server.info('Keep waiting for notarization office.')
time.sleep(30)
time_slept_in_seconds = time.monotonic() - time_start
if time_slept_in_seconds > timeout_in_seconds:
logger_server.error(
"Notarial office didn't reply in %f seconds.",
timeout_in_seconds)
return False
def notarize_staple(self, file: AbsoluteAndRelativeFileName) -> None:
"""
Staple notarial label on the file
"""
logger_server.info('Stapling notarial stamp.')
command = ['xcrun', 'stapler', 'staple', '-v', file.absolute_filepath]
self.check_output_or_mock(command, util.Platform.MACOS)
def notarize_dmg(self, file: AbsoluteAndRelativeFileName) -> bool:
"""
Run entire pipeline to get DMG notarized.
"""
logger_server.info('Begin notarization routines on %s',
file.relative_filepath)
# Submit file for notarization.
request_uuid = self.notarize_request(file)
if not request_uuid:
return False
logger_server.info('Received Request UUID: %s', request_uuid)
# Wait for the status from the notarization office.
if not self.notarize_wait_result(request_uuid):
return False
# Staple.
self.notarize_staple(file)
return True
def notarize_all_dmg(
self, files: List[AbsoluteAndRelativeFileName]) -> bool:
"""
Notarize all DMG images from the input.
Images are supposed to be codesigned already.
"""
for file in files:
if not file.relative_filepath.name.endswith('.dmg'):
continue
if not self.check_file_is_to_be_signed(file):
continue
self.notarize_dmg(file)
############################################################################
# Entry point.
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
# TODO(sergey): Handle errors somehow.
self.codesign_all_files(files)
self.codesign_bundles(files)
self.notarize_all_dmg(files)

View File

@@ -0,0 +1,52 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import logging.config
import sys
from pathlib import Path
from typing import Optional
import codesign.config_builder
import codesign.util as util
from codesign.base_code_signer import BaseCodeSigner
class SimpleCodeSigner:
code_signer: Optional[BaseCodeSigner]
def __init__(self):
platform = util.get_current_platform()
if platform == util.Platform.LINUX:
from codesign.linux_code_signer import LinuxCodeSigner
self.code_signer = LinuxCodeSigner(codesign.config_builder)
elif platform == util.Platform.MACOS:
from codesign.macos_code_signer import MacOSCodeSigner
self.code_signer = MacOSCodeSigner(codesign.config_builder)
elif platform == util.Platform.WINDOWS:
from codesign.windows_code_signer import WindowsCodeSigner
self.code_signer = WindowsCodeSigner(codesign.config_builder)
else:
self.code_signer = None
def sign_file_or_directory(self, path: Path) -> None:
logging.config.dictConfig(codesign.config_builder.LOGGING)
self.code_signer.run_buildbot_path_sign_pipeline(path)

View File

@@ -0,0 +1,54 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import sys
from enum import Enum
from pathlib import Path
class Platform(Enum):
LINUX = 1
MACOS = 2
WINDOWS = 3
def get_current_platform() -> Platform:
if sys.platform == 'linux':
return Platform.LINUX
elif sys.platform == 'darwin':
return Platform.MACOS
elif sys.platform == 'win32':
return Platform.WINDOWS
raise Exception(f'Unknown platform {sys.platform}')
def ensure_file_does_not_exist_or_die(filepath: Path) -> None:
"""
If the file exists, unlink it.
If the file path exists and is not a file, SystemExit is raised.
If the file path does not exist, nothing happens.
"""
if not filepath.exists():
return
if not filepath.is_file():
# TODO(sergey): Provide information about what the filepath actually is.
raise SystemExit(f'{filepath} is expected to be a file, but is not')
filepath.unlink()

View File

@@ -0,0 +1,117 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import logging
import subprocess
from pathlib import Path
from typing import List
import codesign.util as util
from buildbot_utils import Builder
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
from codesign.base_code_signer import BaseCodeSigner
from codesign.exception import CodeSignException
logger = logging.getLogger(__name__)
logger_server = logger.getChild('server')
# NOTE: Check is done as filename.endswith(), so keep the dot
EXTENSIONS_TO_BE_SIGNED = {'.exe', '.dll', '.pyd', '.msi'}
BLACKLIST_FILE_PREFIXES = (
'api-ms-', 'concrt', 'msvcp', 'ucrtbase', 'vcomp', 'vcruntime')
class SigntoolException(CodeSignException):
pass
class WindowsCodeSigner(BaseCodeSigner):
def check_file_is_to_be_signed(
self, file: AbsoluteAndRelativeFileName) -> bool:
base_name = file.relative_filepath.name
if any(base_name.startswith(prefix)
for prefix in BLACKLIST_FILE_PREFIXES):
return False
return file.relative_filepath.suffix in EXTENSIONS_TO_BE_SIGNED
def get_sign_command_prefix(self) -> List[str]:
return [
'signtool', 'sign', '/v',
'/f', self.config.WIN_CERTIFICATE_FILEPATH,
'/tr', self.config.WIN_TIMESTAMP_AUTHORITY_URL]
def run_codesign_tool(self, filepath: Path) -> None:
command = self.get_sign_command_prefix() + [filepath]
try:
codesign_output = self.check_output_or_mock(command, util.Platform.WINDOWS)
except subprocess.CalledProcessError as e:
raise SigntoolException(f'Error running signtool: {e}')
logger_server.info(f'signtool output:\n{codesign_output}')
got_number_of_success = False
for line in codesign_output.split('\n'):
line_clean = line.strip()
line_clean_lower = line_clean.lower()
if line_clean_lower.startswith('number of warnings') or \
line_clean_lower.startswith('number of errors'):
number = int(line_clean_lower.split(':')[1])
if number != 0:
raise SigntoolException('Non-clean success of signtool')
if line_clean_lower.startswith('number of files successfully signed'):
got_number_of_success = True
number = int(line_clean_lower.split(':')[1])
if number != 1:
raise SigntoolException('Signtool did not consider codesign a success')
if not got_number_of_success:
raise SigntoolException('Signtool did not report number of files signed')
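# For reference, run_codesign_tool() above parses signtool's summary lines,
# which (assuming typical `signtool sign /v` output; shown here only as an
# illustrative sketch, not captured from a real signing run) look like:
#
#   Number of files successfully Signed: 1
#   Number of warnings: 0
#   Number of errors: 0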
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
# NOTE: Sign files one by one to avoid possible command line length
# overflow (which could happen if we ever decide to sign every binary
# in the install folder, for example).
#
# TODO(sergey): Consider doing batched signing of handful of files in
# one go (but only if this actually known to be much faster).
num_files = len(files)
for file_index, file in enumerate(files):
# Ignore file if it is not to be signed.
# Allows one to manually construct a ZIP of the package and get it signed.
if not self.check_file_is_to_be_signed(file):
logger_server.info(
'Ignoring file [%d/%d] %s',
file_index + 1, num_files, file.relative_filepath)
continue
logger_server.info(
'Running signtool command for file [%d/%d] %s...',
file_index + 1, num_files, file.relative_filepath)
self.run_codesign_tool(file.absolute_filepath)

View File

@@ -0,0 +1,37 @@
#!/usr/bin/env python3
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# NOTE: This is a no-op signer (since there isn't really a procedure to sign
# Linux binaries yet). Used to debug and verify the code signing routines on
# a Linux environment.
import logging.config
from pathlib import Path
from typing import List
from codesign.linux_code_signer import LinuxCodeSigner
import codesign.config_server
if __name__ == "__main__":
logging.config.dictConfig(codesign.config_server.LOGGING)
code_signer = LinuxCodeSigner(codesign.config_server)
code_signer.run_signing_server()

View File

@@ -0,0 +1,41 @@
#!/usr/bin/env python3
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import logging.config
from pathlib import Path
from typing import List
from codesign.macos_code_signer import MacOSCodeSigner
import codesign.config_server
if __name__ == "__main__":
entitlements_file = codesign.config_server.MACOS_ENTITLEMENTS_FILE
if not entitlements_file.exists():
raise SystemExit(
f'Entitlements file {entitlements_file} does not exist.')
if not entitlements_file.is_file():
raise SystemExit(
f'Entitlements file {entitlements_file} is not a file.')
logging.config.dictConfig(codesign.config_server.LOGGING)
code_signer = MacOSCodeSigner(codesign.config_server)
code_signer.run_signing_server()

View File

@@ -0,0 +1,11 @@
@echo off
rem This is an entry point of the codesign server for Windows.
rem It makes sure that signtool.exe is within the current PATH and can be
rem used by the Python script.
SETLOCAL
set PATH=C:\Program Files (x86)\Windows Kits\10\App Certification Kit;%PATH%
codesign_server_windows.py

View File

@@ -0,0 +1,54 @@
#!/usr/bin/env python3
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Implementation of codesign server for Windows.
#
# NOTE: If signtool.exe is not in the PATH use codesign_server_windows.bat
import logging.config
import shutil
from pathlib import Path
from typing import List
import codesign.util as util
from codesign.windows_code_signer import WindowsCodeSigner
import codesign.config_server
if __name__ == "__main__":
logging.config.dictConfig(codesign.config_server.LOGGING)
logger = logging.getLogger(__name__)
logger_server = logger.getChild('server')
# TODO(sergey): Consider moving such sanity checks into
# CodeSigner.check_environment_or_die().
if not shutil.which('signtool.exe'):
if util.get_current_platform() == util.Platform.WINDOWS:
raise SystemExit("signtool.exe is not found in %PATH%")
logger_server.info(
'signtool.exe not found, '
'but will not be used on this foreign platform')
code_signer = WindowsCodeSigner(codesign.config_server)
code_signer.run_signing_server()

View File

@@ -0,0 +1,551 @@
#!/usr/bin/env python3
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import argparse
import re
import shutil
import subprocess
import sys
import time
from pathlib import Path
from tempfile import TemporaryDirectory, NamedTemporaryFile
from typing import List
BUILDBOT_DIRECTORY = Path(__file__).absolute().parent
CODESIGN_SCRIPT = BUILDBOT_DIRECTORY / 'worker_codesign.py'
BLENDER_GIT_ROOT_DIRECTORY = BUILDBOT_DIRECTORY.parent.parent
DARWIN_DIRECTORY = BLENDER_GIT_ROOT_DIRECTORY / 'release' / 'darwin'
# Extra size which is added on top of actual files size when estimating size
# of the destination DMG.
EXTRA_DMG_SIZE_IN_BYTES = 800 * 1024 * 1024
################################################################################
# Common utilities
def get_directory_size(root_directory: Path) -> int:
"""
Get size of directory on disk
"""
total_size = 0
for file in root_directory.glob('**/*'):
total_size += file.lstat().st_size
return total_size
################################################################################
# DMG bundling specific logic
def create_argument_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
'source_dir',
type=Path,
help='Source directory which points to either an existing .app bundle '
'or to a directory with .app bundles.')
parser.add_argument(
'--background-image',
type=Path,
help="Optional background picture which will be set on the DMG."
"If not provided default Blender's one is used.")
parser.add_argument(
'--volume-name',
type=str,
help='Optional name of a volume which will be used for DMG.')
parser.add_argument(
'--dmg',
type=Path,
help='Optional argument which points to a final DMG file name.')
parser.add_argument(
'--applescript',
type=Path,
help="Optional path to applescript to set up folder looks of DMG."
"If not provided default Blender's one is used.")
parser.add_argument(
'--codesign',
action="store_true",
help="Code sign and notarize DMG contents.")
return parser
def collect_app_bundles(source_dir: Path) -> List[Path]:
"""
Collect all app bundles which are to be put into DMG
If the source directory points to FOO.app it will be the only app bundle
packed.
Otherwise all .app bundles from the given directory are placed into a single
DMG.
"""
if source_dir.name.endswith('.app'):
return [source_dir]
app_bundles = []
for filename in source_dir.glob('*'):
if not filename.is_dir():
continue
if not filename.name.endswith('.app'):
continue
app_bundles.append(filename)
return app_bundles
def collect_and_log_app_bundles(source_dir: Path) -> List[Path]:
app_bundles = collect_app_bundles(source_dir)
if not app_bundles:
print('No app bundles found for packing')
return
print(f'Found {len(app_bundles)} app bundle(s) to pack:')
for app_bundle in app_bundles:
print(f'- {app_bundle}')
return app_bundles
def estimate_dmg_size(app_bundles: List[Path]) -> int:
"""
Estimate size of DMG to hold requested app bundles
The size is based on actual size of all files in all bundles plus some
space to compensate for different size-on-disk plus some space to hold
codesign signatures.
It is better to err on the high side since the empty space is compressed, but
lack of space might cause silent failures later on.
"""
app_bundles_size = 0
for app_bundle in app_bundles:
app_bundles_size += get_directory_size(app_bundle)
return app_bundles_size + EXTRA_DMG_SIZE_IN_BYTES
def copy_app_bundles_to_directory(app_bundles: List[Path],
directory: Path) -> None:
"""
Copy all bundles to a given directory
This directory is what the DMG will be created from.
"""
for app_bundle in app_bundles:
print(f'Copying {app_bundle.name}...')
shutil.copytree(app_bundle, directory / app_bundle.name)
def get_main_app_bundle(app_bundles: List[Path]) -> Path:
"""
Get the main application bundle of the installation
"""
return app_bundles[0]
def create_dmg_image(app_bundles: List[Path],
dmg_filepath: Path,
volume_name: str) -> None:
"""
Create DMG disk image and put app bundles in it
No DMG configuration or codesigning is happening here.
"""
if dmg_filepath.exists():
print(f'Removing existing writable DMG {dmg_filepath}...')
dmg_filepath.unlink()
print('Preparing directory with app bundles for the DMG...')
with TemporaryDirectory(prefix='blender-dmg-content-') as content_dir_str:
# Copy all bundles to a clean directory.
content_dir = Path(content_dir_str)
copy_app_bundles_to_directory(app_bundles, content_dir)
# Estimate size of the DMG.
dmg_size = estimate_dmg_size(app_bundles)
print(f'Estimated DMG size: {dmg_size:,} bytes.')
# Create the DMG.
print(f'Creating writable DMG {dmg_filepath}')
command = ('hdiutil',
'create',
'-size', str(dmg_size),
'-fs', 'HFS+',
'-srcfolder', content_dir,
'-volname', volume_name,
'-format', 'UDRW',
dmg_filepath)
subprocess.run(command)
def get_writable_dmg_filepath(dmg_filepath: Path):
"""
Get file path for writable DMG image
"""
parent = dmg_filepath.parent
return parent / (dmg_filepath.stem + '-temp.dmg')
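# Illustrative example (hypothetical file name):
#
#   get_writable_dmg_filepath(Path('blender-2.83.0-macOS.dmg'))
#   # -> Path('blender-2.83.0-macOS-temp.dmg')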
def mount_readwrite_dmg(dmg_filepath: Path) -> None:
"""
Mount writable DMG
Mounting point would be /Volumes/<volume name>
"""
print(f'Mounting read-write DMG {dmg_filepath}')
command = ('hdiutil',
'attach', '-readwrite',
'-noverify',
'-noautoopen',
dmg_filepath)
subprocess.run(command)
def get_mount_directory_for_volume_name(volume_name: str) -> Path:
"""
Get directory under which the volume will be mounted
"""
return Path('/Volumes') / volume_name
def eject_volume(volume_name: str) -> None:
"""
Eject given volume, if mounted
"""
mount_directory = get_mount_directory_for_volume_name(volume_name)
if not mount_directory.exists():
return
mount_directory_str = str(mount_directory)
print(f'Ejecting volume {volume_name}')
# Figure out which device to eject.
mount_output = subprocess.check_output(['mount']).decode()
device = ''
for line in mount_output.splitlines():
if f'on {mount_directory_str} (' not in line:
continue
tokens = line.split(' ', 3)
if len(tokens) < 3:
continue
if tokens[1] != 'on':
continue
if device:
raise Exception(
f'Multiple devices found for mounting point {mount_directory}')
device = tokens[0]
if not device:
raise Exception(
f'No device found for mounting point {mount_directory}')
print(f'{mount_directory} is mounted as device {device}, ejecting...')
subprocess.run(['diskutil', 'eject', device])
def copy_background_if_needed(background_image_filepath: Path,
mount_directory: Path) -> None:
"""
Copy background to the DMG
If the background image is not specified it will not be copied.
"""
if not background_image_filepath:
print('No background image provided.')
return
print(f'Copying background image {background_image_filepath}')
destination_dir = mount_directory / '.background'
destination_dir.mkdir(exist_ok=True)
destination_filepath = destination_dir / background_image_filepath.name
shutil.copy(background_image_filepath, destination_filepath)
def create_applications_link(mount_directory: Path) -> None:
"""
Create link to /Applications in the given location
"""
print('Creating link to /Applications')
command = ('ln', '-s', '/Applications', mount_directory / ' ')
subprocess.run(command)
def run_applescript(applescript: Path,
volume_name: str,
app_bundles: List[Path],
background_image_filepath: Path) -> None:
"""
Run given applescript to adjust look and feel of the DMG
"""
main_app_bundle = get_main_app_bundle(app_bundles)
with NamedTemporaryFile(
mode='w', suffix='.applescript') as temp_applescript:
print('Adjusting applescript for volume name...')
# Adjust script to the specific volume name.
with open(applescript, mode='r') as input:
for line in input.readlines():
stripped_line = line.strip()
if stripped_line.startswith('tell disk'):
line = re.sub('tell disk ".*"',
f'tell disk "{volume_name}"',
line)
elif stripped_line.startswith('set background picture'):
if not background_image_filepath:
continue
else:
background_image_short = \
'.background:' + background_image_filepath.name
line = re.sub('to file ".*"',
f'to file "{background_image_short}"',
line)
line = line.replace('blender.app', main_app_bundle.name)
temp_applescript.write(line)
temp_applescript.flush()
print('Running applescript...')
command = ('osascript', temp_applescript.name)
subprocess.run(command)
print('Waiting for applescript...')
# NOTE: This is copied from bundle.sh. The exact reason for the sleep
# remains a mystery.
time.sleep(5)
def codesign(subject: Path):
"""
Codesign file or directory
NOTE: For DMG it will also notarize.
"""
command = (CODESIGN_SCRIPT, subject)
subprocess.run(command)
def codesign_app_bundles_in_dmg(mount_directory: str) -> None:
"""
Code sign all binaries and bundles in the mounted directory
"""
print(f'Codesigning all app bundles in {mount_directory}')
codesign(mount_directory)
def codesign_and_notarize_dmg(dmg_filepath: Path) -> None:
"""
Run codesign and notarization pipeline on the DMG
"""
print(f'Codesigning and notarizing DMG {dmg_filepath}')
codesign(dmg_filepath)
def compress_dmg(writable_dmg_filepath: Path,
final_dmg_filepath: Path) -> None:
"""
Compress temporary read-write DMG
"""
command = ('hdiutil', 'convert',
writable_dmg_filepath,
'-format', 'UDZO',
'-o', final_dmg_filepath)
if final_dmg_filepath.exists():
print(f'Removing old compressed DMG {final_dmg_filepath}')
final_dmg_filepath.unlink()
print('Compressing disk image...')
subprocess.run(command)
def create_final_dmg(app_bundles: List[Path],
dmg_filepath: Path,
background_image_filepath: Path,
volume_name: str,
applescript: Path,
codesign: bool) -> None:
"""
Create DMG with all app bundles
Will take care of configuring the background, signing all binaries and app bundles,
and notarizing the DMG.
"""
print('Running all routines to create final DMG')
writable_dmg_filepath = get_writable_dmg_filepath(dmg_filepath)
mount_directory = get_mount_directory_for_volume_name(volume_name)
# Make sure volume is not mounted.
# If it is mounted it will prevent removing old DMG files and could make
# it so app bundles are copied to the wrong place.
eject_volume(volume_name)
create_dmg_image(app_bundles, writable_dmg_filepath, volume_name)
mount_readwrite_dmg(writable_dmg_filepath)
# Run codesign first, prior to copying anything else.
#
# This allows recursing into the content of bundles without worrying about
# possible interference of the Applications symlink.
if codesign:
codesign_app_bundles_in_dmg(mount_directory)
copy_background_if_needed(background_image_filepath, mount_directory)
create_applications_link(mount_directory)
run_applescript(applescript, volume_name, app_bundles,
background_image_filepath)
print('Ejecting read-write DMG image...')
eject_volume(volume_name)
compress_dmg(writable_dmg_filepath, dmg_filepath)
writable_dmg_filepath.unlink()
if codesign:
codesign_and_notarize_dmg(dmg_filepath)
def ensure_dmg_extension(filepath: Path) -> Path:
"""
Make sure the given file has a .dmg extension
"""
if filepath.suffix != '.dmg':
return filepath.with_suffix(f'{filepath.suffix}.dmg')
return filepath
def get_dmg_filepath(requested_name: Path, app_bundles: List[Path]) -> Path:
"""
Get full file path for the final DMG image
Will use the provided one when possible, otherwise will deduce it from
the app bundles.
If the name is deduced, the DMG is stored in the current directory.
"""
if requested_name:
return ensure_dmg_extension(requested_name.absolute())
# TODO(sergey): This is not necessarily the main one.
main_bundle = app_bundles[0]
# Strip .app from the name
return Path(main_bundle.name[:-4] + '.dmg').absolute()
def get_background_image(requested_background_image: Path) -> Path:
"""
Get effective filepath for the background image
"""
if requested_background_image:
return requested_background_image.absolute()
return DARWIN_DIRECTORY / 'background.tif'
def get_applescript(requested_applescript: Path) -> Path:
"""
Get effective filepath for the applescript
"""
if requested_applescript:
return requested_applescript.absolute()
return DARWIN_DIRECTORY / 'blender.applescript'
def get_volume_name_from_dmg_filepath(dmg_filepath: Path) -> str:
"""
Deduce the volume name from the DMG path
Will use the first part of the DMG file name prior to the dash.
"""
tokens = dmg_filepath.stem.split('-')
words = tokens[0].split()
return ' '.join(word.capitalize() for word in words)
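# Illustrative example (hypothetical file name):
#
#   get_volume_name_from_dmg_filepath(Path('blender-2.83.0-macOS.dmg'))
#   # -> 'Blender'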
def get_volume_name(requested_volume_name: str,
dmg_filepath: Path) -> str:
"""
Get effective name for DMG volume
"""
if requested_volume_name:
return requested_volume_name
return get_volume_name_from_dmg_filepath(dmg_filepath)
def main():
parser = create_argument_parser()
args = parser.parse_args()
# Get normalized input parameters.
source_dir = args.source_dir.absolute()
background_image_filepath = get_background_image(args.background_image)
applescript = get_applescript(args.applescript)
codesign = args.codesign
app_bundles = collect_and_log_app_bundles(source_dir)
if not app_bundles:
return
dmg_filepath = get_dmg_filepath(args.dmg, app_bundles)
volume_name = get_volume_name(args.volume_name, dmg_filepath)
print(f'Will produce DMG "{dmg_filepath.name}" (without quotes)')
create_final_dmg(app_bundles,
dmg_filepath,
background_image_filepath,
volume_name,
applescript,
codesign)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,44 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# This is a script which is used as POST-INSTALL one for regular CMake's
# INSTALL target.
# It is used by buildbot workers to sign every binary which is going into
# the final bundle.
# On Windows, Python 3 only provides python.exe; there is no python3.exe.
#
# On other platforms it is possible to have python2 and python3, and a
# symbolic link to python to either of them. So on those platforms use
# an explicit Python version.
if(WIN32)
set(PYTHON_EXECUTABLE python)
else()
set(PYTHON_EXECUTABLE python3)
endif()
execute_process(
COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_LIST_DIR}/worker_codesign.py"
"${CMAKE_INSTALL_PREFIX}"
WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
RESULT_VARIABLE exit_code
)
if(NOT exit_code EQUAL "0")
message(FATAL_ERROR "Non-zero exit code of codesign tool")
endif()

View File

@@ -0,0 +1,74 @@
#!/usr/bin/env python3
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Helper script which takes care of signing provided location.
#
# The location can either be a directory (in which case all eligible binaries
# will be signed) or a single file (in which case a single file will be signed).
#
# This script takes care of all the complexity of communicating between process
# which requests file to be signed and the code signing server.
#
# NOTE: Signing happens in-place.
import argparse
import sys
from pathlib import Path
from codesign.simple_code_signer import SimpleCodeSigner
def create_argument_parser():
parser = argparse.ArgumentParser()
parser.add_argument('path_to_sign', type=Path)
return parser
def main():
parser = create_argument_parser()
args = parser.parse_args()
path_to_sign = args.path_to_sign.absolute()
if sys.platform == 'win32':
# When WIX packaging is used to generate an .msi on Windows, CPack will
# install two different projects to different
# installation prefixes:
#
# - C:\b\build\_CPack_Packages\WIX\Blender
# - C:\b\build\_CPack_Packages\WIX\Unspecified
#
# The annoying part is: CMake's post-install script will only be run
# once, with the install prefix which corresponds to the project which
# was installed last. But we want to sign binaries from all projects.
# So in order to do so we detect that we are running for the CPack
# project used for WIX and force the parent directory (which includes both
# projects) to be signed.
#
# Here we force both projects to be signed.
if path_to_sign.name == 'Unspecified' and 'WIX' in str(path_to_sign):
path_to_sign = path_to_sign.parent
code_signer = SimpleCodeSigner()
code_signer.sign_file_or_directory(path_to_sign)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,135 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import os
import shutil
import buildbot_utils
def get_cmake_options(builder):
codesign_script = os.path.join(
builder.blender_dir, 'build_files', 'buildbot', 'worker_codesign.cmake')
config_file = "build_files/cmake/config/blender_release.cmake"
options = ['-DCMAKE_BUILD_TYPE:STRING=Release',
'-DWITH_GTESTS=ON']
if builder.platform == 'mac':
options.append('-DCMAKE_OSX_ARCHITECTURES:STRING=x86_64')
options.append('-DCMAKE_OSX_DEPLOYMENT_TARGET=10.9')
elif builder.platform == 'win':
options.extend(['-G', 'Visual Studio 16 2019', '-A', 'x64'])
if builder.codesign:
options.extend(['-DPOSTINSTALL_SCRIPT:PATH=' + codesign_script])
elif builder.platform == 'linux':
config_file = "build_files/buildbot/config/blender_linux.cmake"
optix_sdk_dir = os.path.join(builder.blender_dir, '..', '..', 'NVIDIA-Optix-SDK-7.1')
options.append('-DOPTIX_ROOT_DIR:PATH=' + optix_sdk_dir)
# Workaround to build sm_30 kernels with CUDA 10, since CUDA 11 no longer supports that architecture
if builder.platform == 'win':
options.append('-DCUDA10_TOOLKIT_ROOT_DIR:PATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v10.1')
options.append('-DCUDA10_NVCC_EXECUTABLE:FILEPATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v10.1/bin/nvcc.exe')
options.append('-DCUDA11_TOOLKIT_ROOT_DIR:PATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.1')
options.append('-DCUDA11_NVCC_EXECUTABLE:FILEPATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.1/bin/nvcc.exe')
elif builder.platform == 'linux':
options.append('-DCUDA10_TOOLKIT_ROOT_DIR:PATH=/usr/local/cuda-10.1')
options.append('-DCUDA10_NVCC_EXECUTABLE:FILEPATH=/usr/local/cuda-10.1/bin/nvcc')
options.append('-DCUDA11_TOOLKIT_ROOT_DIR:PATH=/usr/local/cuda-11.1')
options.append('-DCUDA11_NVCC_EXECUTABLE:FILEPATH=/usr/local/cuda-11.1/bin/nvcc')
options.append("-C" + os.path.join(builder.blender_dir, config_file))
options.append("-DCMAKE_INSTALL_PREFIX=%s" % (builder.install_dir))
return options
def update_git(builder):
# Do extra git fetch because not all platform/git/buildbot combinations
# update the origin remote, causing buildinfo to detect local changes.
os.chdir(builder.blender_dir)
print("Fetching remotes")
command = ['git', 'fetch', '--all']
buildbot_utils.call(builder.command_prefix + command)
def clean_directories(builder):
# Make sure no garbage remained from the previous run
if os.path.isdir(builder.install_dir):
shutil.rmtree(builder.install_dir)
# Make sure build directory exists and enter it
os.makedirs(builder.build_dir, exist_ok=True)
# Remove buildinfo files to force buildbot to re-generate them.
for buildinfo in ('buildinfo.h', 'buildinfo.h.txt', ):
full_path = os.path.join(builder.build_dir, 'source', 'creator', buildinfo)
if os.path.exists(full_path):
print("Removing {}" . format(buildinfo))
os.remove(full_path)
def cmake_configure(builder):
# CMake configuration
os.chdir(builder.build_dir)
cmake_cache = os.path.join(builder.build_dir, 'CMakeCache.txt')
if os.path.exists(cmake_cache):
print("Removing CMake cache")
os.remove(cmake_cache)
print("CMake configure:")
cmake_options = get_cmake_options(builder)
command = ['cmake', builder.blender_dir] + cmake_options
buildbot_utils.call(builder.command_prefix + command)
def cmake_build(builder):
# CMake build
os.chdir(builder.build_dir)
# NOTE: CPack will build an INSTALL target, which would mean that code
# signing will happen twice when using `make install` and CPack.
# The tricky bit here is that it is not possible to know whether INSTALL
# target is used by CPack or by the buildbot itself. An extra level on top of
# this is that on Windows it is required to build INSTALL target in order
# to have unit test binaries to run.
# So on the one hand we do an extra unneeded code sign on Windows, but on
# the positive side we don't add complexity and don't make the build process
# more fragile trying to avoid this. The signing process is way faster than
# a clean buildbot build, especially with regression tests enabled.
if builder.platform == 'win':
command = ['cmake', '--build', '.', '--target', 'install', '--config', 'Release']
else:
command = ['make', '-s', '-j16', 'install']
print("CMake build:")
buildbot_utils.call(builder.command_prefix + command)
if __name__ == "__main__":
builder = buildbot_utils.create_builder_from_arguments()
update_git(builder)
clean_directories(builder)
cmake_configure(builder)
cmake_build(builder)

View File

@@ -0,0 +1,208 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Runs on buildbot worker, creating a release package using the build
# system and zipping it into buildbot_upload.zip. This is then uploaded
# to the master in the next buildbot step.
import os
import sys
from pathlib import Path
import buildbot_utils
def get_package_name(builder, platform=None):
info = buildbot_utils.VersionInfo(builder)
package_name = 'blender-' + info.full_version
if platform:
package_name += '-' + platform
if not (builder.branch == 'master' or builder.is_release_branch):
if info.is_development_build:
package_name = builder.branch + "-" + package_name
return package_name
def sign_file_or_directory(path):
from codesign.simple_code_signer import SimpleCodeSigner
code_signer = SimpleCodeSigner()
code_signer.sign_file_or_directory(Path(path))
def create_buildbot_upload_zip(builder, package_files):
import zipfile
buildbot_upload_zip = os.path.join(builder.upload_dir, "buildbot_upload.zip")
if os.path.exists(buildbot_upload_zip):
os.remove(buildbot_upload_zip)
try:
z = zipfile.ZipFile(buildbot_upload_zip, "w", compression=zipfile.ZIP_STORED)
for filepath, filename in package_files:
print("Packaged", filename)
z.write(filepath, arcname=filename)
z.close()
except Exception as ex:
sys.stderr.write('Create buildbot_upload.zip failed: ' + str(ex) + '\n')
sys.exit(1)
def create_tar_xz(src, dest, package_name):
# One extra char to remove the leading os.sep when stripping root for package_root
ln = len(src) + 1
flist = list()
# Create list of tuples containing file and archive name
for root, dirs, files in os.walk(src):
package_root = os.path.join(package_name, root[ln:])
flist.extend([(os.path.join(root, file), os.path.join(package_root, file)) for file in files])
import tarfile
# Set UID/GID of archived files to 0, otherwise they'd be owned by whatever
# user compiled the package. If root then unpacks it to /usr/local/ you get
# a security issue.
def _fakeroot(tarinfo):
tarinfo.gid = 0
tarinfo.gname = "root"
tarinfo.uid = 0
tarinfo.uname = "root"
return tarinfo
package = tarfile.open(dest, 'w:xz', preset=9)
for entry in flist:
package.add(entry[0], entry[1], recursive=False, filter=_fakeroot)
package.close()
def cleanup_files(dirpath, extension):
for f in os.listdir(dirpath):
filepath = os.path.join(dirpath, f)
if os.path.isfile(filepath) and f.endswith(extension):
os.remove(filepath)
def pack_mac(builder):
info = buildbot_utils.VersionInfo(builder)
os.chdir(builder.build_dir)
cleanup_files(builder.build_dir, '.dmg')
package_name = get_package_name(builder, 'macOS')
package_filename = package_name + '.dmg'
package_filepath = os.path.join(builder.build_dir, package_filename)
release_dir = os.path.join(builder.blender_dir, 'release', 'darwin')
buildbot_dir = os.path.join(builder.blender_dir, 'build_files', 'buildbot')
bundle_script = os.path.join(buildbot_dir, 'worker_bundle_dmg.py')
command = [bundle_script]
command += ['--dmg', package_filepath]
if info.is_development_build:
background_image = os.path.join(release_dir, 'buildbot', 'background.tif')
command += ['--background-image', background_image]
if builder.codesign:
command += ['--codesign']
command += [builder.install_dir]
buildbot_utils.call(command)
create_buildbot_upload_zip(builder, [(package_filepath, package_filename)])
def pack_win(builder):
info = buildbot_utils.VersionInfo(builder)
os.chdir(builder.build_dir)
cleanup_files(builder.build_dir, '.zip')
# CPack will add the platform name
cpack_name = get_package_name(builder, None)
package_name = get_package_name(builder, 'windows' + str(builder.bits))
command = ['cmake', '-DCPACK_OVERRIDE_PACKAGENAME:STRING=' + cpack_name, '.']
buildbot_utils.call(builder.command_prefix + command)
command = ['cpack', '-G', 'ZIP']
buildbot_utils.call(builder.command_prefix + command)
package_filename = package_name + '.zip'
package_filepath = os.path.join(builder.build_dir, package_filename)
package_files = [(package_filepath, package_filename)]
if info.version_cycle == 'release':
# Installer only for final release builds, otherwise will get
# 'this product is already installed' messages.
command = ['cpack', '-G', 'WIX']
buildbot_utils.call(builder.command_prefix + command)
package_filename = package_name + '.msi'
package_filepath = os.path.join(builder.build_dir, package_filename)
if builder.codesign:
sign_file_or_directory(package_filepath)
package_files += [(package_filepath, package_filename)]
create_buildbot_upload_zip(builder, package_files)
def pack_linux(builder):
blender_executable = os.path.join(builder.install_dir, 'blender')
info = buildbot_utils.VersionInfo(builder)
# Strip all unused symbols from the binaries
print("Stripping binaries...")
buildbot_utils.call(builder.command_prefix + ['strip', '--strip-all', blender_executable])
print("Stripping python...")
py_target = os.path.join(builder.install_dir, info.short_version)
buildbot_utils.call(
builder.command_prefix + [
'find', py_target, '-iname', '*.so', '-exec', 'strip', '-s', '{}', ';',
],
)
# Construct package name
platform_name = 'linux64'
package_name = get_package_name(builder, platform_name)
package_filename = package_name + ".tar.xz"
print("Creating .tar.xz archive")
package_filepath = builder.install_dir + '.tar.xz'
create_tar_xz(builder.install_dir, package_filepath, package_name)
# Create buildbot_upload.zip
create_buildbot_upload_zip(builder, [(package_filepath, package_filename)])
if __name__ == "__main__":
builder = buildbot_utils.create_builder_from_arguments()
# Make sure install directory always exists
os.makedirs(builder.install_dir, exist_ok=True)
if builder.platform == 'mac':
pack_mac(builder)
elif builder.platform == 'win':
pack_win(builder)
elif builder.platform == 'linux':
pack_linux(builder)

View File

@@ -0,0 +1,42 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import buildbot_utils
import os
import sys
def get_ctest_arguments(builder):
args = ['--output-on-failure']
if builder.platform == 'win':
args += ['-C', 'Release']
return args
def test(builder):
os.chdir(builder.build_dir)
command = builder.command_prefix + ['ctest'] + get_ctest_arguments(builder)
buildbot_utils.call(command)
if __name__ == "__main__":
builder = buildbot_utils.create_builder_from_arguments()
test(builder)

View File

@@ -0,0 +1,31 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import buildbot_utils
import os
import sys
if __name__ == "__main__":
builder = buildbot_utils.create_builder_from_arguments()
os.chdir(builder.blender_dir)
# Run make update which handles all libraries and submodules.
make_update = os.path.join(builder.blender_dir, "build_files", "utils", "make_update.py")
buildbot_utils.call([sys.executable, make_update, '--no-blender', "--use-tests", "--use-centos-libraries"])

View File

@@ -20,24 +20,8 @@ if(NOT CLANG_ROOT_DIR AND NOT $ENV{CLANG_ROOT_DIR} STREQUAL "")
set(CLANG_ROOT_DIR $ENV{CLANG_ROOT_DIR})
endif()
if(NOT LLVM_ROOT_DIR)
if(DEFINED LLVM_VERSION)
message(running llvm-config-${LLVM_VERSION})
find_program(LLVM_CONFIG llvm-config-${LLVM_VERSION})
endif()
if(NOT LLVM_CONFIG)
find_program(LLVM_CONFIG llvm-config)
endif()
execute_process(COMMAND ${LLVM_CONFIG} --prefix
OUTPUT_VARIABLE LLVM_ROOT_DIR
OUTPUT_STRIP_TRAILING_WHITESPACE)
set(LLVM_ROOT_DIR ${LLVM_ROOT_DIR} CACHE PATH "Path to the LLVM installation")
endif()
set(_CLANG_SEARCH_DIRS
${CLANG_ROOT_DIR}
${LLVM_ROOT_DIR}
/opt/lib/clang
)

View File

@@ -472,7 +472,8 @@ if(NOT GFLAGS_FOUND)
gflags_report_not_found(
"Could not find gflags include directory, set GFLAGS_INCLUDE_DIR "
"to directory containing gflags/gflags.h")
endif()
endif(NOT GFLAGS_INCLUDE_DIR OR
NOT EXISTS ${GFLAGS_INCLUDE_DIR})
find_library(GFLAGS_LIBRARY NAMES gflags
PATHS ${GFLAGS_LIBRARY_DIR_HINTS}
@@ -483,7 +484,8 @@ if(NOT GFLAGS_FOUND)
gflags_report_not_found(
"Could not find gflags library, set GFLAGS_LIBRARY "
"to full path to libgflags.")
endif()
endif(NOT GFLAGS_LIBRARY OR
NOT EXISTS ${GFLAGS_LIBRARY})
# gflags typically requires a threading library (which is OS dependent), note
# that this defines the CMAKE_THREAD_LIBS_INIT variable. If we are able to
@@ -558,7 +560,8 @@ if(NOT GFLAGS_FOUND)
gflags_report_not_found(
"Caller defined GFLAGS_INCLUDE_DIR:"
" ${GFLAGS_INCLUDE_DIR} does not contain gflags/gflags.h header.")
endif()
endif(GFLAGS_INCLUDE_DIR AND
NOT EXISTS ${GFLAGS_INCLUDE_DIR}/gflags/gflags.h)
# TODO: This regex for gflags library is pretty primitive, we use lowercase
# for comparison to handle Windows using CamelCase library names, could
# this check be better?
@@ -568,7 +571,8 @@ if(NOT GFLAGS_FOUND)
gflags_report_not_found(
"Caller defined GFLAGS_LIBRARY: "
"${GFLAGS_LIBRARY} does not match gflags.")
endif()
endif(GFLAGS_LIBRARY AND
NOT "${LOWERCASE_GFLAGS_LIBRARY}" MATCHES ".*gflags[^/]*")
gflags_reset_find_library_prefix()

View File

@@ -40,7 +40,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(NanoVDB DEFAULT_MSG
IF(NANOVDB_FOUND)
SET(NANOVDB_INCLUDE_DIRS ${NANOVDB_INCLUDE_DIR})
ENDIF()
ENDIF(NANOVDB_FOUND)
MARK_AS_ADVANCED(
NANOVDB_INCLUDE_DIR

View File

@@ -46,7 +46,7 @@ SET(_opencollada_FIND_COMPONENTS
)
# Fedora openCOLLADA package links these statically
# note that order is important here or it won't link
# note that order is important here ot it wont link
SET(_opencollada_FIND_STATIC_COMPONENTS
buffer
ftoa

View File

@@ -44,7 +44,7 @@ SET(PYTHON_LINKFLAGS "-Xlinker -export-dynamic" CACHE STRING "Linker flags for p
MARK_AS_ADVANCED(PYTHON_LINKFLAGS)
# if the user passes these defines as args, we don't want to overwrite
# if the user passes these defines as args, we dont want to overwrite
SET(_IS_INC_DEF OFF)
SET(_IS_INC_CONF_DEF OFF)
SET(_IS_LIB_DEF OFF)
@@ -143,7 +143,7 @@ IF((NOT _IS_INC_DEF) OR (NOT _IS_INC_CONF_DEF) OR (NOT _IS_LIB_DEF) OR (NOT _IS_
SET(_PYTHON_ABI_FLAGS "${_CURRENT_ABI_FLAGS}")
break()
ELSE()
# ensure we don't find values from 2 different ABI versions
# ensure we dont find values from 2 different ABI versions
IF(NOT _IS_INC_DEF)
UNSET(PYTHON_INCLUDE_DIR CACHE)
ENDIF()

View File

@@ -40,7 +40,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(sse2neon DEFAULT_MSG
IF(SSE2NEON_FOUND)
SET(SSE2NEON_INCLUDE_DIRS ${SSE2NEON_INCLUDE_DIR})
ENDIF()
ENDIF(SSE2NEON_FOUND)
MARK_AS_ADVANCED(
SSE2NEON_INCLUDE_DIR

View File

@@ -79,7 +79,7 @@ if(EXISTS ${SOURCE_DIR}/.git)
ERROR_QUIET)
if(NOT _git_below_check STREQUAL "")
# If there're commits between HEAD and upstream this means
# that we're reset-ed to older revision. Use its hash then.
# that we're reset-ed to older revision. Use it's hash then.
execute_process(COMMAND git rev-parse --short=12 HEAD
WORKING_DIRECTORY ${SOURCE_DIR}
OUTPUT_VARIABLE MY_WC_HASH

View File

@@ -305,7 +305,7 @@ def file_check_arg_sizes(tu):
for i, node_child in enumerate(children):
children = list(node_child.get_children())
# skip if we don't have an index...
# skip if we dont have an index...
size_def = args_size_definition.get(i, -1)
if size_def == -1:
@@ -354,7 +354,7 @@ def file_check_arg_sizes(tu):
filepath # always the same but useful when running threaded
))
# we don't really care what we are looking at, just scan entire file for
# we dont really care what we are looking at, just scan entire file for
# function calls.
def recursive_func_call_check(node):

View File

@@ -56,6 +56,10 @@ set(WITH_TBB ON CACHE BOOL "" FORCE)
set(WITH_USD ON CACHE BOOL "" FORCE)
set(WITH_MEM_JEMALLOC ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_CUDA_BINARIES ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_CUBIN_COMPILER OFF CACHE BOOL "" FORCE)
set(CYCLES_CUDA_BINARIES_ARCH sm_30;sm_35;sm_37;sm_50;sm_52;sm_60;sm_61;sm_70;sm_75;sm_86;compute_75 CACHE STRING "" FORCE)
set(WITH_CYCLES_DEVICE_OPTIX ON CACHE BOOL "" FORCE)
# platform dependent options
if(APPLE)
@@ -76,8 +80,4 @@ if(UNIX AND NOT APPLE)
endif()
if(NOT APPLE)
set(WITH_XR_OPENXR ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_DEVICE_OPTIX ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_CUDA_BINARIES ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_CUBIN_COMPILER OFF CACHE BOOL "" FORCE)
endif()

View File

@@ -596,6 +596,14 @@ function(SETUP_LIBDIRS)
link_directories(${GMP_LIBPATH})
endif()
if(WITH_GHOST_WAYLAND)
link_directories(
${wayland-client_LIBRARY_DIRS}
${wayland-egl_LIBRARY_DIRS}
${xkbcommon_LIBRARY_DIRS}
${wayland-cursor_LIBRARY_DIRS})
endif()
if(WIN32 AND NOT UNIX)
link_directories(${PTHREADS_LIBPATH})
endif()
@@ -694,7 +702,7 @@ macro(message_first_run)
endmacro()
# when we have warnings as errors applied globally this
# needs to be removed for some external libs which we don't maintain.
# needs to be removed for some external libs which we dont maintain.
# utility macro
macro(remove_cc_flag
@@ -794,7 +802,7 @@ macro(remove_extra_strict_flags)
endmacro()
# note, we can only append flags on a single file so we need to negate the options.
# at the moment we can't shut up ffmpeg deprecations, so use this, but will
# at the moment we cant shut up ffmpeg deprecations, so use this, but will
# probably add more removals here.
macro(remove_strict_c_flags_file
filenames)
@@ -963,6 +971,14 @@ macro(blender_project_hack_post)
unset(_reset_standard_cflags_rel)
unset(_reset_standard_cxxflags_rel)
# ------------------------------------------------------------------
# workaround for omission in cmake 2.8.4's GNU.cmake, fixed in 2.8.5
if(CMAKE_COMPILER_IS_GNUCC)
if(NOT DARWIN)
set(CMAKE_INCLUDE_SYSTEM_FLAG_C "-isystem ")
endif()
endif()
endmacro()
# pair of macros to allow libraries to be specify files to install, but to

View File

@@ -104,8 +104,8 @@ if(WIN32)
set(CPACK_WIX_LIGHT_EXTRA_FLAGS -dcl:medium)
endif()
set(CPACK_PACKAGE_EXECUTABLES "blender-launcher" "blender")
set(CPACK_CREATE_DESKTOP_LINKS "blender-launcher" "blender")
set(CPACK_PACKAGE_EXECUTABLES "blender" "blender")
set(CPACK_CREATE_DESKTOP_LINKS "blender" "blender")
include(CPack)

View File

@@ -20,6 +20,12 @@
# Libraries configuration for Apple.
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
set(MACOSX_DEPLOYMENT_TARGET 11.00)
else()
set(MACOSX_DEPLOYMENT_TARGET 10.13)
endif()
macro(find_package_wrapper)
# do nothing, just satisfy the macro
endmacro()
@@ -388,10 +394,6 @@ endif()
if(WITH_TBB)
find_package(TBB)
if(NOT TBB_FOUND)
message(WARNING "TBB not found, disabling WITH_TBB")
set(WITH_TBB OFF)
endif()
endif()
if(WITH_POTRACE)

View File

@@ -168,15 +168,21 @@ endif()
unset(OSX_SDKROOT)
# 10.13 is our min. target, if you use higher sdk, weak linking happens
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
# M1 chips run Big Sur onwards.
set(OSX_MIN_DEPLOYMENT_TARGET 11.00)
else()
# 10.13 is our min. target, if you use higher sdk, weak linking happens
set(OSX_MIN_DEPLOYMENT_TARGET 10.13)
endif()
set(CMAKE_OSX_DEPLOYMENT_TARGET "${OSX_MIN_DEPLOYMENT_TARGET}" CACHE STRING "" FORCE)
if(CMAKE_OSX_DEPLOYMENT_TARGET)
if(${CMAKE_OSX_DEPLOYMENT_TARGET} VERSION_LESS ${OSX_MIN_DEPLOYMENT_TARGET})
message(STATUS "Setting deployment target to ${OSX_MIN_DEPLOYMENT_TARGET}, lower versions are not supported")
set(CMAKE_OSX_DEPLOYMENT_TARGET "${OSX_MIN_DEPLOYMENT_TARGET}" CACHE STRING "" FORCE)
endif()
else()
set(CMAKE_OSX_DEPLOYMENT_TARGET "${OSX_MIN_DEPLOYMENT_TARGET}" CACHE STRING "" FORCE)
endif()
if(NOT ${CMAKE_GENERATOR} MATCHES "Xcode")
# Force CMAKE_OSX_DEPLOYMENT_TARGET for makefiles, will not work else (CMake bug?)

View File

@@ -457,10 +457,6 @@ endif()
if(WITH_TBB)
find_package_wrapper(TBB)
if(NOT TBB_FOUND)
message(WARNING "TBB not found, disabling WITH_TBB")
set(WITH_TBB OFF)
endif()
endif()
if(WITH_XR_OPENXR)
@@ -579,17 +575,17 @@ if(WITH_GHOST_WAYLAND)
pkg_check_modules(wayland-scanner REQUIRED wayland-scanner)
pkg_check_modules(xkbcommon REQUIRED xkbcommon)
pkg_check_modules(wayland-cursor REQUIRED wayland-cursor)
pkg_check_modules(dbus REQUIRED dbus-1)
set(WITH_GL_EGL ON)
list(APPEND PLATFORM_LINKLIBS
${wayland-client_LINK_LIBRARIES}
${wayland-egl_LINK_LIBRARIES}
${xkbcommon_LINK_LIBRARIES}
${wayland-cursor_LINK_LIBRARIES}
${dbus_LINK_LIBRARIES}
)
if(WITH_GHOST_WAYLAND)
list(APPEND PLATFORM_LINKLIBS
${wayland-client_LIBRARIES}
${wayland-egl_LIBRARIES}
${xkbcommon_LIBRARIES}
${wayland-cursor_LIBRARIES}
)
endif()
endif()
if(WITH_GHOST_X11)

View File

@@ -119,7 +119,7 @@ string(APPEND CMAKE_MODULE_LINKER_FLAGS " /SAFESEH:NO /ignore:4099")
list(APPEND PLATFORM_LINKLIBS
ws2_32 vfw32 winmm kernel32 user32 gdi32 comdlg32 Comctl32 version
advapi32 shfolder shell32 ole32 oleaut32 uuid psapi Dbghelp Shlwapi
pathcch Shcore
pathcch
)
if(WITH_INPUT_IME)
@@ -144,8 +144,8 @@ add_definitions(-D_ALLOW_KEYWORD_MACROS)
# that both /GR and /GR- are specified.
remove_cc_flag("/GR")
# Make the Windows 8.1 API available for use.
add_definitions(-D_WIN32_WINNT=0x603)
# We want to support Windows 7 level ABI
add_definitions(-D_WIN32_WINNT=0x601)
include(build_files/cmake/platform/platform_win32_bundle_crt.cmake)
remove_cc_flag("/MDd" "/MD" "/Zi")
@@ -261,10 +261,8 @@ if(NOT DEFINED LIBDIR)
else()
message(FATAL_ERROR "32 bit compiler detected, blender no longer provides pre-build libraries for 32 bit windows, please set the LIBDIR cmake variable to your own library folder")
endif()
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 19.29.30130)
message(STATUS "Visual Studio 2022 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
elseif(MSVC_VERSION GREATER 1919)
# Can be 1910..1912
if(MSVC_VERSION GREATER 1919)
message(STATUS "Visual Studio 2019 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
elseif(MSVC_VERSION GREATER 1909)
@@ -550,6 +548,7 @@ if(WITH_OPENIMAGEIO)
set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG})
set(OPENIMAGEIO_DEFINITIONS "-DUSE_TBB=0")
set(OPENCOLORIO_DEFINITIONS "-DDOpenColorIO_SKIP_IMPORTS")
set(OPENIMAGEIO_IDIFF "${OPENIMAGEIO}/bin/idiff.exe")
add_definitions(-DOIIO_STATIC_DEFINE)
add_definitions(-DOIIO_NO_SSE=1)
@@ -595,7 +594,7 @@ if(WITH_OPENCOLORIO)
debug ${OPENCOLORIO_LIBPATH}/libexpatdMD.lib
debug ${OPENCOLORIO_LIBPATH}/pystring_d.lib
)
set(OPENCOLORIO_DEFINITIONS "-DOpenColorIO_SKIP_IMPORTS")
set(OPENCOLORIO_DEFINITIONS)
endif()
if(WITH_OPENVDB)
@@ -676,11 +675,10 @@ if(WITH_SYSTEM_AUDASPACE)
endif()
if(WITH_TBB)
set(TBB_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbb.lib debug ${LIBDIR}/tbb/lib/tbb_debug.lib)
set(TBB_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbb.lib debug ${LIBDIR}/tbb/lib/debug/tbb_debug.lib)
set(TBB_INCLUDE_DIR ${LIBDIR}/tbb/include)
set(TBB_INCLUDE_DIRS ${TBB_INCLUDE_DIR})
if(WITH_TBB_MALLOC_PROXY)
set(TBB_MALLOC_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbbmalloc.lib debug ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib)
add_definitions(-DWITH_TBB_MALLOC)
endif()
endif()

View File

@@ -15,15 +15,6 @@ if(WITH_WINDOWS_BUNDLE_CRT)
include(InstallRequiredSystemLibraries)
# ucrtbase(d).dll cannot be in the manifest, due to the way windows 10 handles
# redirects for this dll, for details see T88813.
foreach(lib ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS})
string(FIND ${lib} "ucrtbase" pos)
if(NOT pos EQUAL -1)
list(REMOVE_ITEM CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS ${lib})
install(FILES ${lib} DESTINATION . COMPONENT Libraries)
endif()
endforeach()
# Install the CRT to the blender.crt Sub folder.
install(FILES ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS} DESTINATION ./blender.crt COMPONENT Libraries)

View File

@@ -1,8 +0,0 @@
Pipeline Config
===============
This configuration file is used by the new buildbot pipeline for the `update-code` step.
It will soon be used by the ../utils/make_update.py script.
Both buildbot and developers will eventually use the same configuration file.
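
The README above describes a JSON config shared between buildbot and make_update.py. As a rough sketch of how such a file could be consumed — the key names mirror the JSON shown in the following hunk, but the loader itself and the file path are assumptions, not code from this diff:

import json

# Path is an assumption; the diff does not show where the file lives.
with open("pipeline_config.json") as f:
    config = json.load(f)

# Walk the git submodules and svn libraries declared for the update-code step.
for submodule in config["update-code"]["git"]["submodules"]:
    print("git", submodule["path"], "->", submodule["branch"], submodule["commit_id"])

for platform, lib in config["update-code"]["svn"]["libraries"].items():
    print("svn", platform, "->", lib["path"], lib["branch"])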

View File

@@ -1,87 +0,0 @@
{
"update-code":
{
"git" :
{
"submodules":
[
{ "path": "release/scripts/addons", "branch": "master", "commit_id": "HEAD" },
{ "path": "release/scripts/addons_contrib", "branch": "master", "commit_id": "HEAD" },
{ "path": "release/datafiles/locale", "branch": "master", "commit_id": "HEAD" },
{ "path": "source/tools", "branch": "master", "commit_id": "HEAD" }
]
},
"svn":
{
"tests": { "path": "lib/tests", "branch": "trunk", "commit_id": "HEAD" },
"libraries":
{
"darwin-x86_64": { "path": "lib/darwin", "branch": "trunk", "commit_id": "HEAD" },
"darwin-arm64": { "path": "lib/darwin_arm64", "branch": "trunk", "commit_id": "HEAD" },
"linux-x86_64": { "path": "lib/linux_centos7_x86_64", "branch": "trunk", "commit_id": "HEAD" },
"windows-amd64": { "path": "lib/win64_vc15", "branch": "trunk", "commit_id": "HEAD" }
}
}
},
"buildbot":
{
"gcc":
{
"version": "9.0"
},
"sdks":
{
"optix":
{
"version": "7.1.0"
},
"cuda10":
{
"version": "10.1"
},
"cuda11":
{
"version": "11.3"
}
},
"cmake":
{
"default":
{
"version": "any",
"overrides":
{
}
},
"darwin-x86_64":
{
"overrides":
{
}
},
"darwin-arm64":
{
"overrides":
{
}
},
"linux-x86_64":
{
"overrides":
{
}
},
"windows-amd64":
{
"overrides":
{
}
}
}
}
}

View File

@@ -1,5 +0,0 @@
Make Utility Scripts
====================
Scripts used only by developers for now

View File

@@ -6,9 +6,6 @@ if %ERRORLEVEL% EQU 0 goto DetectionComplete
call "%~dp0\detect_msvc2019.cmd"
if %ERRORLEVEL% EQU 0 goto DetectionComplete
call "%~dp0\detect_msvc2022.cmd"
if %ERRORLEVEL% EQU 0 goto DetectionComplete
echo Compiler Detection failed. Use verbose switch for more information.
exit /b 1

View File

@@ -1,6 +1,5 @@
if "%BUILD_VS_YEAR%"=="2017" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15
set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"

View File

@@ -19,10 +19,10 @@ if "%WITH_PYDEBUG%"=="1" (
set PYDEBUG_CMAKE_ARGS=-DWINDOWS_PYTHON_DEBUG=On
)
if "%BUILD_VS_YEAR%"=="2017" (
set BUILD_GENERATOR_POST=%WINDOWS_ARCH%
) else (
if "%BUILD_VS_YEAR%"=="2019" (
set BUILD_PLATFORM_SELECT=-A %MSBUILD_PLATFORM%
) else (
set BUILD_GENERATOR_POST=%WINDOWS_ARCH%
)
set BUILD_CMAKE_ARGS=%BUILD_CMAKE_ARGS% -G "Visual Studio %BUILD_VS_VER% %BUILD_VS_YEAR%%BUILD_GENERATOR_POST%" %BUILD_PLATFORM_SELECT% %TESTS_CMAKE_ARGS% %CLANG_CMAKE_ARGS% %ASAN_CMAKE_ARGS% %PYDEBUG_CMAKE_ARGS%

View File

@@ -1,3 +0,0 @@
set BUILD_VS_VER=17
set BUILD_VS_YEAR=2022
call "%~dp0\detect_msvc_vswhere.cmd"

View File

@@ -66,14 +66,6 @@ if NOT "%1" == "" (
) else if "%1" == "2019b" (
set BUILD_VS_YEAR=2019
set VSWHERE_ARGS=-products Microsoft.VisualStudio.Product.BuildTools
) else if "%1" == "2022" (
set BUILD_VS_YEAR=2022
) else if "%1" == "2022pre" (
set BUILD_VS_YEAR=2022
set VSWHERE_ARGS=-prerelease
) else if "%1" == "2022b" (
set BUILD_VS_YEAR=2022
set VSWHERE_ARGS=-products Microsoft.VisualStudio.Product.BuildTools
) else if "%1" == "packagename" (
set BUILD_CMAKE_ARGS=%BUILD_CMAKE_ARGS% -DCPACK_OVERRIDE_PACKAGENAME="%2"
shift /1

View File

@@ -85,7 +85,7 @@ def openBlendFile(filename):
'''
handle = open(filename, 'rb')
magic = ReadString(handle, 7)
if magic in {"BLENDER", "BULLETf"}:
if magic in ("BLENDER", "BULLETf"):
log.debug("normal blendfile detected")
handle.seek(0, os.SEEK_SET)
return handle
@@ -137,7 +137,7 @@ class BlendFile:
fileblock = BlendFileBlock(handle, self)
found_dna_block = False
while not found_dna_block:
if fileblock.Header.Code in {"DNA1", "SDNA"}:
if fileblock.Header.Code in ("DNA1", "SDNA"):
self.Catalog = DNACatalog(self.Header, handle)
found_dna_block = True
else:

View File

@@ -1,4 +1,4 @@
# Doxyfile 1.9.1
# Doxyfile 1.8.16
# This file describes the settings to be used by the documentation system
# doxygen (www.doxygen.org) for a project.
@@ -38,7 +38,7 @@ PROJECT_NAME = Blender
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = V3.0
PROJECT_NUMBER = "V3.0"
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -227,14 +227,6 @@ QT_AUTOBRIEF = NO
MULTILINE_CPP_IS_BRIEF = NO
# By default Python docstrings are displayed as preformatted text and doxygen's
# special commands cannot be used. By setting PYTHON_DOCSTRING to NO the
# doxygen's special commands can be used and the contents of the docstring
# documentation blocks is shown as doxygen documentation.
# The default value is: YES.
PYTHON_DOCSTRING = YES
# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
# documentation from any documented member that it re-implements.
# The default value is: YES.
@@ -271,6 +263,12 @@ TAB_SIZE = 4
ALIASES =
# This tag can be used to specify a number of word-keyword mappings (TCL only).
# A mapping has the form "name=value". For example adding "class=itcl::class"
# will allow you to use the command class in the itcl::class meaning.
TCL_SUBST =
# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
# only. Doxygen will then generate output that is more tailored for C. For
# instance, some of the names that are used will be different. The list of all
@@ -311,22 +309,19 @@ OPTIMIZE_OUTPUT_SLICE = NO
# parses. With this tag you can assign which parser to use for a given
# extension. Doxygen has a built-in mapping, but you can override or extend it
# using this tag. The format is ext=language, where ext is a file extension, and
# language is one of the parsers supported by doxygen: IDL, Java, JavaScript,
# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL,
# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice,
# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser
# tries to guess whether the code is fixed or free formatted code, this is the
# default for Fortran type files). For instance to make doxygen treat .inc files
# as Fortran files (default is PHP), and .f files as C (default is Fortran),
# use: inc=Fortran f=C.
# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat
# .inc files as Fortran files (default is PHP), and .f files as C (default is
# Fortran), use: inc=Fortran f=C.
#
# Note: For files without extension you can use no_extension as a placeholder.
#
# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
# the files are not read by doxygen. When specifying no_extension you should add
# * to the FILE_PATTERNS.
#
# Note see also the list of default file extension mappings.
# the files are not read by doxygen.
EXTENSION_MAPPING =
@@ -460,19 +455,6 @@ TYPEDEF_HIDES_STRUCT = NO
LOOKUP_CACHE_SIZE = 3
# The NUM_PROC_THREADS specifies the number threads doxygen is allowed to use
# during processing. When set to 0 doxygen will based this on the number of
# cores available in the system. You can set it explicitly to a value larger
# than 0 to get more control over the balance between CPU load and processing
# speed. At this moment only the input processing can be done using multiple
# threads. Since this is still an experimental feature the default is set to 1,
# which efficively disables parallel processing. Please report any issues you
# encounter. Generating dot graphs in parallel is controlled by the
# DOT_NUM_THREADS setting.
# Minimum value: 0, maximum value: 32, default value: 1.
NUM_PROC_THREADS = 1
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
@@ -536,13 +518,6 @@ EXTRACT_LOCAL_METHODS = NO
EXTRACT_ANON_NSPACES = NO
# If this flag is set to YES, the name of an unnamed parameter in a declaration
# will be determined by the corresponding definition. By default unnamed
# parameters remain unnamed in the output.
# The default value is: YES.
RESOLVE_UNNAMED_PARAMS = YES
# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
# undocumented members inside documented classes or files. If set to NO these
# members will be included in the various overviews, but no documentation
@@ -560,8 +535,8 @@ HIDE_UNDOC_MEMBERS = NO
HIDE_UNDOC_CLASSES = NO
# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
# declarations. If set to NO, these declarations will be included in the
# documentation.
# (class|struct|union) declarations. If set to NO, these declarations will be
# included in the documentation.
# The default value is: NO.
HIDE_FRIEND_COMPOUNDS = NO
@@ -580,18 +555,11 @@ HIDE_IN_BODY_DOCS = NO
INTERNAL_DOCS = YES
# With the correct setting of option CASE_SENSE_NAMES doxygen will better be
# able to match the capabilities of the underlying filesystem. In case the
# filesystem is case sensitive (i.e. it supports files in the same directory
# whose names only differ in casing), the option must be set to YES to properly
# deal with such files in case they appear in the input. For filesystems that
# are not case sensitive the option should be be set to NO to properly deal with
# output files written for symbols that only differ in casing, such as for two
# classes, one named CLASS and the other named Class, and to also support
# references to files without having to specify the exact matching casing. On
# Windows (including Cygwin) and MacOS, users should typically set this option
# to NO, whereas on Linux or other Unix flavors it should typically be set to
# YES.
# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
# names in lower-case letters. If set to YES, upper-case letters are also
# allowed. This is useful if you have classes or files whose names only differ
# in case and if your file system supports case sensitive file names. Windows
# (including Cygwin) ands Mac users are advised to set this option to NO.
# The default value is: system dependent.
CASE_SENSE_NAMES = YES
@@ -830,10 +798,7 @@ WARN_IF_DOC_ERROR = YES
WARN_NO_PARAMDOC = NO
# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
# a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS
# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but
# at the end of the doxygen process doxygen will return with a non-zero status.
# Possible values are: NO, YES and FAIL_ON_WARNINGS.
# a warning is encountered.
# The default value is: NO.
WARN_AS_ERROR = NO
@@ -875,8 +840,8 @@ INPUT = doxygen.main.h \
# This tag can be used to specify the character encoding of the source files
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
# documentation (see:
# https://www.gnu.org/software/libiconv/) for the list of possible encodings.
# documentation (see: https://www.gnu.org/software/libiconv/) for the list of
# possible encodings.
# The default value is: UTF-8.
INPUT_ENCODING = UTF-8
@@ -889,15 +854,11 @@ INPUT_ENCODING = UTF-8
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# read by doxygen.
#
# Note the list of default checked file patterns might differ from the list of
# default file extension mappings.
#
# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment),
# *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, *.vhdl,
# *.ucf, *.qsf and *.ice.
# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice.
FILE_PATTERNS =
@@ -1125,6 +1086,13 @@ VERBATIM_HEADERS = YES
ALPHABETICAL_INDEX = YES
# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
# which the alphabetical index list will be split.
# Minimum value: 1, maximum value: 20, default value: 5.
# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
COLS_IN_ALPHA_INDEX = 5
# In case all classes in a project start with a common prefix, all classes will
# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
# can be used to specify a prefix (or a list of prefixes) that should be ignored
@@ -1263,9 +1231,9 @@ HTML_TIMESTAMP = YES
# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
# documentation will contain a main index with vertical navigation menus that
# are dynamically created via JavaScript. If disabled, the navigation index will
# are dynamically created via Javascript. If disabled, the navigation index will
# consists of multiple levels of tabs that are statically embedded in every HTML
# page. Disable this option to support browsers that do not have JavaScript,
# page. Disable this option to support browsers that do not have Javascript,
# like the Qt help browser.
# The default value is: YES.
# This tag requires that the tag GENERATE_HTML is set to YES.
@@ -1295,11 +1263,10 @@ HTML_INDEX_NUM_ENTRIES = 100
# If the GENERATE_DOCSET tag is set to YES, additional index files will be
# generated that can be used as input for Apple's Xcode 3 integrated development
# environment (see:
# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To
# create a documentation set, doxygen will generate a Makefile in the HTML
# output directory. Running make will produce the docset in that directory and
# running make install will install the docset in
# environment (see: https://developer.apple.com/xcode/), introduced with OSX
# 10.5 (Leopard). To create a documentation set, doxygen will generate a
# Makefile in the HTML output directory. Running make will produce the docset in
# that directory and running make install will install the docset in
# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy
# genXcode/_index.html for more information.
@@ -1341,8 +1308,8 @@ DOCSET_PUBLISHER_NAME = Publisher
# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
# (see:
# https://www.microsoft.com/en-us/download/details.aspx?id=21138) on Windows.
# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on
# Windows.
#
# The HTML Help Workshop contains a compiler that can convert all HTML output
# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
@@ -1372,7 +1339,7 @@ CHM_FILE = blender.chm
HHC_LOCATION = "C:/Program Files (x86)/HTML Help Workshop/hhc.exe"
# The GENERATE_CHI flag controls if a separate .chi index file is generated
# (YES) or that it should be included in the main .chm file (NO).
# (YES) or that it should be included in the master .chm file (NO).
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
@@ -1417,8 +1384,7 @@ QCH_FILE =
# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
# Project output. For more information please see Qt Help Project / Namespace
# (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1426,8 +1392,8 @@ QHP_NAMESPACE = org.doxygen.Project
# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
# Help Project output. For more information please see Qt Help Project / Virtual
# Folders (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders).
# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-
# folders).
# The default value is: doc.
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1435,16 +1401,16 @@ QHP_VIRTUAL_FOLDER = doc
# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
# filter to add. For more information please see Qt Help Project / Custom
# Filters (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_CUST_FILTER_NAME =
# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
# custom filter to add. For more information please see Qt Help Project / Custom
# Filters (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_CUST_FILTER_ATTRS =
@@ -1456,9 +1422,9 @@ QHP_CUST_FILTER_ATTRS =
QHP_SECT_FILTER_ATTRS =
# The QHG_LOCATION tag can be used to specify the location (absolute path
# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to
# run qhelpgenerator on the generated .qhp file.
# The QHG_LOCATION tag can be used to specify the location of Qt's
# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
# generated .qhp file.
# This tag requires that the tag GENERATE_QHP is set to YES.
QHG_LOCATION =
@@ -1535,17 +1501,6 @@ TREEVIEW_WIDTH = 246
EXT_LINKS_IN_WINDOW = NO
# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg
# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see
# https://inkscape.org) to generate formulas as SVG images instead of PNGs for
# the HTML output. These images will generally look nicer at scaled resolutions.
# Possible values are: png (the default) and svg (looks nicer but requires the
# pdf2svg or inkscape tool).
# The default value is: png.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_FORMULA_FORMAT = png
# Use this tag to change the font size of LaTeX formulas included as images in
# the HTML documentation. When you change the font size after a successful
# doxygen run you need to manually remove any form_*.png images from the HTML
@@ -1566,14 +1521,8 @@ FORMULA_FONTSIZE = 10
FORMULA_TRANSPARENT = YES
# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands
# to create new LaTeX commands to be used in formulas as building blocks. See
# the section "Including formulas" for details.
FORMULA_MACROFILE =
# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
# https://www.mathjax.org) which uses client side JavaScript for the rendering
# https://www.mathjax.org) which uses client side Javascript for the rendering
# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
# installed or if you want to formulas look prettier in the HTML output. When
# enabled you may also need to install MathJax separately and configure the path
@@ -1585,7 +1534,7 @@ USE_MATHJAX = NO
# When MathJax is enabled you can set the default output format to be used for
# the MathJax output. See the MathJax site (see:
# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details.
# http://docs.mathjax.org/en/latest/output.html) for more details.
# Possible values are: HTML-CSS (which is slower, but has the best
# compatibility), NativeMML (i.e. MathML) and SVG.
# The default value is: HTML-CSS.
@@ -1601,7 +1550,7 @@ MATHJAX_FORMAT = HTML-CSS
# Content Delivery Network so you can quickly see the result without installing
# MathJax. However, it is strongly recommended to install a local copy of
# MathJax from https://www.mathjax.org before deployment.
# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2.
# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/.
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_RELPATH = http://www.mathjax.org/mathjax
@@ -1615,8 +1564,7 @@ MATHJAX_EXTENSIONS =
# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
# of code that will be used on startup of the MathJax code. See the MathJax site
# (see:
# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an
# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
# example see the documentation.
# This tag requires that the tag USE_MATHJAX is set to YES.
@@ -1644,7 +1592,7 @@ MATHJAX_CODEFILE =
SEARCHENGINE = NO
# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
# implemented using a web server instead of a web client using JavaScript. There
# implemented using a web server instead of a web client using Javascript. There
# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
# setting. When disabled, doxygen will generate a PHP script for searching and
# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
@@ -1663,8 +1611,7 @@ SERVER_BASED_SEARCH = NO
#
# Doxygen ships with an example indexer (doxyindexer) and search engine
# (doxysearch.cgi) which are based on the open source search engine library
# Xapian (see:
# https://xapian.org/).
# Xapian (see: https://xapian.org/).
#
# See the section "External Indexing and Searching" for details.
# The default value is: NO.
@@ -1677,9 +1624,8 @@ EXTERNAL_SEARCH = NO
#
# Doxygen ships with an example indexer (doxyindexer) and search engine
# (doxysearch.cgi) which are based on the open source search engine library
# Xapian (see:
# https://xapian.org/). See the section "External Indexing and Searching" for
# details.
# Xapian (see: https://xapian.org/). See the section "External Indexing and
# Searching" for details.
# This tag requires that the tag SEARCHENGINE is set to YES.
SEARCHENGINE_URL =
@@ -1843,11 +1789,9 @@ LATEX_EXTRA_FILES =
PDF_HYPERLINKS = NO
# If the USE_PDFLATEX tag is set to YES, doxygen will use the engine as
# specified with LATEX_CMD_NAME to generate the PDF file directly from the LaTeX
# files. Set this option to YES, to get a higher quality PDF documentation.
#
# See also section LATEX_CMD_NAME for selecting the engine.
# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
# the PDF file directly from the LaTeX files. Set this option to YES, to get a
# higher quality PDF documentation.
# The default value is: YES.
# This tag requires that the tag GENERATE_LATEX is set to YES.
@@ -2182,8 +2126,7 @@ INCLUDE_FILE_PATTERNS =
# recursively expanded use the := operator instead of the = operator.
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
PREDEFINED = BUILD_DATE \
DOXYGEN=1
PREDEFINED = BUILD_DATE
# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
# tag can be used to specify a list of macro names that should be expanded. The
@@ -2360,31 +2303,9 @@ UML_LOOK = YES
# but if the number exceeds 15, the total amount of fields shown is limited to
# 10.
# Minimum value: 0, maximum value: 100, default value: 10.
# This tag requires that the tag UML_LOOK is set to YES.
UML_LIMIT_NUM_FIELDS = 10
# If the DOT_UML_DETAILS tag is set to NO, doxygen will show attributes and
# methods without types and arguments in the UML graphs. If the DOT_UML_DETAILS
# tag is set to YES, doxygen will add type and arguments for attributes and
# methods in the UML graphs. If the DOT_UML_DETAILS tag is set to NONE, doxygen
# will not generate fields with class member information in the UML graphs. The
# class diagrams will look similar to the default class diagrams but using UML
# notation for the relationships.
# Possible values are: NO, YES and NONE.
# The default value is: NO.
# This tag requires that the tag UML_LOOK is set to YES.
DOT_UML_DETAILS = NO
# The DOT_WRAP_THRESHOLD tag can be used to set the maximum number of characters
# to display on a single line. If the actual line length exceeds this threshold
# significantly it will wrapped across multiple lines. Some heuristics are apply
# to avoid ugly line breaks.
# Minimum value: 0, maximum value: 1000, default value: 17.
# This tag requires that the tag HAVE_DOT is set to YES.
DOT_WRAP_THRESHOLD = 17
UML_LIMIT_NUM_FIELDS = 10
# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
# collaboration graphs will show the relations between templates and their
@@ -2575,11 +2496,9 @@ DOT_MULTI_TARGETS = YES
GENERATE_LEGEND = YES
# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate
# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
# files that are used to generate the various graphs.
#
# Note: This setting is not only used for dot files but also for msc and
# plantuml temporary files.
# The default value is: YES.
# This tag requires that the tag HAVE_DOT is set to YES.
DOT_CLEANUP = YES

View File

@@ -4,9 +4,7 @@ Simple Render Engine
"""
import bpy
import array
import gpu
from gpu_extras.presets import draw_texture_2d
import bgl
class CustomRenderEngine(bpy.types.RenderEngine):
@@ -102,7 +100,8 @@ class CustomRenderEngine(bpy.types.RenderEngine):
dimensions = region.width, region.height
# Bind shader that converts from scene linear to display space,
gpu.state.blend_set('ALPHA_PREMULT')
bgl.glEnable(bgl.GL_BLEND)
bgl.glBlendFunc(bgl.GL_ONE, bgl.GL_ONE_MINUS_SRC_ALPHA)
self.bind_display_space_shader(scene)
if not self.draw_data or self.draw_data.dimensions != dimensions:
@@ -111,7 +110,7 @@ class CustomRenderEngine(bpy.types.RenderEngine):
self.draw_data.draw()
self.unbind_display_space_shader()
gpu.state.blend_set('NONE')
bgl.glDisable(bgl.GL_BLEND)
class CustomDrawData:
@@ -120,21 +119,68 @@ class CustomDrawData:
self.dimensions = dimensions
width, height = dimensions
pixels = width * height * array.array('f', [0.1, 0.2, 0.1, 1.0])
pixels = gpu.types.Buffer('FLOAT', width * height * 4, pixels)
pixels = [0.1, 0.2, 0.1, 1.0] * width * height
pixels = bgl.Buffer(bgl.GL_FLOAT, width * height * 4, pixels)
# Generate texture
self.texture = gpu.types.GPUTexture((width, height), format='RGBA16F', data=pixels)
self.texture = bgl.Buffer(bgl.GL_INT, 1)
bgl.glGenTextures(1, self.texture)
bgl.glActiveTexture(bgl.GL_TEXTURE0)
bgl.glBindTexture(bgl.GL_TEXTURE_2D, self.texture[0])
bgl.glTexImage2D(bgl.GL_TEXTURE_2D, 0, bgl.GL_RGBA16F, width, height, 0, bgl.GL_RGBA, bgl.GL_FLOAT, pixels)
bgl.glTexParameteri(bgl.GL_TEXTURE_2D, bgl.GL_TEXTURE_MIN_FILTER, bgl.GL_LINEAR)
bgl.glTexParameteri(bgl.GL_TEXTURE_2D, bgl.GL_TEXTURE_MAG_FILTER, bgl.GL_LINEAR)
bgl.glBindTexture(bgl.GL_TEXTURE_2D, 0)
# Note: This is just a didactic example.
# In this case it would be more convenient to fill the texture with:
# self.texture.clear('FLOAT', value=[0.1, 0.2, 0.1, 1.0])
# Bind shader that converts from scene linear to display space,
# use the scene's color management settings.
shader_program = bgl.Buffer(bgl.GL_INT, 1)
bgl.glGetIntegerv(bgl.GL_CURRENT_PROGRAM, shader_program)
# Generate vertex array
self.vertex_array = bgl.Buffer(bgl.GL_INT, 1)
bgl.glGenVertexArrays(1, self.vertex_array)
bgl.glBindVertexArray(self.vertex_array[0])
texturecoord_location = bgl.glGetAttribLocation(shader_program[0], "texCoord")
position_location = bgl.glGetAttribLocation(shader_program[0], "pos")
bgl.glEnableVertexAttribArray(texturecoord_location)
bgl.glEnableVertexAttribArray(position_location)
# Generate geometry buffers for drawing textured quad
position = [0.0, 0.0, width, 0.0, width, height, 0.0, height]
position = bgl.Buffer(bgl.GL_FLOAT, len(position), position)
texcoord = [0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0]
texcoord = bgl.Buffer(bgl.GL_FLOAT, len(texcoord), texcoord)
self.vertex_buffer = bgl.Buffer(bgl.GL_INT, 2)
bgl.glGenBuffers(2, self.vertex_buffer)
bgl.glBindBuffer(bgl.GL_ARRAY_BUFFER, self.vertex_buffer[0])
bgl.glBufferData(bgl.GL_ARRAY_BUFFER, 32, position, bgl.GL_STATIC_DRAW)
bgl.glVertexAttribPointer(position_location, 2, bgl.GL_FLOAT, bgl.GL_FALSE, 0, None)
bgl.glBindBuffer(bgl.GL_ARRAY_BUFFER, self.vertex_buffer[1])
bgl.glBufferData(bgl.GL_ARRAY_BUFFER, 32, texcoord, bgl.GL_STATIC_DRAW)
bgl.glVertexAttribPointer(texturecoord_location, 2, bgl.GL_FLOAT, bgl.GL_FALSE, 0, None)
bgl.glBindBuffer(bgl.GL_ARRAY_BUFFER, 0)
bgl.glBindVertexArray(0)
def __del__(self):
del self.texture
bgl.glDeleteBuffers(2, self.vertex_buffer)
bgl.glDeleteVertexArrays(1, self.vertex_array)
bgl.glBindTexture(bgl.GL_TEXTURE_2D, 0)
bgl.glDeleteTextures(1, self.texture)
def draw(self):
draw_texture_2d(self.texture, (0, 0), self.texture.width, self.texture.height)
bgl.glActiveTexture(bgl.GL_TEXTURE0)
bgl.glBindTexture(bgl.GL_TEXTURE_2D, self.texture[0])
bgl.glBindVertexArray(self.vertex_array[0])
bgl.glDrawArrays(bgl.GL_TRIANGLE_FAN, 0, 4)
bgl.glBindVertexArray(0)
bgl.glBindTexture(bgl.GL_TEXTURE_2D, 0)
# RenderEngines also need to tell UI Panels that they are compatible with.

View File

@@ -29,7 +29,7 @@ with offscreen.bind():
amount = 10
for i in range(-amount, amount + 1):
x_pos = i / amount
draw_circle_2d((x_pos, 0.0), (1, 1, 1, 1), 0.5, segments=200)
draw_circle_2d((x_pos, 0.0), (1, 1, 1, 1), 0.5, 200)
# Drawing the generated texture in 3D space

View File

@@ -34,9 +34,7 @@ with offscreen.bind():
for i in range(RING_AMOUNT):
draw_circle_2d(
(random.uniform(-1, 1), random.uniform(-1, 1)),
(1, 1, 1, 1), random.uniform(0.1, 1),
segments=20,
)
(1, 1, 1, 1), random.uniform(0.1, 1), 20)
buffer = fb.read_color(0, 0, WIDTH, HEIGHT, 4, 0, 'UBYTE')

View File

@@ -1,5 +0,0 @@
# Compute local object transformation matrix:
if obj.rotation_mode == 'QUATERNION':
matrix = mathutils.Matrix.LocRotScale(obj.location, obj.rotation_quaternion, obj.scale)
else:
matrix = mathutils.Matrix.LocRotScale(obj.location, obj.rotation_euler, obj.scale)
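
Matrix.LocRotScale, used in the snippet above and in the decompose example that follows, builds a 4x4 transform from a location, a rotation (quaternion, Euler or 3x3 matrix) and a scale. A small sketch of the equivalence with composing the three transforms by hand — the specific values are only illustrative:

import math
import mathutils

loc = mathutils.Vector((2.0, 0.0, 0.0))
rot = mathutils.Euler((0.0, 0.0, math.radians(45.0)), 'XYZ')
sca = mathutils.Vector((1.0, 2.0, 1.0))

# One call...
mat_a = mathutils.Matrix.LocRotScale(loc, rot, sca)

# ...versus composing translation @ rotation @ scale explicitly.
mat_b = (mathutils.Matrix.Translation(loc) @
         rot.to_matrix().to_4x4() @
         mathutils.Matrix.Diagonal(sca.to_4d()))

print(mat_a)
print(mat_b)  # matches mat_a up to floating point precision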

View File

@@ -14,14 +14,10 @@ mat_rot = mathutils.Matrix.Rotation(math.radians(45.0), 4, 'X')
mat_out = mat_loc @ mat_rot @ mat_sca
print(mat_out)
# extract components back out of the matrix as two vectors and a quaternion
# extract components back out of the matrix
loc, rot, sca = mat_out.decompose()
print(loc, rot, sca)
# recombine extracted components
mat_out2 = mathutils.Matrix.LocRotScale(loc, rot, sca)
print(mat_out2)
# it can also be useful to access components of a matrix directly
mat = mathutils.Matrix()
mat[0][0], mat[1][0], mat[2][0] = 0.0, 1.0, 2.0

View File

@@ -1,13 +1,2 @@
sphinx==3.5.4
# Sphinx dependencies that are important
Jinja2==2.11.3
Pygments==2.9.0
docutils==0.16
snowballstemmer==2.1.0
babel==2.9.1
requests==2.25.1
# Only needed to match the theme used for the official documentation.
# Without this theme, the default theme will be used.
Sphinx==3.5.3
sphinx_rtd_theme==0.5.2

View File

@@ -545,13 +545,6 @@ def range_str(val):
def example_extract_docstring(filepath):
'''
Return (text, line_no, line_no_has_content) where:
- ``text`` is the doc-string text.
- ``line_no`` is the line the doc-string text ends.
- ``line_no_has_content`` when False, this file only contains a doc-string.
There is no need to include the remainder.
'''
file = open(filepath, "r", encoding="utf-8")
line = file.readline()
line_no = 0
@@ -560,7 +553,7 @@ def example_extract_docstring(filepath):
line_no += 1
else:
file.close()
return "", 0, True
return "", 0, False
for line in file:
line_no += 1
@@ -954,7 +947,7 @@ def pymodule2sphinx(basepath, module_name, module, title, module_all_extra):
# constant, not much fun we can do here except to list it.
# TODO, figure out some way to document these!
fw(".. data:: %s\n\n" % attribute)
write_indented_lines(" ", fw, "Constant value %s" % repr(value), False)
write_indented_lines(" ", fw, "constant value %s" % repr(value), False)
fw("\n")
else:
BPY_LOGGER.debug("\tnot documenting %s.%s of %r type" % (module_name, attribute, value_type.__name__))
@@ -1036,6 +1029,7 @@ def pymodule2sphinx(basepath, module_name, module, title, module_all_extra):
context_type_map = {
# context_member: (RNA type, is_collection)
"active_annotation_layer": ("GPencilLayer", False),
"active_base": ("ObjectBase", False),
"active_bone": ("EditBone", False),
"active_gpencil_frame": ("GreasePencilLayer", True),
"active_gpencil_layer": ("GPencilLayer", True),
@@ -1246,7 +1240,7 @@ def pyrna_enum2sphinx(prop, use_empty_descriptions=False):
"%s.\n" % (
identifier,
# Account for multi-line enum descriptions, allowing this to be a block of text.
indent(" -- ".join(escape_rst(val) for val in (name, description) if val) or "Undocumented", " "),
indent(", ".join(escape_rst(val) for val in (name, description) if val) or "Undocumented", " "),
)
for identifier, name, description in prop.enum_items
])
@@ -1555,8 +1549,8 @@ def pyrna2sphinx(basepath):
fw(".. hlist::\n")
fw(" :columns: 2\n\n")
# Context does its own thing.
# "active_object": ("Object", False),
# context does its own thing
# "active_base": ("ObjectBase", False),
for ref_attr, (ref_type, ref_is_seq) in sorted(context_type_map.items()):
if ref_type == struct_id:
fw(" * :mod:`bpy.context.%s`\n" % ref_attr)

View File

@@ -75,7 +75,6 @@ void FFMPEGWriter::encode()
m_frame->nb_samples = m_input_samples;
m_frame->format = m_codecCtx->sample_fmt;
m_frame->channel_layout = m_codecCtx->channel_layout;
m_frame->channels = m_specs.channels;
if(avcodec_fill_audio_frame(m_frame, m_specs.channels, m_codecCtx->sample_fmt, reinterpret_cast<data_t*>(data), m_input_buffer.getSize(), 0) < 0)
AUD_THROW(FileException, "File couldn't be written, filling the audio frame failed with ffmpeg.");

View File

@@ -29,10 +29,10 @@
#if OPENVDB == 1
# include "openvdb/openvdb.h"
# include "openvdb/points/PointConversion.h"
# include "openvdb/points/PointCount.h"
# include "openvdb/tools/Clip.h"
# include "openvdb/tools/Dense.h"
# include <openvdb/points/PointConversion.h>
# include <openvdb/points/PointCount.h>
# include <openvdb/tools/Clip.h>
# include <openvdb/tools/Dense.h>
#endif
#define POSITION_NAME "P"
@@ -519,7 +519,7 @@ int writeObjectsVDB(const string &filename,
}
}
// Write only if there is at least one grid, optionally write with compression.
// Write only if the is at least one grid, optionally write with compression.
if (gridsVDB.size()) {
int vdb_flags = openvdb::io::COMPRESS_ACTIVE_MASK;
switch (compression) {
@@ -534,8 +534,7 @@ int writeObjectsVDB(const string &filename,
}
case COMPRESSION_BLOSC: {
# if OPENVDB_BLOSC == 1
// Cannot use |= here, causes segfault with blosc 1.5.0 (== recommended version)
vdb_flags = openvdb::io::COMPRESS_BLOSC;
vdb_flags |= openvdb::io::COMPRESS_BLOSC;
# else
debMsg("OpenVDB was built without Blosc support, using Zip compression instead", 1);
vdb_flags |= openvdb::io::COMPRESS_ZIP;

View File

@@ -384,7 +384,6 @@ class FluidSolver : public PbClass {
GridStorage<Real> mGrids4dReal;
GridStorage<Vec3> mGrids4dVec;
GridStorage<Vec4> mGrids4dVec4;
public:
PbArgs _args;
}

View File

@@ -42,7 +42,7 @@ inline void updateQtGui(bool full, int frame, float time, const std::string &cur
# ifdef _DEBUG
# define DEBUG 1
# endif // _DEBUG
#endif // DEBUG
#endif // DEBUG
// Standard exception
class Error : public std::exception {

View File

@@ -1,3 +1,3 @@
#define MANTA_GIT_VERSION "commit 8fbebe02459b7f72575872c20961f7cb757db408"
#define MANTA_GIT_VERSION "commit 39b7a415721ecbf6643612a24e8eadd221aeb934"

View File

@@ -389,7 +389,6 @@ class GridBase : public PbClass {
Real mDx;
bool m3D; // precomputed Z shift: to ensure 2D compatibility, always use this instead of sx*sy !
IndexInt mStrideZ;
public:
PbArgs _args;
}

View File

@@ -326,7 +326,6 @@ class Grid4dBase : public PbClass {
// precomputed Z,T shift: to ensure 2D compatibility, always use this instead of sx*sy !
IndexInt mStrideZ;
IndexInt mStrideT;
public:
PbArgs _args;
}
@@ -951,7 +950,6 @@ template<class T> class Grid4d : public Grid4dBase {
protected:
T *mData;
public:
PbArgs _args;
}

View File

@@ -71,19 +71,6 @@ class ParticleBase;
for (int j = bnd; j < (grid).getSizeY() - bnd; ++j) \
for (int i = bnd; i < (grid).getSizeX() - bnd; ++i)
#define FOR_NEIGHBORS_BND(grid, radius, bnd) \
for (int zj = ((grid).is3D() ? std::max(bnd, k - radius) : 0); \
zj <= ((grid).is3D() ? std::min(k + radius, (grid).getSizeZ() - 1 - bnd) : 0); \
zj++) \
for (int yj = std::max(bnd, j - radius); \
yj <= std::min(j + radius, (grid).getSizeY() - 1 - bnd); \
yj++) \
for (int xj = std::max(bnd, i - radius); \
xj <= std::min(i + radius, (grid).getSizeX() - 1 - bnd); \
xj++)
#define FOR_NEIGHBORS(grid, radius) FOR_NEIGHBORS_BND(grid, radius, 0)
//! Basic data structure for kernel data, initialized based on kernel type (e.g. single, idx, etc).
struct KernelBase {
int maxX, maxY, maxZ, minZ, maxT, minT;

View File

@@ -266,7 +266,6 @@ class LevelsetGrid : public Grid<Real> {
}
static Real invalidTimeValue();
public:
PbArgs _args;
}

View File

@@ -796,7 +796,6 @@ class Mesh : public PbClass {
std::vector<MeshDataImpl<int> *>
mMdataInt; //! indicate that mdata of this mesh is copied, and needs to be freed
bool mFreeMdata;
public:
PbArgs _args;
}
@@ -882,7 +881,6 @@ class MeshDataBase : public PbClass {
protected:
Mesh *mMesh;
public:
PbArgs _args;
}
@@ -1647,7 +1645,6 @@ template<class T> class MeshDataImpl : public MeshDataBase {
//! optionally , we might have an associated grid from which to grab new data
Grid<T> *mpGridSource; //! unfortunately , we need to distinguish mac vs regular vec3
bool mGridSourceMAC;
public:
PbArgs _args;
}

View File

@@ -154,7 +154,6 @@ class MovingObstacle : public PbClass {
int mEmptyType;
int mID;
static int sIDcnt;
public:
PbArgs _args;
}

View File

@@ -236,7 +236,6 @@ class WaveletNoiseField : public PbClass {
static int randomSeed;
// global reference count for noise tile
static std::atomic<int> mNoiseReferenceCount;
public:
PbArgs _args;
}

View File

@@ -205,7 +205,6 @@ class ParticleBase : public PbClass {
//! custom seed for particle systems, used by plugins
int mSeed; //! fix global random seed storage, used mainly by functions in this class
static int globalSeed;
public:
PbArgs _args;
}
@@ -629,7 +628,6 @@ template<class S> class ParticleSystem : public ParticleBase {
std::vector<S> mData;
//! reduce storage , called by doCompress
virtual void compress();
public:
PbArgs _args;
}
@@ -920,7 +918,6 @@ class ParticleIndexSystem : public ParticleSystem<ParticleIndexData> {
return -1;
}
};
public:
PbArgs _args;
}
@@ -985,7 +982,6 @@ template<class DATA, class CON> class ConnectedParticleSystem : public ParticleS
protected:
std::vector<CON> mSegments;
virtual void compress();
public:
PbArgs _args;
}
@@ -1075,7 +1071,6 @@ class ParticleDataBase : public PbClass {
protected:
ParticleBase *mpParticleSys;
public:
PbArgs _args;
}
@@ -1848,7 +1843,6 @@ template<class T> class ParticleDataImpl : public ParticleDataBase {
//! optionally , we might have an associated grid from which to grab new data
Grid<T> *mpGridSource; //! unfortunately , we need to distinguish mac vs regular vec3
bool mGridSourceMAC;
public:
PbArgs _args;
}

View File

@@ -822,29 +822,33 @@ struct ComputeUnionLevelsetPindex : public KernelBase {
{
const Vec3 gridPos = Vec3(i, j, k) + Vec3(0.5); // shifted by half cell
Real phiv = radius * 1.0; // outside
const int r = int(radius) + 1;
FOR_NEIGHBORS(phi, r)
{
int r = int(radius) + 1;
int rZ = phi.is3D() ? r : 0;
for (int zj = k - rZ; zj <= k + rZ; zj++)
for (int yj = j - r; yj <= j + r; yj++)
for (int xj = i - r; xj <= i + r; xj++) {
if (!phi.isInBounds(Vec3i(xj, yj, zj)))
continue;
// note, for the particle indices in indexSys the access is periodic (ie, dont skip for eg
// inBounds(sx,10,10)
IndexInt isysIdxS = index.index(xj, yj, zj);
IndexInt pStart = index(isysIdxS), pEnd = 0;
if (phi.isInBounds(isysIdxS + 1))
pEnd = index(isysIdxS + 1);
else
pEnd = indexSys.size();
// note, for the particle indices in indexSys the access is periodic (ie, dont skip for
// eg inBounds(sx,10,10)
IndexInt isysIdxS = index.index(xj, yj, zj);
IndexInt pStart = index(isysIdxS), pEnd = 0;
if (phi.isInBounds(isysIdxS + 1))
pEnd = index(isysIdxS + 1);
else
pEnd = indexSys.size();
// now loop over particles in cell
for (IndexInt p = pStart; p < pEnd; ++p) {
const int psrc = indexSys[p].sourceIndex;
if (ptype && ((*ptype)[psrc] & exclude))
continue;
const Vec3 pos = parts[psrc].pos;
phiv = std::min(phiv, fabs(norm(gridPos - pos)) - radius);
}
}
// now loop over particles in cell
for (IndexInt p = pStart; p < pEnd; ++p) {
const int psrc = indexSys[p].sourceIndex;
if (ptype && ((*ptype)[psrc] & exclude))
continue;
const Vec3 pos = parts[psrc].pos;
phiv = std::min(phiv, fabs(norm(gridPos - pos)) - radius);
}
}
phi(i, j, k) = phiv;
}
inline const Grid<int> &getArg0()
@@ -1022,35 +1026,39 @@ struct ComputeAveragedLevelsetWeight : public KernelBase {
// loop over neighborhood, similar to ComputeUnionLevelsetPindex
const Real sradiusInv = 1. / (4. * radius * radius);
const int r = int(radius) + 1;
int r = int(1. * radius) + 1;
int rZ = phi.is3D() ? r : 0;
// accumulators
Real wacc = 0.;
Vec3 pacc = Vec3(0.);
Real racc = 0.;
FOR_NEIGHBORS(phi, r)
{
for (int zj = k - rZ; zj <= k + rZ; zj++)
for (int yj = j - r; yj <= j + r; yj++)
for (int xj = i - r; xj <= i + r; xj++) {
if (!phi.isInBounds(Vec3i(xj, yj, zj)))
continue;
IndexInt isysIdxS = index.index(xj, yj, zj);
IndexInt pStart = index(isysIdxS), pEnd = 0;
if (phi.isInBounds(isysIdxS + 1))
pEnd = index(isysIdxS + 1);
else
pEnd = indexSys.size();
for (IndexInt p = pStart; p < pEnd; ++p) {
IndexInt psrc = indexSys[p].sourceIndex;
if (ptype && ((*ptype)[psrc] & exclude))
continue;
IndexInt isysIdxS = index.index(xj, yj, zj);
IndexInt pStart = index(isysIdxS), pEnd = 0;
if (phi.isInBounds(isysIdxS + 1))
pEnd = index(isysIdxS + 1);
else
pEnd = indexSys.size();
for (IndexInt p = pStart; p < pEnd; ++p) {
IndexInt psrc = indexSys[p].sourceIndex;
if (ptype && ((*ptype)[psrc] & exclude))
continue;
Vec3 pos = parts[psrc].pos;
Real s = normSquare(gridPos - pos) * sradiusInv;
// Real w = std::max(0., cubed(1.-s) );
Real w = std::max(0., (1. - s)); // a bit smoother
wacc += w;
racc += radius * w;
pacc += pos * w;
}
}
Vec3 pos = parts[psrc].pos;
Real s = normSquare(gridPos - pos) * sradiusInv;
// Real w = std::max(0., cubed(1.-s) );
Real w = std::max(0., (1. - s)); // a bit smoother
wacc += w;
racc += radius * w;
pacc += pos * w;
}
}
if (wacc > VECTOR_EPSILON) {
racc /= wacc;

View File

@@ -269,7 +269,6 @@ class Shape : public PbClass {
protected:
GridType mType;
public:
PbArgs _args;
}
@@ -320,7 +319,6 @@ class NullShape : public Shape {
{
gridSetConst<Real>(phi, 1000.0f);
}
public:
PbArgs _args;
}
@@ -396,7 +394,6 @@ class Box : public Shape {
protected:
Vec3 mP0, mP1;
public:
PbArgs _args;
}
@@ -458,7 +455,6 @@ class Sphere : public Shape {
protected:
Vec3 mCenter, mScale;
Real mRadius;
public:
PbArgs _args;
}
@@ -583,7 +579,6 @@ class Cylinder : public Shape {
protected:
Vec3 mCenter, mZDir;
Real mRadius, mZ;
public:
PbArgs _args;
}
@@ -660,7 +655,6 @@ class Slope : public Shape {
Real mAnglexy, mAngleyz;
Real mOrigin;
Vec3 mGs;
public:
PbArgs _args;
}

View File

@@ -199,7 +199,6 @@ class TurbulenceParticleSystem : public ParticleSystem<TurbulenceParticleData> {
private:
WaveletNoiseField &noise;
public:
PbArgs _args;
}

View File

@@ -127,7 +127,6 @@ class VortexParticleSystem : public ParticleSystem<VortexParticleData> {
}
virtual ParticleBase *clone();
public:
PbArgs _args;
}

View File

@@ -240,7 +240,6 @@ class VortexSheetMesh : public Mesh {
VorticityChannel mVorticity;
TexCoord3Channel mTex1, mTex2;
TurbulenceChannel mTurb;
public:
PbArgs _args;
}

View File

@@ -79,15 +79,15 @@ typedef struct CLG_IDFilter {
} CLG_IDFilter;
typedef struct CLogContext {
/** Single linked list of types. */
/** Single linked list of types. */
CLG_LogType *types;
/** Single linked list of references. */
/** Single linked list of references. */
CLG_LogRef *refs;
#ifdef WITH_CLOG_PTHREADS
pthread_mutex_t types_lock;
#endif
/* exclude, include filters. */
/* exclude, include filters. */
CLG_IDFilter *filters[2];
bool use_color;
bool use_basename;
@@ -322,9 +322,7 @@ static bool clg_ctx_filter_check(CLogContext *ctx, const char *identifier)
if (flt->match[0] == '*' && flt->match[len - 1] == '*') {
char *match = MEM_callocN(sizeof(char) * len - 1, __func__);
memcpy(match, flt->match + 1, len - 2);
const bool success = (strstr(identifier, match) != NULL);
MEM_freeN(match);
if (success) {
if (strstr(identifier, match) != NULL) {
return (bool)i;
}
}
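
In the clg_ctx_filter_check() hunk above, one side of the "*text*" wildcard branch records the strstr() result before freeing the temporary copy, so the allocation is released on the matching path as well as the non-matching one. A stand-alone sketch of that pattern, with the allocator simplified to the standard library and illustrative names:

#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

/* Illustrative only: does `identifier` contain the text between the leading
 * and trailing '*' of a "*needle*" style filter (filter length >= 2 assumed)?
 * The point of the pattern is to capture the strstr() result first so the
 * temporary copy can be freed on every path before returning. */
static bool filter_match_both_wildcards(const char *identifier, const char *filter)
{
  const size_t len = strlen(filter);
  char *match = calloc(len - 1, sizeof(char)); /* inner text + terminating NUL */
  memcpy(match, filter + 1, len - 2);          /* strip the surrounding '*' */
  const bool success = (strstr(identifier, match) != NULL);
  free(match);
  return success;
}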

View File

@@ -71,16 +71,6 @@ if(WITH_CYCLES_STANDALONE)
target_link_libraries(cycles ${LIBRARIES})
cycles_target_link_libraries(cycles)
if(APPLE)
if(WITH_OPENCOLORIO)
set_property(TARGET cycles APPEND_STRING PROPERTY LINK_FLAGS " -framework IOKit")
endif()
if(WITH_OPENIMAGEDENOISE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
# OpenImageDenoise uses BNNS from the Accelerate framework.
set_property(TARGET cycles APPEND_STRING PROPERTY LINK_FLAGS " -framework Accelerate")
endif()
endif()
if(UNIX AND NOT APPLE)
set_target_properties(cycles PROPERTIES INSTALL_RPATH $ORIGIN/lib)
endif()

View File

@@ -51,12 +51,12 @@ CCL_NAMESPACE_BEGIN
/* XML reading state */
struct XMLReadState : public XMLReader {
Scene *scene; /* Scene pointer. */
Transform tfm; /* Current transform state. */
bool smooth; /* Smooth normal state. */
Shader *shader; /* Current shader. */
string base; /* Base path to current file. */
float dicing_rate; /* Current dicing rate. */
Scene *scene; /* scene pointer */
Transform tfm; /* current transform state */
bool smooth; /* smooth normal state */
Shader *shader; /* current shader */
string base; /* base path to current file*/
float dicing_rate; /* current dicing rate */
XMLReadState() : scene(NULL), smooth(false), shader(NULL), dicing_rate(1.0f)
{
@@ -385,7 +385,7 @@ static Mesh *xml_add_mesh(Scene *scene, const Transform &tfm)
Mesh *mesh = new Mesh();
scene->geometry.push_back(mesh);
/* Create object. */
/* create object*/
Object *object = new Object();
object->set_geometry(mesh);
object->set_tfm(tfm);

View File

@@ -19,16 +19,16 @@ from __future__ import annotations
def _is_using_buggy_driver():
import gpu
import bgl
# We need to be conservative here because in multi-GPU systems display card
# might be quite old, but others one might be just good.
#
# So We shouldn't disable possible good dedicated cards just because display
# card seems weak. And instead we only blacklist configurations which are
# proven to cause problems.
if gpu.platform.vendor_get() == "ATI Technologies Inc.":
if bgl.glGetString(bgl.GL_VENDOR) == "ATI Technologies Inc.":
import re
version = gpu.platform.version_get()
version = bgl.glGetString(bgl.GL_VERSION)
if version.endswith("Compatibility Profile Context"):
# Old HD 4xxx and 5xxx series drivers did not have driver version
# in the version string, but those cards do not quite work and
@@ -132,7 +132,7 @@ def init():
_workaround_buggy_drivers()
path = os.path.dirname(__file__)
user_path = os.path.dirname(os.path.abspath(bpy.utils.user_resource('CONFIG', path='')))
user_path = os.path.dirname(os.path.abspath(bpy.utils.user_resource('CONFIG', '')))
_cycles.init(path, user_path, bpy.app.background)
_parse_command_line()

View File

@@ -1254,19 +1254,12 @@ class CyclesObjectSettings(bpy.types.PropertyGroup):
)
shadow_terminator_offset: FloatProperty(
name="Shadow Terminator Shading Offset",
name="Shadow Terminator Offset",
description="Push the shadow terminator towards the light to hide artifacts on low poly geometry",
min=0.0, max=1.0,
default=0.0,
)
shadow_terminator_geometry_offset: FloatProperty(
name="Shadow Terminator Geometry Offset",
description="Offset rays from the surface to reduce shadow terminator artifact on low poly geometry. Only affects triangles at grazing angles to light",
min=0.0, max=1.0,
default=0.1,
)
is_shadow_catcher: BoolProperty(
name="Shadow Catcher",
description="Only render shadows on this object, for compositing renders into real footage",

View File

@@ -725,7 +725,7 @@ class CYCLES_RENDER_PT_performance_tiles(CyclesButtonsPanel, Panel):
col.prop(cscene, "tile_order", text="Order")
sub = col.column()
sub.active = not rd.use_save_buffers and not cscene.use_adaptive_sampling
sub.active = not rd.use_save_buffers
sub.prop(cscene, "use_progressive_refine")
@@ -821,11 +821,6 @@ class CYCLES_RENDER_PT_filter(CyclesButtonsPanel, Panel):
col.prop(view_layer, "use_strand", text="Hair")
col.prop(view_layer, "use_volumes", text="Volumes")
col = layout.column(heading="Use")
sub = col.row()
sub.prop(view_layer, "use_motion_blur", text="Motion Blur")
sub.active = rd.use_motion_blur
class CYCLES_RENDER_PT_override(CyclesButtonsPanel, Panel):
bl_label = "Override"
@@ -1223,31 +1218,20 @@ class CYCLES_OBJECT_PT_shading(CyclesButtonsPanel, Panel):
@classmethod
def poll(cls, context):
if not CyclesButtonsPanel.poll(context):
return False
ob = context.object
return ob and has_geometry_visibility(ob)
def draw(self, context):
pass
class CYCLES_OBJECT_PT_shading_shadow_terminator(CyclesButtonsPanel, Panel):
bl_label = "Shadow Terminator"
bl_parent_id = "CYCLES_OBJECT_PT_shading"
bl_context = "object"
return CyclesButtonsPanel.poll(context) and (context.object)
def draw(self, context):
layout = self.layout
layout.use_property_split = True
flow = layout.grid_flow(row_major=False, columns=0, even_columns=True, even_rows=False, align=True)
flow = layout.grid_flow(row_major=False, columns=0, even_columns=True, even_rows=False, align=False)
layout = self.layout
ob = context.object
cob = ob.cycles
flow.prop(cob, "shadow_terminator_geometry_offset", text="Geometry Offset")
flow.prop(cob, "shadow_terminator_offset", text="Shading Offset")
if has_geometry_visibility(ob):
col = flow.column()
col.prop(cob, "shadow_terminator_offset")
class CYCLES_OBJECT_PT_visibility(CyclesButtonsPanel, Panel):
@@ -2327,7 +2311,6 @@ classes = (
CYCLES_PT_context_material,
CYCLES_OBJECT_PT_motion_blur,
CYCLES_OBJECT_PT_shading,
CYCLES_OBJECT_PT_shading_shadow_terminator,
CYCLES_OBJECT_PT_visibility,
CYCLES_OBJECT_PT_visibility_ray_visibility,
CYCLES_OBJECT_PT_visibility_culling,

View File

@@ -83,8 +83,6 @@ struct BlenderCamera {
BoundBox2D pano_viewplane;
BoundBox2D viewport_camera_border;
float passepartout_alpha;
Transform matrix;
float offscreen_dicing_scale;
@@ -127,7 +125,6 @@ static void blender_camera_init(BlenderCamera *bcam, BL::RenderSettings &b_rende
bcam->pano_viewplane.top = 1.0f;
bcam->viewport_camera_border.right = 1.0f;
bcam->viewport_camera_border.top = 1.0f;
bcam->passepartout_alpha = 0.5f;
bcam->offscreen_dicing_scale = 1.0f;
bcam->matrix = transform_identity();
@@ -215,8 +212,6 @@ static void blender_camera_from_object(BlenderCamera *bcam,
bcam->lens = b_camera.lens();
bcam->passepartout_alpha = b_camera.show_passepartout() ? b_camera.passepartout_alpha() : 0.0f;
if (b_camera.dof().use_dof()) {
/* allow f/stop number to change aperture_size but still
* give manual control over aperture radius */
@@ -839,19 +834,15 @@ static void blender_camera_border(BlenderCamera *bcam,
full_border,
&bcam->viewport_camera_border);
if (b_render.use_border()) {
bcam->border.left = b_render.border_min_x();
bcam->border.right = b_render.border_max_x();
bcam->border.bottom = b_render.border_min_y();
bcam->border.top = b_render.border_max_y();
}
else if (bcam->passepartout_alpha == 1.0f) {
bcam->border = full_border;
}
else {
if (!b_render.use_border()) {
return;
}
bcam->border.left = b_render.border_min_x();
bcam->border.right = b_render.border_max_x();
bcam->border.bottom = b_render.border_min_y();
bcam->border.top = b_render.border_max_y();
/* Determine viewport subset matching camera border. */
blender_camera_border_subset(b_engine,
b_render,
@@ -894,7 +885,8 @@ void BlenderSync::sync_view(BL::SpaceView3D &b_v3d,
}
}
BufferParams BlenderSync::get_buffer_params(BL::SpaceView3D &b_v3d,
BufferParams BlenderSync::get_buffer_params(BL::RenderSettings &b_render,
BL::SpaceView3D &b_v3d,
BL::RegionView3D &b_rv3d,
Camera *cam,
int width,
@@ -910,8 +902,7 @@ BufferParams BlenderSync::get_buffer_params(BL::SpaceView3D &b_v3d,
if (b_v3d && b_rv3d && b_rv3d.view_perspective() != BL::RegionView3D::view_perspective_CAMERA)
use_border = b_v3d.use_render_border();
else
/* the camera can always have a passepartout */
use_border = true;
use_border = b_render.use_border();
if (use_border) {
/* border render */

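For context on the camera-border hunks above: the border is tracked as normalized 0..1 fractions of the frame (border_min_x/border_max_x and friends) and only becomes a pixel sub-rectangle when the buffer parameters are built. A minimal sketch of that conversion, with illustrative names rather than the exact BufferParams fields:

/* Sketch only: map a normalized render border onto a pixel sub-rectangle.
 * Assumes left/right/bottom/top are fractions in [0, 1] of the full frame. */
#include <algorithm>

struct PixelRect {
  int full_x, full_y, width, height;
};

static PixelRect border_to_pixels(
    float left, float right, float bottom, float top, int full_width, int full_height)
{
  PixelRect rect;
  rect.full_x = (int)(left * (float)full_width);
  rect.full_y = (int)(bottom * (float)full_height);
  rect.width = std::max((int)(right * (float)full_width) - rect.full_x, 1);
  rect.height = std::max((int)(top * (float)full_height) - rect.full_y, 1);
  return rect;
}

For example, a border of (0.25, 0.75, 0.25, 0.75) on a 1920x1080 frame yields a 960x540 rectangle offset by (480, 270), which is why the hunks only need to carry the four fractions on the camera.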
View File

@@ -533,7 +533,7 @@ void BlenderSync::sync_particle_hair(
return;
}
/* Extract particle hair data - should be combined with connecting to mesh later. */
/* extract particle hair data - should be combined with connecting to mesh later */
ParticleCurveData CData;

View File

@@ -34,17 +34,12 @@ void BlenderSync::sync_light(BL::Object &b_parent,
bool *use_portal)
{
/* test if we need to sync */
Light *light;
ObjectKey key(b_parent, persistent_id, b_ob_instance, false);
BL::Light b_light(b_ob.data());
Light *light = light_map.find(key);
/* Check if the transform was modified; when a linked collection is moved we do not get a
* specific depsgraph update (T88515). This also mimics the behavior for Objects. */
const bool tfm_updated = (light && light->get_tfm() != tfm);
/* Update if either object or light data changed. */
if (!light_map.add_or_update(&light, b_ob, b_parent, key) && !tfm_updated) {
if (!light_map.add_or_update(&light, b_ob, b_parent, key)) {
Shader *shader;
if (!shader_map.add_or_update(&shader, b_light)) {
if (light->get_is_portal())

View File

@@ -96,49 +96,7 @@ bool BlenderSync::object_is_light(BL::Object &b_ob)
return (b_ob_data && b_ob_data.is_a(&RNA_Light));
}
void BlenderSync::sync_object_motion_init(BL::Object &b_parent, BL::Object &b_ob, Object *object)
{
/* Initialize motion blur for object, detecting if it's enabled and creating motion
* steps array if so. */
array<Transform> motion;
object->set_motion(motion);
Scene::MotionType need_motion = scene->need_motion();
if (need_motion == Scene::MOTION_NONE || !object->get_geometry()) {
return;
}
Geometry *geom = object->get_geometry();
geom->set_use_motion_blur(false);
geom->set_motion_steps(0);
uint motion_steps;
if (need_motion == Scene::MOTION_BLUR) {
motion_steps = object_motion_steps(b_parent, b_ob, Object::MAX_MOTION_STEPS);
geom->set_motion_steps(motion_steps);
if (motion_steps && object_use_deform_motion(b_parent, b_ob)) {
geom->set_use_motion_blur(true);
}
}
else {
motion_steps = 3;
geom->set_motion_steps(motion_steps);
}
motion.resize(motion_steps, transform_empty());
if (motion_steps) {
motion[motion_steps / 2] = object->get_tfm();
/* update motion socket before trying to access object->motion_time */
object->set_motion(motion);
for (size_t step = 0; step < motion_steps; step++) {
motion_times.insert(object->motion_time(step));
}
}
}
/* Object */
Object *BlenderSync::sync_object(BL::Depsgraph &b_depsgraph,
BL::ViewLayer &b_view_layer,
@@ -261,8 +219,10 @@ Object *BlenderSync::sync_object(BL::Depsgraph &b_depsgraph,
}
/* test if we need to sync */
bool object_updated = object_map.add_or_update(&object, b_ob, b_parent, key) ||
(tfm != object->get_tfm());
bool object_updated = false;
if (object_map.add_or_update(&object, b_ob, b_parent, key))
object_updated = true;
/* mesh sync */
/* b_ob is owned by the iterator and will go out of scope at the end of the block.
@@ -290,12 +250,8 @@ Object *BlenderSync::sync_object(BL::Depsgraph &b_depsgraph,
bool is_shadow_catcher = get_boolean(cobject, "is_shadow_catcher");
object->set_is_shadow_catcher(is_shadow_catcher);
float shadow_terminator_shading_offset = get_float(cobject, "shadow_terminator_offset");
object->set_shadow_terminator_shading_offset(shadow_terminator_shading_offset);
float shadow_terminator_geometry_offset = get_float(cobject,
"shadow_terminator_geometry_offset");
object->set_shadow_terminator_geometry_offset(shadow_terminator_geometry_offset);
float shadow_terminator_offset = get_float(cobject, "shadow_terminator_offset");
object->set_shadow_terminator_offset(shadow_terminator_offset);
/* sync the asset name for Cryptomatte */
BL::Object parent = b_ob.parent();
@@ -315,11 +271,49 @@ Object *BlenderSync::sync_object(BL::Depsgraph &b_depsgraph,
* transform comparison should not be needed, but duplis don't work perfectly
* in the depsgraph and may not signal changes, so this is a workaround */
if (object->is_modified() || object_updated ||
(object->get_geometry() && object->get_geometry()->is_modified())) {
(object->get_geometry() && object->get_geometry()->is_modified()) ||
tfm != object->get_tfm()) {
object->name = b_ob.name().c_str();
object->set_pass_id(b_ob.pass_index());
object->set_color(get_float3(b_ob.color()));
object->set_tfm(tfm);
array<Transform> motion;
object->set_motion(motion);
/* motion blur */
Scene::MotionType need_motion = scene->need_motion();
if (need_motion != Scene::MOTION_NONE && object->get_geometry()) {
Geometry *geom = object->get_geometry();
geom->set_use_motion_blur(false);
geom->set_motion_steps(0);
uint motion_steps;
if (need_motion == Scene::MOTION_BLUR) {
motion_steps = object_motion_steps(b_parent, b_ob, Object::MAX_MOTION_STEPS);
geom->set_motion_steps(motion_steps);
if (motion_steps && object_use_deform_motion(b_parent, b_ob)) {
geom->set_use_motion_blur(true);
}
}
else {
motion_steps = 3;
geom->set_motion_steps(motion_steps);
}
motion.resize(motion_steps, transform_empty());
if (motion_steps) {
motion[motion_steps / 2] = tfm;
/* update motion socket before trying to access object->motion_time */
object->set_motion(motion);
for (size_t step = 0; step < motion_steps; step++) {
motion_times.insert(object->motion_time(step));
}
}
}
/* dupli texture coordinates and random_id */
if (is_instance) {
@@ -337,8 +331,6 @@ Object *BlenderSync::sync_object(BL::Depsgraph &b_depsgraph,
object->tag_update(scene);
}
sync_object_motion_init(b_parent, b_ob, object);
if (is_instance) {
/* Sync possible particle data. */
sync_dupli_particle(b_parent, b_instance, object);
@@ -621,7 +613,7 @@ void BlenderSync::sync_motion(BL::RenderSettings &b_render,
if (b_cam) {
sync_camera_motion(b_render, b_cam, width, height, 0.0f);
}
sync_objects(b_depsgraph, b_v3d);
sync_objects(b_depsgraph, b_v3d, 0.0f);
}
/* Insert motion times from camera. Motion times from other objects

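On the motion-blur hunks above: the motion array holds one transform per step, the middle index (motion_steps / 2) is filled with the current-frame transform, and the remaining step times are looked up through object->motion_time(step). A minimal sketch of the evenly spaced step times this implies, assuming the conventional mapping onto the relative shutter interval [-1, 1] (illustrative, not a verbatim copy of the Cycles Object API):

/* Sketch only: relative time of a motion step within the shutter interval.
 * With motion_steps = 3 this returns -1.0, 0.0 and +1.0, so the middle index
 * (motion_steps / 2 == 1) lands exactly on the current frame, matching the
 * hunk that writes the current transform into motion[motion_steps / 2]. */
static float motion_step_time(int step, int motion_steps)
{
  if (motion_steps <= 1) {
    return 0.0f;
  }
  return 2.0f * (float)step / (float)(motion_steps - 1) - 1.0f;
}

The sync code then only has to insert these per-step times into motion_times so the later motion pass can evaluate the depsgraph at each of them.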
View File

@@ -35,7 +35,6 @@
#include "util/util_path.h"
#include "util/util_string.h"
#include "util/util_task.h"
#include "util/util_tbb.h"
#include "util/util_types.h"
#ifdef WITH_OSL
@@ -289,7 +288,6 @@ static PyObject *render_func(PyObject * /*self*/, PyObject *args)
RNA_pointer_create(NULL, &RNA_Depsgraph, (ID *)PyLong_AsVoidPtr(pydepsgraph), &depsgraphptr);
BL::Depsgraph b_depsgraph(depsgraphptr);
/* Allow Blender to execute other Python scripts. */
python_thread_state_save(&session->python_thread_state);
session->render(b_depsgraph);

View File

@@ -155,7 +155,7 @@ void BlenderSession::create_session()
/* set buffer parameters */
BufferParams buffer_params = BlenderSync::get_buffer_params(
b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
b_render, b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
session->reset(buffer_params, session_params.samples);
b_engine.use_highlight_tiles(session_params.progressive_refine == false);
@@ -237,12 +237,10 @@ void BlenderSession::reset_session(BL::BlendData &b_data, BL::Depsgraph &b_depsg
sync->sync_recalc(b_depsgraph, b_v3d);
}
BL::Object b_camera_override(b_engine.camera_override());
sync->sync_camera(b_render, b_camera_override, width, height, "");
BL::SpaceView3D b_null_space_view3d(PointerRNA_NULL);
BL::RegionView3D b_null_region_view3d(PointerRNA_NULL);
BufferParams buffer_params = BlenderSync::get_buffer_params(b_null_space_view3d,
BufferParams buffer_params = BlenderSync::get_buffer_params(b_render,
b_null_space_view3d,
b_null_region_view3d,
scene->camera,
width,
@@ -485,7 +483,7 @@ void BlenderSession::render(BL::Depsgraph &b_depsgraph_)
SessionParams session_params = BlenderSync::get_session_params(
b_engine, b_userpref, b_scene, background, b_view_layer);
BufferParams buffer_params = BlenderSync::get_buffer_params(
b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
b_render, b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
/* temporary render result to find needed passes and views */
BL::RenderResult b_rr = begin_render_result(
@@ -809,7 +807,7 @@ void BlenderSession::synchronize(BL::Depsgraph &b_depsgraph_)
/* get buffer parameters */
BufferParams buffer_params = BlenderSync::get_buffer_params(
b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
b_render, b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
if (!buffer_params.denoising_data_pass) {
session_params.denoising.use = false;
@@ -888,7 +886,7 @@ bool BlenderSession::draw(int w, int h)
SessionParams session_params = BlenderSync::get_session_params(
b_engine, b_userpref, b_scene, background);
BufferParams buffer_params = BlenderSync::get_buffer_params(
b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
b_render, b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
bool session_pause = BlenderSync::get_session_pause(b_scene, background);
if (session_pause == false) {
@@ -906,7 +904,7 @@ bool BlenderSession::draw(int w, int h)
/* draw */
BufferParams buffer_params = BlenderSync::get_buffer_params(
b_v3d, b_rv3d, scene->camera, width, height, session->params.denoising.use);
b_render, b_v3d, b_rv3d, scene->camera, width, height, session->params.denoising.use);
DeviceDrawParams draw_params;
if (session->params.display_buffer_linear) {

View File

@@ -1373,7 +1373,7 @@ void BlenderSync::sync_world(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d,
BlenderViewportParameters new_viewport_parameters(b_v3d);
if (world_recalc || update_all || b_world.ptr.data != world_map ||
viewport_parameters.shader_modified(new_viewport_parameters)) {
viewport_parameters.modified(new_viewport_parameters)) {
Shader *shader = scene->default_background;
ShaderGraph *graph = new ShaderGraph();
@@ -1501,8 +1501,8 @@ void BlenderSync::sync_world(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d,
background->set_transparent_roughness_threshold(0.0f);
}
background->set_use_shader(view_layer.use_background_shader ||
viewport_parameters.use_custom_shader());
background->set_use_shader(view_layer.use_background_shader |
viewport_parameters.custom_viewport_parameters());
background->set_use_ao(background->get_use_ao() && view_layer.use_background_ao);
background->tag_update(scene);
@@ -1553,9 +1553,13 @@ void BlenderSync::sync_lights(BL::Depsgraph &b_depsgraph, bool update_all)
void BlenderSync::sync_shaders(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d)
{
/* for auto refresh images */
ImageManager *image_manager = scene->image_manager;
const int frame = b_scene.frame_current();
const bool auto_refresh_update = image_manager->set_animation_frame_update(frame);
bool auto_refresh_update = false;
if (preview) {
ImageManager *image_manager = scene->image_manager;
int frame = b_scene.frame_current();
auto_refresh_update = image_manager->set_animation_frame_update(frame);
}
shader_map.pre_sync();

View File

@@ -69,8 +69,7 @@ BlenderSync::BlenderSync(BL::RenderEngine &b_engine,
experimental(false),
dicing_rate(1.0f),
max_subdivisions(12),
progress(progress),
has_updates_(true)
progress(progress)
{
PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
dicing_rate = preview ? RNA_float_get(&cscene, "preview_dicing_rate") :
@@ -85,9 +84,7 @@ BlenderSync::~BlenderSync()
void BlenderSync::reset(BL::BlendData &b_data, BL::Scene &b_scene)
{
/* Update data and scene pointers in case they change in session reset,
* for example after undo.
* Note that we do not modify the `has_updates_` flag here because the sync
* reset is also used during viewport navigation. */
* for example after undo. */
this->b_data = b_data;
this->b_scene = b_scene;
}
@@ -120,8 +117,6 @@ void BlenderSync::sync_recalc(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d
}
if (dicing_prop_changed) {
has_updates_ = true;
for (const pair<const GeometryKey, Geometry *> &iter : geometry_map.key_to_scene_data()) {
Geometry *geom = iter.second;
if (geom->is_mesh()) {
@@ -138,12 +133,6 @@ void BlenderSync::sync_recalc(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d
/* Iterate over all IDs in this depsgraph. */
for (BL::DepsgraphUpdate &b_update : b_depsgraph.updates) {
/* TODO(sergey): Can do more selective filter here. For example, ignore changes made to
* screen datablock. Note that sync_data() needs to be called after object deletion, and
* currently this is ensured by the scene ID tagged for update, which sets the `has_updates_`
* flag. */
has_updates_ = true;
BL::ID b_id(b_update.id());
/* Material */
@@ -224,13 +213,9 @@ void BlenderSync::sync_recalc(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d
if (b_v3d) {
BlenderViewportParameters new_viewport_parameters(b_v3d);
if (viewport_parameters.shader_modified(new_viewport_parameters)) {
if (viewport_parameters.modified(new_viewport_parameters)) {
world_recalc = true;
has_updates_ = true;
}
has_updates_ |= viewport_parameters.modified(new_viewport_parameters);
}
}
@@ -242,15 +227,11 @@ void BlenderSync::sync_data(BL::RenderSettings &b_render,
int height,
void **python_thread_state)
{
if (!has_updates_) {
return;
}
scoped_timer timer;
BL::ViewLayer b_view_layer = b_depsgraph.view_layer_eval();
sync_view_layer(b_view_layer);
sync_view_layer(b_v3d, b_view_layer);
sync_integrator();
sync_film(b_v3d);
sync_shaders(b_depsgraph, b_v3d);
@@ -273,14 +254,13 @@ void BlenderSync::sync_data(BL::RenderSettings &b_render,
free_data_after_sync(b_depsgraph);
VLOG(1) << "Total time spent synchronizing data: " << timer.get_time();
has_updates_ = false;
}
/* Integrator */
void BlenderSync::sync_integrator()
{
BL::RenderSettings r = b_scene.render();
PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
experimental = (get_enum(cscene, "feature_set") != 0);
@@ -324,7 +304,7 @@ void BlenderSync::sync_integrator()
integrator->set_sample_clamp_direct(get_float(cscene, "sample_clamp_direct"));
integrator->set_sample_clamp_indirect(get_float(cscene, "sample_clamp_indirect"));
if (!preview) {
integrator->set_motion_blur(view_layer.use_motion_blur);
integrator->set_motion_blur(r.use_motion_blur());
}
integrator->set_method((Integrator::Method)get_enum(
@@ -444,7 +424,7 @@ void BlenderSync::sync_film(BL::SpaceView3D &b_v3d)
/* Render Layer */
void BlenderSync::sync_view_layer(BL::ViewLayer &b_view_layer)
void BlenderSync::sync_view_layer(BL::SpaceView3D & /*b_v3d*/, BL::ViewLayer &b_view_layer)
{
view_layer.name = b_view_layer.name();
@@ -455,8 +435,6 @@ void BlenderSync::sync_view_layer(BL::ViewLayer &b_view_layer)
view_layer.use_surfaces = b_view_layer.use_solid() || scene->bake_manager->get_baking();
view_layer.use_hair = b_view_layer.use_strand();
view_layer.use_volumes = b_view_layer.use_volumes();
view_layer.use_motion_blur = b_view_layer.use_motion_blur() &&
b_scene.render().use_motion_blur();
/* Material override. */
view_layer.material_override = b_view_layer.material_override();
@@ -603,10 +581,8 @@ vector<Pass> BlenderSync::sync_render_passes(BL::Scene &b_scene,
for (BL::RenderPass &b_pass : b_rlay.passes) {
PassType pass_type = get_pass_type(b_pass);
if (pass_type == PASS_MOTION &&
(b_view_layer.use_motion_blur() && b_scene.render().use_motion_blur())) {
if (pass_type == PASS_MOTION && b_scene.render().use_motion_blur())
continue;
}
if (pass_type != PASS_NONE)
Pass::add(pass_type, passes, b_pass.name().c_str());
}
@@ -763,18 +739,12 @@ void BlenderSync::free_data_after_sync(BL::Depsgraph &b_depsgraph)
* caches to be released from the Blender side in order to reduce peak memory
* footprint during the synchronization process.
*/
const bool is_interface_locked = b_engine.render() && b_engine.render().use_lock_interface();
const bool is_persistent_data = b_engine.render() && b_engine.render().use_persistent_data();
const bool can_free_caches =
(BlenderSession::headless || is_interface_locked) &&
/* Baking re-uses the depsgraph multiple times; clearing it crashes when
* reading un-evaluated mesh data which isn't aligned with the
* geometry we're baking, see T71012. */
!scene->bake_manager->get_baking() &&
/* Persistent data must maintain caches for performance and correctness. */
!is_persistent_data;
const bool can_free_caches = (BlenderSession::headless || is_interface_locked) &&
/* Baking re-uses the depsgraph multiple times; clearing it crashes when
* reading un-evaluated mesh data which isn't aligned with the
* geometry we're baking, see T71012. */
!scene->bake_manager->get_baking();
if (!can_free_caches) {
return;
}
@@ -899,9 +869,6 @@ SessionParams BlenderSync::get_session_params(BL::RenderEngine &b_engine,
/* Clamp samples. */
params.samples = min(params.samples, Integrator::MAX_SAMPLES);
/* Adaptive sampling. */
params.adaptive_sampling = RNA_boolean_get(&cscene, "use_adaptive_sampling");
/* tiles */
const bool is_cpu = (params.device.type == DEVICE_CPU);
if (!is_cpu && !background) {
@@ -954,7 +921,7 @@ SessionParams BlenderSync::get_session_params(BL::RenderEngine &b_engine,
BL::RenderSettings b_r = b_scene.render();
params.progressive_refine = b_engine.is_preview() ||
get_boolean(cscene, "use_progressive_refine");
if (b_r.use_save_buffers() || params.adaptive_sampling)
if (b_r.use_save_buffers())
params.progressive_refine = false;
if (background) {
@@ -990,6 +957,8 @@ SessionParams BlenderSync::get_session_params(BL::RenderEngine &b_engine,
params.use_profiling = params.device.has_profiling && !b_engine.is_preview() && background &&
BlenderSession::print_render_stats;
params.adaptive_sampling = RNA_boolean_get(&cscene, "use_adaptive_sampling");
return params;
}
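One side of the blender_sync.cpp hunks above carries a has_updates_ flag: it starts out true, is raised by sync_recalc() whenever a depsgraph update, dicing change or viewport change comes in, lets sync_data() return early when nothing changed, and is cleared once a sync completes. A minimal sketch of that pattern with illustrative names, not the exact BlenderSync members:

/* Sketch only: change-tracking flag used to skip redundant scene syncs. */
class SyncGate {
 public:
  /* Called from the depsgraph/viewport update handlers. */
  void tag_update()
  {
    has_updates_ = true;
  }

  /* Called from the per-frame sync entry point. */
  void sync_if_needed()
  {
    if (!has_updates_) {
      return; /* Nothing changed since the last completed sync. */
    }
    /* ... synchronize scene data here ... */
    has_updates_ = false;
  }

 private:
  bool has_updates_ = true; /* True so the very first sync always runs. */
};

Note that, per the comment in the hunk, the reset() path deliberately leaves the flag alone because the same reset is also triggered during viewport navigation.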

Some files were not shown because too many files have changed in this diff.