Compare commits: temp-compa...temp-ffmpe (1 commit)

SHA1: ead360995a
@@ -43,12 +43,6 @@ endif()
if(WIN32)
  set(EMBREE_BUILD_DIR ${BUILD_MODE}/)
  if(BUILD_MODE STREQUAL Debug)
    list(APPEND EMBREE_EXTRA_ARGS
      -DEMBREE_TBBMALLOC_LIBRARY_NAME=tbbmalloc_debug
      -DEMBREE_TBB_LIBRARY_NAME=tbb_debug
    )
  endif()
else()
  set(EMBREE_BUILD_DIR)
endif()

@@ -22,7 +22,6 @@ if(WIN32)
    -DTBB_BUILD_TBBMALLOC_PROXY=On
    -DTBB_BUILD_STATIC=Off
    -DTBB_BUILD_TESTS=Off
    -DCMAKE_DEBUG_POSTFIX=_debug
  )
  set(TBB_LIBRARY tbb)
  set(TBB_STATIC_LIBRARY Off)

@@ -56,17 +55,17 @@ if(WIN32)
  ExternalProject_Add_Step(external_tbb after_install
    # findtbb.cmake in some deps *NEEDS* to find tbb_debug.lib even if they are not going to use it
    # to make that test pass, we place a copy with the right name in the lib folder.
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${LIBDIR}/tbb/lib/tbb_debug.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb.dll ${LIBDIR}/tbb/bin/tbb_debug.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc.dll ${LIBDIR}/tbb/bin/tbbmalloc_debug.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${HARVEST_TARGET}/tbb/lib/tbb_debug.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.dll ${HARVEST_TARGET}/tbb/lib/tbb_debug.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.dll
    # Normal collection of build artifacts
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${HARVEST_TARGET}/tbb/lib/tbb.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb.dll ${HARVEST_TARGET}/tbb/bin/tbb.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.dll ${HARVEST_TARGET}/tbb/lib/tbb.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_proxy.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy.dll
    COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/tbb/include/ ${HARVEST_TARGET}/tbb/include/
    DEPENDEES install
  )
@@ -77,12 +76,11 @@ if(WIN32)
    # to make that test pass, we place a copy with the right name in the lib folder.
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${LIBDIR}/tbb/lib/tbb.lib
    # Normal collection of build artifacts
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${HARVEST_TARGET}/tbb/lib/tbb_debug.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb_debug.dll ${HARVEST_TARGET}/tbb/bin/tbb_debug.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy_debug.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy_debug.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_debug.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_debug.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_proxy_debug.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_proxy_debug.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${HARVEST_TARGET}/tbb/lib/debug/tbb_debug.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.dll ${HARVEST_TARGET}/tbb/lib/debug/tbb_debug.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy_debug.lib
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/debug/tbbmalloc.dll
    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/lib/debug/tbbmalloc_proxy.dll
    DEPENDEES install
  )
endif()

@@ -43,7 +43,7 @@ set(JPEG_FILE libjpeg-turbo-${JPEG_VERSION}.tar.gz)
set(BOOST_VERSION 1.73.0)
set(BOOST_VERSION_NODOTS 1_73_0)
set(BOOST_VERSION_NODOTS_SHORT 1_73)
set(BOOST_URI https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz)
set(BOOST_URI https://dl.bintray.com/boostorg/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz)
set(BOOST_HASH 4036cd27ef7548b8d29c30ea10956196)
set(BOOST_HASH_TYPE MD5)
set(BOOST_FILE boost_${BOOST_VERSION_NODOTS}.tar.gz)

@@ -432,9 +432,9 @@ set(USD_HASH 1dd1e2092d085ed393c1f7c450a4155a)
set(USD_HASH_TYPE MD5)
set(USD_FILE usd-v${USD_VERSION}.tar.gz)

set(OIDN_VERSION 1.4.0)
set(OIDN_VERSION 1.3.0)
set(OIDN_URI https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz)
set(OIDN_HASH 421824019becc5b664a22a2b98332bc5)
set(OIDN_HASH 301a5a0958d375a942014df0679b9270)
set(OIDN_HASH_TYPE MD5)
set(OIDN_FILE oidn-${OIDN_VERSION}.src.tar.gz)

@@ -553,10 +553,10 @@ EMBREE_FORCE_BUILD=false
EMBREE_FORCE_REBUILD=false
EMBREE_SKIP=false

OIDN_VERSION="1.4.0"
OIDN_VERSION_SHORT="1.4"
OIDN_VERSION_MIN="1.4.0"
OIDN_VERSION_MAX="1.5"
OIDN_VERSION="1.3.0"
OIDN_VERSION_SHORT="1.3"
OIDN_VERSION_MIN="1.3.0"
OIDN_VERSION_MAX="1.4"
OIDN_FORCE_BUILD=false
OIDN_FORCE_REBUILD=false
OIDN_SKIP=false

@@ -1,3 +1,33 @@
diff -Naur oidn-1.3.0/cmake/FindTBB.cmake external_openimagedenoise/cmake/FindTBB.cmake
--- oidn-1.3.0/cmake/FindTBB.cmake 2021-02-04 16:20:26 -0700
+++ external_openimagedenoise/cmake/FindTBB.cmake 2021-02-12 09:35:53 -0700
@@ -332,20 +332,22 @@
  ${TBB_ROOT}/lib/${TBB_ARCH}/${TBB_VCVER}
  ${TBB_ROOT}/lib
)
-
# On Windows, also search the DLL so that the client may install it.
file(GLOB DLL_NAMES
  ${TBB_ROOT}/bin/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
  ${TBB_ROOT}/bin/${LIB_NAME}.dll
+ ${TBB_ROOT}/lib/${LIB_NAME}.dll
  ${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
  ${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB1}.dll
  ${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB2}.dll
  ${TBB_ROOT}/../redist/${TBB_ARCH}/tbb/${TBB_VCVER}/${LIB_NAME}.dll
  ${TBB_ROOT}/../redist/${TBB_ARCH}_win/tbb/${TBB_VCVER}/${LIB_NAME}.dll
)
- list(GET DLL_NAMES 0 DLL_NAME)
- get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
- set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ if (DLL_NAMES)
+   list(GET DLL_NAMES 0 DLL_NAME)
+   get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
+   set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ endif()
elseif(APPLE)
  set(LIB_PATHS ${TBB_ROOT}/lib)
else()

--- external_openimagedenoise/cmake/oidn_ispc.cmake 2021-02-15 17:29:34.000000000 +0100
+++ external_openimagedenoise/cmake/oidn_ispc.cmake2 2021-02-15 17:29:28.000000000 +0100
@@ -98,7 +98,7 @@

@@ -1,4 +1,70 @@
Buildbot Configuration
=====================
Blender Buildbot
================

Files used by Buildbot's `compile-code` step.

Code signing
------------

Code signing is done as part of the INSTALL target, which makes it possible to sign
files which are aimed into a bundle and come from a non-signed source (such as
the libraries SVN).

This is achieved by specifying `worker_codesign.cmake` as a post-install script
run by CMake. This CMake script simply invokes a utility script written in
Python which takes care of the actual signing.

### Configuration

Client configuration doesn't need anything special, other than the variable
`SHARED_STORAGE_DIR` pointing to a location which is watched by the server.
This is done in the `config_builder.py` file and is stored in Git (which makes it
possible to have almost zero-configuration buildbot machines).

Server configuration requires copying `config_server_template.py` to
`config_server.py` and tweaking its values, which are platform-specific.

#### Windows configuration

Two things are needed on Windows in order for code signing to work:

- `TIMESTAMP_AUTHORITY_URL`, which is most likely set to http://timestamp.digicert.com
- `CERTIFICATE_FILEPATH`, which is a full file path to a PKCS #12 key (.pfx).
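
A minimal sketch of what such a `config_server.py` could look like on a Windows
signing machine. The `WIN_`-prefixed names and the example paths follow
`config_server_template.py` and `config_builder.py` further down in this diff,
so treat the exact variable names and values as assumptions, not as the
definitive configuration.

```python
# Hypothetical config_server.py for a Windows signing machine (example values).
from pathlib import Path

from codesign.config_common import *  # shared defaults such as TIMEOUT_IN_SECONDS

# Directory shared between the buildbot worker and the signing server.
SHARED_STORAGE_DIR = Path('Z:\\codesign')

# Timestamping authority and signing certificate used by the Windows signer.
WIN_TIMESTAMP_AUTHORITY_URL = 'http://timestamp.digicert.com'
WIN_CERTIFICATE_FILEPATH = Path('C:\\Secret\\Blender.pfx')
```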

## Tips

### Self-signed certificate on Windows

It is easiest to test the configuration using a self-signed certificate.

The certificate manipulation utilities come with the Windows SDK.
Unfortunately, they are not added to PATH. Here is an example of how to make
sure they are easily available:

```
set PATH=C:\Program Files (x86)\Windows Kits\10\App Certification Kit;%PATH%
set PATH=C:\Program Files (x86)\Windows Kits\10\bin\10.0.18362.0\x64;%PATH%
```

Generate a CA:

```
makecert -r -pe -n "CN=Blender Test CA" -ss CA -sr CurrentUser -a sha256 ^
         -cy authority -sky signature -sv BlenderTestCA.pvk BlenderTestCA.cer
```

Import the generated CA:

```
certutil -user -addstore Root BlenderTestCA.cer
```

Create a self-signed certificate and pack it into PKCS #12:

```
makecert -pe -n "CN=Blender Test SPC" -a sha256 -cy end ^
         -sky signature ^
         -ic BlenderTestCA.cer -iv BlenderTestCA.pvk ^
         -sv BlenderTestSPC.pvk BlenderTestSPC.cer

pvk2pfx -pvk BlenderTestSPC.pvk -spc BlenderTestSPC.cer -pfx BlenderTestSPC.pfx
```
build_files/buildbot/buildbot_utils.py (new file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def is_tool(name):
|
||||
"""Check whether `name` is on PATH and marked as executable."""
|
||||
|
||||
# from whichcraft import which
|
||||
from shutil import which
|
||||
|
||||
return which(name) is not None
|
||||
|
||||
|
||||
class Builder:
|
||||
def __init__(self, name, branch, codesign):
|
||||
self.name = name
|
||||
self.branch = branch
|
||||
self.is_release_branch = re.match("^blender-v(.*)-release$", branch) is not None
|
||||
self.codesign = codesign
|
||||
|
||||
# Buildbot runs from build/ directory
|
||||
self.blender_dir = os.path.abspath(os.path.join('..', 'blender.git'))
|
||||
self.build_dir = os.path.abspath(os.path.join('..', 'build'))
|
||||
self.install_dir = os.path.abspath(os.path.join('..', 'install'))
|
||||
self.upload_dir = os.path.abspath(os.path.join('..', 'install'))
|
||||
|
||||
# Detect platform
|
||||
if name.startswith('mac'):
|
||||
self.platform = 'mac'
|
||||
self.command_prefix = []
|
||||
elif name.startswith('linux'):
|
||||
self.platform = 'linux'
|
||||
if is_tool('scl'):
|
||||
self.command_prefix = ['scl', 'enable', 'devtoolset-9', '--']
|
||||
else:
|
||||
self.command_prefix = []
|
||||
elif name.startswith('win'):
|
||||
self.platform = 'win'
|
||||
self.command_prefix = []
|
||||
else:
|
||||
raise ValueError('Unknown platform for builder ' + self.name)
|
||||
|
||||
# Always 64 bit now
|
||||
self.bits = 64
|
||||
|
||||
|
||||
def create_builder_from_arguments():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('builder_name')
|
||||
parser.add_argument('branch', default='master', nargs='?')
|
||||
parser.add_argument("--codesign", action="store_true")
|
||||
args = parser.parse_args()
|
||||
return Builder(args.builder_name, args.branch, args.codesign)
|
||||
|
||||
|
||||
class VersionInfo:
|
||||
def __init__(self, builder):
|
||||
# Get version information
|
||||
buildinfo_h = os.path.join(builder.build_dir, "source", "creator", "buildinfo.h")
|
||||
blender_h = os.path.join(builder.blender_dir, "source", "blender", "blenkernel", "BKE_blender_version.h")
|
||||
|
||||
version_number = int(self._parse_header_file(blender_h, 'BLENDER_VERSION'))
|
||||
version_number_patch = int(self._parse_header_file(blender_h, 'BLENDER_VERSION_PATCH'))
|
||||
version_numbers = (version_number // 100, version_number % 100, version_number_patch)
|
||||
self.short_version = "%d.%d" % (version_numbers[0], version_numbers[1])
|
||||
self.version = "%d.%d.%d" % version_numbers
|
||||
self.version_cycle = self._parse_header_file(blender_h, 'BLENDER_VERSION_CYCLE')
|
||||
self.hash = self._parse_header_file(buildinfo_h, 'BUILD_HASH')[1:-1]
|
||||
|
||||
if self.version_cycle == "release":
|
||||
# Final release
|
||||
self.full_version = self.version
|
||||
self.is_development_build = False
|
||||
elif self.version_cycle == "rc":
|
||||
# Release candidate
|
||||
self.full_version = self.version + self.version_cycle
|
||||
self.is_development_build = False
|
||||
else:
|
||||
# Development build
|
||||
self.full_version = self.version + '-' + self.hash
|
||||
self.is_development_build = True
|
||||
|
||||
def _parse_header_file(self, filename, define):
|
||||
import re
|
||||
regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define)
|
||||
with open(filename, "r") as file:
|
||||
for l in file:
|
||||
match = regex.match(l)
|
||||
if match:
|
||||
return match.group(1)
|
||||
return None
|
||||
|
||||
|
||||
def call(cmd, env=None, exit_on_error=True):
|
||||
print(' '.join(cmd))
|
||||
|
||||
# Flush to ensure correct order output on Windows.
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
|
||||
retcode = subprocess.call(cmd, env=env)
|
||||
if exit_on_error and retcode != 0:
|
||||
sys.exit(retcode)
|
||||
return retcode
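
The helpers above are meant to be composed by the per-step buildbot scripts. A
minimal usage sketch follows; the caller script, its location and the example
command are hypothetical, and it assumes the build/ directory layout described
in Builder.__init__.

```python
# Hypothetical caller, e.g. a per-step script living next to buildbot_utils.py.
import buildbot_utils

builder = buildbot_utils.create_builder_from_arguments()
info = buildbot_utils.VersionInfo(builder)
print(builder.platform, info.full_version, info.is_development_build)

# call() echoes the command, flushes stdio and exits on a non-zero return code.
buildbot_utils.call(builder.command_prefix + ['cmake', '--version'])
```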
|
build_files/buildbot/codesign/absolute_and_relative_filename.py (new file, 81 lines)
@@ -0,0 +1,81 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
|
||||
@dataclass
|
||||
class AbsoluteAndRelativeFileName:
|
||||
"""
|
||||
Helper class which keeps track of absolute file path for a direct access and
|
||||
corresponding relative path against given base.
|
||||
|
||||
The relative part is used to construct a file name within an archive which
|
||||
contains files which are to be signed or which has been signed already
|
||||
(depending on whether the archive is addressed to signing server or back
|
||||
to the buildbot worker).
|
||||
"""
|
||||
|
||||
# Base directory which is where relative_filepath is relative to.
|
||||
base_dir: Path
|
||||
|
||||
# Full absolute path of the corresponding file.
|
||||
absolute_filepath: Path
|
||||
|
||||
# Derived from full file path, contains part of the path which is relative
|
||||
# to a desired base path.
|
||||
relative_filepath: Path
|
||||
|
||||
def __init__(self, base_dir: Path, filepath: Path):
|
||||
self.base_dir = base_dir
|
||||
self.absolute_filepath = filepath.resolve()
|
||||
self.relative_filepath = self.absolute_filepath.relative_to(
|
||||
self.base_dir)
|
||||
|
||||
@classmethod
|
||||
def from_path(cls, path: Path) -> 'AbsoluteAndRelativeFileName':
|
||||
assert path.is_absolute()
|
||||
assert path.is_file()
|
||||
|
||||
base_dir = path.parent
|
||||
return AbsoluteAndRelativeFileName(base_dir, path)
|
||||
|
||||
@classmethod
|
||||
def recursively_from_directory(cls, base_dir: Path) \
|
||||
-> List['AbsoluteAndRelativeFileName']:
|
||||
"""
|
||||
Create list of AbsoluteAndRelativeFileName for all the files in the
|
||||
given directory.
|
||||
|
||||
NOTE: Result will be pointing to resolved paths.
|
||||
"""
|
||||
assert base_dir.is_absolute()
|
||||
assert base_dir.is_dir()
|
||||
|
||||
base_dir = base_dir.resolve()
|
||||
|
||||
result = []
|
||||
for filename in base_dir.glob('**/*'):
|
||||
if not filename.is_file():
|
||||
continue
|
||||
result.append(AbsoluteAndRelativeFileName(base_dir, filename))
|
||||
return result
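
As an illustration of how the absolute/relative pair is meant to be used when
building the signing archive, here is a small sketch; the directory path is
hypothetical.

```python
# Hypothetical example: collect every file under an install directory,
# keeping the path relative to it for use as the archive member name.
from pathlib import Path

from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName

install_dir = Path('/path/to/install').resolve()
for item in AbsoluteAndRelativeFileName.recursively_from_directory(install_dir):
    print(item.relative_filepath, '->', item.absolute_filepath)
```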
|
build_files/buildbot/codesign/archive_with_indicator.py (new file, 245 lines)
@@ -0,0 +1,245 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import dataclasses
|
||||
import json
|
||||
import os
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import codesign.util as util
|
||||
|
||||
|
||||
class ArchiveStateError(Exception):
|
||||
message: str
|
||||
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
super().__init__(self.message)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class ArchiveState:
|
||||
"""
|
||||
Additional information (state) of the archive
|
||||
|
||||
Includes information like expected file size of the archive file in the case
|
||||
the archive file is expected to be successfully created.
|
||||
|
||||
If the archive can not be created, this state will contain error message
|
||||
indicating details of error.
|
||||
"""
|
||||
|
||||
# Size in bytes of the corresponding archive.
|
||||
file_size: Optional[int] = None
|
||||
|
||||
# Non-empty value indicates that an error has happened.
|
||||
error_message: str = ''
|
||||
|
||||
def has_error(self) -> bool:
|
||||
"""
|
||||
Check whether the archive is at error state
|
||||
"""
|
||||
|
||||
return self.error_message
|
||||
|
||||
def serialize_to_string(self) -> str:
|
||||
payload = dataclasses.asdict(self)
|
||||
return json.dumps(payload, sort_keys=True, indent=4)
|
||||
|
||||
def serialize_to_file(self, filepath: Path) -> None:
|
||||
string = self.serialize_to_string()
|
||||
filepath.write_text(string)
|
||||
|
||||
@classmethod
|
||||
def deserialize_from_string(cls, string: str) -> 'ArchiveState':
|
||||
try:
|
||||
object_as_dict = json.loads(string)
|
||||
except json.decoder.JSONDecodeError:
|
||||
raise ArchiveStateError('Error parsing JSON')
|
||||
|
||||
return cls(**object_as_dict)
|
||||
|
||||
@classmethod
|
||||
def deserialize_from_file(cls, filepath: Path):
|
||||
string = filepath.read_text()
|
||||
return cls.deserialize_from_string(string)
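
For clarity, a minimal round-trip sketch of the JSON payload stored in the
READY indicator file, assuming the codesign package is importable.

```python
# Round-trip sketch of the state payload (file_size / error_message as JSON).
from codesign.archive_with_indicator import ArchiveState

state = ArchiveState(file_size=1024)
payload = state.serialize_to_string()
restored = ArchiveState.deserialize_from_string(payload)
assert restored.file_size == 1024 and not restored.has_error()
```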
|
||||
|
||||
|
||||
class ArchiveWithIndicator:
|
||||
"""
|
||||
The idea of this class is to wrap around logic which takes care of keeping
|
||||
track of a name of an archive and synchronization routines between buildbot
|
||||
worker and signing server.
|
||||
|
||||
The synchronization is done based on creating a special file after the
|
||||
archive file is knowingly ready for access.
|
||||
"""
|
||||
|
||||
# Base directory where the archive is stored (basically, a basename() of
|
||||
# the absolute archive file name).
|
||||
#
|
||||
# For example, 'X:\\TEMP\\'.
|
||||
base_dir: Path
|
||||
|
||||
# Absolute file name of the archive.
|
||||
#
|
||||
# For example, 'X:\\TEMP\\FOO.ZIP'.
|
||||
archive_filepath: Path
|
||||
|
||||
# Absolute name of a file which acts as an indication of the fact that the
|
||||
# archive is ready and is available for access.
|
||||
#
|
||||
# This is how synchronization between buildbot worker and signing server is
|
||||
# done:
|
||||
# - First, the archive is created under archive_filepath name.
|
||||
# - Second, the indication file is created under ready_indicator_filepath
|
||||
# name.
|
||||
# - Third, the colleague of whoever created the indicator watches for
# the indication file to appear, and once it's there it accesses the
# archive.
|
||||
ready_indicator_filepath: Path
|
||||
|
||||
def __init__(
|
||||
self, base_dir: Path, archive_name: str, ready_indicator_name: str):
|
||||
"""
|
||||
Construct the object from given base directory and name of the archive
|
||||
file:
|
||||
ArchiveWithIndicator(Path('X:\\TEMP'), 'FOO.ZIP', 'INPUT_READY')
|
||||
"""
|
||||
|
||||
self.base_dir = base_dir
|
||||
self.archive_filepath = self.base_dir / archive_name
|
||||
self.ready_indicator_filepath = self.base_dir / ready_indicator_name
|
||||
|
||||
def is_ready_unsafe(self) -> bool:
|
||||
"""
|
||||
Check whether the archive is ready for access.
|
||||
|
||||
No guarding against possible network failures is done here.
|
||||
"""
|
||||
if not self.ready_indicator_filepath.exists():
|
||||
return False
|
||||
|
||||
try:
|
||||
archive_state = ArchiveState.deserialize_from_file(
|
||||
self.ready_indicator_filepath)
|
||||
except ArchiveStateError as error:
|
||||
print(f'Error deserializing archive state: {error.message}')
|
||||
return False
|
||||
|
||||
if archive_state.has_error():
|
||||
# If the error did happen during codesign procedure there will be no
|
||||
# corresponding archive file.
|
||||
# The caller code will deal with the error check further.
|
||||
return True
|
||||
|
||||
# Sometimes on macOS indicator file appears prior to the actual archive
|
||||
# despite the order of creation and os.sync() used in tag_ready().
|
||||
# So consider archive not ready if there is an indicator without an
|
||||
# actual archive.
|
||||
if not self.archive_filepath.exists():
|
||||
print('Found indicator without actual archive, waiting for archive '
|
||||
f'({self.archive_filepath}) to appear.')
|
||||
return False
|
||||
|
||||
# Wait until the archive is fully stored.
|
||||
actual_archive_size = self.archive_filepath.stat().st_size
|
||||
if actual_archive_size != archive_state.file_size:
|
||||
print('Partial/invalid archive size (expected '
|
||||
f'{archive_state.file_size} got {actual_archive_size})')
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def is_ready(self) -> bool:
|
||||
"""
|
||||
Check whether the archive is ready for access.
|
||||
|
||||
Will tolerate possible network failures: if there is a network failure
|
||||
or if there is still no proper permission on a file False is returned.
|
||||
"""
|
||||
|
||||
# There is an intermittent problem happening at random which
# translates to "OSError : [WinError 59] An unexpected network error occurred".
|
||||
# Some reports suggests it might be due to lack of permissions to the file,
|
||||
# which might be applicable in our case since it's possible that file is
|
||||
# initially created with non-accessible permissions and gets chmod-ed
|
||||
# after initial creation.
|
||||
try:
|
||||
return self.is_ready_unsafe()
|
||||
except OSError as e:
|
||||
print(f'Exception checking archive: {e}')
|
||||
return False
|
||||
|
||||
def tag_ready(self, error_message='') -> None:
|
||||
"""
|
||||
Tag the archive as ready by creating the corresponding indication file.
|
||||
|
||||
NOTE: It is expected that the archive was never tagged as ready before
|
||||
and that there are no subsequent tags of the same archive.
|
||||
If it is violated, an assert will fail.
|
||||
"""
|
||||
assert not self.is_ready()
|
||||
|
||||
# Try the best to make sure everything is synced to the file system,
|
||||
# to avoid any possibility of stamp appearing on a network share prior to
|
||||
# an actual file.
|
||||
if util.get_current_platform() != util.Platform.WINDOWS:
|
||||
os.sync()
|
||||
|
||||
archive_size = -1
|
||||
if self.archive_filepath.exists():
|
||||
archive_size = self.archive_filepath.stat().st_size
|
||||
|
||||
archive_info = ArchiveState(
|
||||
file_size=archive_size, error_message=error_message)
|
||||
|
||||
self.ready_indicator_filepath.write_text(
|
||||
archive_info.serialize_to_string())
|
||||
|
||||
def get_state(self) -> ArchiveState:
|
||||
"""
|
||||
Get state object for this archive
|
||||
|
||||
The state is read from the corresponding state file.
|
||||
"""
|
||||
|
||||
try:
|
||||
return ArchiveState.deserialize_from_file(self.ready_indicator_filepath)
|
||||
except ArchiveStateError as error:
|
||||
return ArchiveState(error_message=f'Error in information format: {error}')
|
||||
|
||||
def clean(self) -> None:
|
||||
"""
|
||||
Remove both archive and the ready indication file.
|
||||
"""
|
||||
util.ensure_file_does_not_exist_or_die(self.ready_indicator_filepath)
|
||||
util.ensure_file_does_not_exist_or_die(self.archive_filepath)
|
||||
|
||||
def is_fully_absent(self) -> bool:
|
||||
"""
|
||||
Check whether both archive and its ready indicator are absent.
|
||||
Is used for a sanity check during code signing process by both
|
||||
buildbot worker and signing server.
|
||||
"""
|
||||
return (not self.archive_filepath.exists() and
|
||||
not self.ready_indicator_filepath.exists())
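
A sketch of the handshake this class implements, with one side producing the
archive and the other polling for it. The paths and archive names below are
hypothetical; the real code derives them from a request ID, as shown in
base_code_signer.py further down.

```python
# Hypothetical producer/consumer pair sharing /data/codesign (the Linux
# default from config_builder.py); file names would normally come from a
# request ID.
import time
from pathlib import Path

from codesign.archive_with_indicator import ArchiveWithIndicator

archive = ArchiveWithIndicator(Path('/data/codesign/unsigned'),
                               'request.tar', 'request.ready')

# Producer: write request.tar first, then publish the READY indicator with
# the archive size so the consumer can detect partially copied files.
# archive.tag_ready()

# Consumer: poll until the indicator exists and the archive size matches.
# while not archive.is_ready():
#     time.sleep(1)
```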
|
build_files/buildbot/codesign/base_code_signer.py (new file, 501 lines)
@@ -0,0 +1,501 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Signing process overview.
|
||||
#
|
||||
# From buildbot worker side:
|
||||
# - Files which need to be signed are collected from either a directory to
|
||||
# sign all signable files in there, or by filename of a single file to sign.
|
||||
# - Those files get packed into an archive and stored in a location
|
||||
# which is watched by the signing server.
|
||||
# - A marker READY file is created which indicates the archive is ready for
|
||||
# access.
|
||||
# - Wait for the server to provide an archive with signed files.
|
||||
# This is done by watching for the READY file which corresponds to an archive
|
||||
# coming from the signing server.
|
||||
# - Unpack the signed files from the archives and replace original ones.
|
||||
#
|
||||
# From code sign server:
|
||||
# - Watch a special location for a READY file which indicates that there is an
|
||||
# archive with files which are to be signed.
|
||||
# - Unpack the archive to a temporary location.
|
||||
# - Run codesign tool and make sure all the files are signed.
|
||||
# - Pack the signed files and store them in a location which is watched by
|
||||
# the buildbot worker.
|
||||
# - Create a READY file which indicates that the archive with signed files is
|
||||
# ready.
|
||||
|
||||
import abc
|
||||
import logging
|
||||
import shutil
|
||||
import subprocess
|
||||
import time
|
||||
import tarfile
|
||||
import uuid
|
||||
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Iterable, List
|
||||
|
||||
import codesign.util as util
|
||||
|
||||
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
|
||||
from codesign.archive_with_indicator import ArchiveWithIndicator
|
||||
from codesign.exception import CodeSignException
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger_builder = logger.getChild('builder')
|
||||
logger_server = logger.getChild('server')
|
||||
|
||||
|
||||
def pack_files(files: Iterable[AbsoluteAndRelativeFileName],
|
||||
archive_filepath: Path) -> None:
|
||||
"""
|
||||
Create tar archive from given files for the signing pipeline.
|
||||
Is used by buildbot worker to create an archive of files which are to be
|
||||
signed, and by signing server to send signed files back to the worker.
|
||||
"""
|
||||
with tarfile.TarFile.open(archive_filepath, 'w') as tar_file_handle:
|
||||
for file_info in files:
|
||||
tar_file_handle.add(file_info.absolute_filepath,
|
||||
arcname=file_info.relative_filepath)
|
||||
|
||||
|
||||
def extract_files(archive_filepath: Path,
|
||||
extraction_dir: Path) -> None:
|
||||
"""
|
||||
Extract all files from the given archive into the given directory.
|
||||
"""
|
||||
|
||||
# TODO(sergey): Verify files in the archive have relative path.
|
||||
|
||||
with tarfile.TarFile.open(archive_filepath, mode='r') as tar_file_handle:
|
||||
tar_file_handle.extractall(path=extraction_dir)
|
||||
|
||||
|
||||
class BaseCodeSigner(metaclass=abc.ABCMeta):
|
||||
"""
|
||||
Base class for a platform-specific signer of binaries.
|
||||
|
||||
Contains all the logic shared across platform-specific implementations, such
|
||||
as synchronization and notification logic.
|
||||
|
||||
Platform specific bits (such as actual command for signing the binary) are
|
||||
to be implemented as a subclass.
|
||||
|
||||
Provides utilities for code signing as a whole, including functionality needed
|
||||
by a signing server and a buildbot worker.
|
||||
|
||||
The signer and builder may run on separate machines, the only requirement is
|
||||
that they have access to a directory which is shared between them. For the
|
||||
security concerns this is to be done as a separate machine (or as a Shared
|
||||
Folder configuration in VirtualBox configuration). This directory might be
|
||||
mounted under different base paths, but its underlying storage is to be
|
||||
the same.
|
||||
|
||||
The code signer is short-lived on a buildbot worker side, and is living
|
||||
forever on a code signing server side.
|
||||
"""
|
||||
|
||||
# TODO(sergey): Find a neat way to have config annotated.
|
||||
# config: Config
|
||||
|
||||
# Storage directory where builder puts files which are requested to be
|
||||
# signed.
|
||||
# Consider this an input of the code signing server.
|
||||
unsigned_storage_dir: Path
|
||||
|
||||
# Storage where signed files are stored.
|
||||
# Consider this an output of the code signer server.
|
||||
signed_storage_dir: Path
|
||||
|
||||
# Platform the code is currently executing on.
|
||||
platform: util.Platform
|
||||
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
|
||||
absolute_shared_storage_dir = config.SHARED_STORAGE_DIR.resolve()
|
||||
|
||||
# Unsigned (signing server input) configuration.
|
||||
self.unsigned_storage_dir = absolute_shared_storage_dir / 'unsigned'
|
||||
|
||||
# Signed (signing server output) configuration.
|
||||
self.signed_storage_dir = absolute_shared_storage_dir / 'signed'
|
||||
|
||||
self.platform = util.get_current_platform()
|
||||
|
||||
def cleanup_environment_for_builder(self) -> None:
|
||||
# TODO(sergey): Revisit need of cleaning up the existing files.
|
||||
# In practice it wasn't so helpful, and with multiple clients
|
||||
# talking to the same server it becomes even more tricky.
|
||||
pass
|
||||
|
||||
def cleanup_environment_for_signing_server(self) -> None:
|
||||
# TODO(sergey): Revisit need of cleaning up the existing files.
|
||||
# In practice it wasn't so helpful, and with multiple clients
|
||||
# talking to the same server it becomes even more tricky.
|
||||
pass
|
||||
|
||||
def generate_request_id(self) -> str:
|
||||
"""
|
||||
Generate a unique identifier for a code signing request.
|
||||
"""
|
||||
return str(uuid.uuid4())
|
||||
|
||||
def archive_info_for_request_id(
|
||||
self, path: Path, request_id: str) -> ArchiveWithIndicator:
|
||||
return ArchiveWithIndicator(
|
||||
path, f'{request_id}.tar', f'{request_id}.ready')
|
||||
|
||||
def signed_archive_info_for_request_id(
|
||||
self, request_id: str) -> ArchiveWithIndicator:
|
||||
return self.archive_info_for_request_id(
|
||||
self.signed_storage_dir, request_id)
|
||||
|
||||
def unsigned_archive_info_for_request_id(
|
||||
self, request_id: str) -> ArchiveWithIndicator:
|
||||
return self.archive_info_for_request_id(
|
||||
self.unsigned_storage_dir, request_id)
|
||||
|
||||
############################################################################
|
||||
# Buildbot worker side helpers.
|
||||
|
||||
@abc.abstractmethod
|
||||
def check_file_is_to_be_signed(
|
||||
self, file: AbsoluteAndRelativeFileName) -> bool:
|
||||
"""
|
||||
Check whether file is to be signed.
|
||||
|
||||
Is used by both single file signing pipeline and recursive directory
|
||||
signing pipeline.
|
||||
|
||||
This is where code signer is to check whether file is to be signed or
|
||||
not. This check might be based on a simple extension test or on actual
|
||||
test whether the file already has a digital signature or not.
|
||||
"""
|
||||
|
||||
def collect_files_to_sign(self, path: Path) \
|
||||
-> List[AbsoluteAndRelativeFileName]:
|
||||
"""
|
||||
Get all files which need to be signed from the given path.
|
||||
|
||||
NOTE: The path might either be a file or directory.
|
||||
|
||||
This function is run from the buildbot worker side.
|
||||
"""
|
||||
|
||||
# If there is a single file provided trust the buildbot worker that it
|
||||
# is eligible for signing.
|
||||
if path.is_file():
|
||||
file = AbsoluteAndRelativeFileName.from_path(path)
|
||||
if not self.check_file_is_to_be_signed(file):
|
||||
return []
|
||||
return [file]
|
||||
|
||||
all_files = AbsoluteAndRelativeFileName.recursively_from_directory(
|
||||
path)
|
||||
files_to_be_signed = [file for file in all_files
|
||||
if self.check_file_is_to_be_signed(file)]
|
||||
return files_to_be_signed
|
||||
|
||||
def wait_for_signed_archive_or_die(self, request_id) -> None:
|
||||
"""
|
||||
Wait until archive with signed files is available.
|
||||
|
||||
Will only return if the archive with signed files is available. If there
|
||||
was an error during code sign procedure the SystemExit exception is
|
||||
raised, with the message set to the error reported by the codesign
|
||||
server.
|
||||
|
||||
Will only wait for the configured time. If that time is exceeded and there
|
||||
is still no response from the signing server the application will exit
|
||||
with a non-zero exit code.
|
||||
|
||||
"""
|
||||
|
||||
signed_archive_info = self.signed_archive_info_for_request_id(
|
||||
request_id)
|
||||
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
|
||||
request_id)
|
||||
|
||||
timeout_in_seconds = self.config.TIMEOUT_IN_SECONDS
|
||||
time_start = time.monotonic()
|
||||
while not signed_archive_info.is_ready():
|
||||
time.sleep(1)
|
||||
time_slept_in_seconds = time.monotonic() - time_start
|
||||
if time_slept_in_seconds > timeout_in_seconds:
|
||||
signed_archive_info.clean()
|
||||
unsigned_archive_info.clean()
|
||||
raise SystemExit("Signing server didn't finish signing in "
|
||||
f'{timeout_in_seconds} seconds, dying :(')
|
||||
|
||||
archive_state = signed_archive_info.get_state()
|
||||
if archive_state.has_error():
|
||||
signed_archive_info.clean()
|
||||
unsigned_archive_info.clean()
|
||||
raise SystemExit(
|
||||
f'Error happened during codesign procedure: {archive_state.error_message}')
|
||||
|
||||
def copy_signed_files_to_directory(
|
||||
self, signed_dir: Path, destination_dir: Path) -> None:
|
||||
"""
|
||||
Copy all files from signed_dir to destination_dir.
|
||||
|
||||
This function will overwrite any existing file. Permissions are copied
|
||||
from the source files, but other metadata, such as timestamps, are not.
|
||||
"""
|
||||
for signed_filepath in signed_dir.glob('**/*'):
|
||||
if not signed_filepath.is_file():
|
||||
continue
|
||||
|
||||
relative_filepath = signed_filepath.relative_to(signed_dir)
|
||||
destination_filepath = destination_dir / relative_filepath
|
||||
destination_filepath.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
shutil.copy(signed_filepath, destination_filepath)
|
||||
|
||||
def run_buildbot_path_sign_pipeline(self, path: Path) -> None:
|
||||
"""
|
||||
Run all steps needed to make given path signed.
|
||||
|
||||
Path points to an unsigned file or a directory which contains unsigned
|
||||
files.
|
||||
|
||||
If the path points to a single file then this file will be signed.
|
||||
This is used to sign a final bundle such as .msi on Windows or .dmg on
|
||||
macOS.
|
||||
|
||||
NOTE: The code signer implementation might actually reject signing the
|
||||
file, in which case the file will be left unsigned. This isn't anything
|
||||
to be considered a failure situation, just might happen when buildbot
|
||||
worker can not detect whether signing is really required in a specific
|
||||
case or not.
|
||||
|
||||
If the path points to a directory then code signer will sign all
|
||||
signable files from it (finding them recursively).
|
||||
"""
|
||||
|
||||
self.cleanup_environment_for_builder()
|
||||
|
||||
# Make sure storage directory exists.
|
||||
self.unsigned_storage_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Collect all files which needs to be signed and pack them into a single
|
||||
# archive which will be sent to the signing server.
|
||||
logger_builder.info('Collecting files which are to be signed...')
|
||||
files = self.collect_files_to_sign(path)
|
||||
if not files:
|
||||
logger_builder.info('No files to be signed, ignoring.')
|
||||
return
|
||||
logger_builder.info('Found %d files to sign.', len(files))
|
||||
|
||||
request_id = self.generate_request_id()
|
||||
signed_archive_info = self.signed_archive_info_for_request_id(
|
||||
request_id)
|
||||
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
|
||||
request_id)
|
||||
|
||||
pack_files(files=files,
|
||||
archive_filepath=unsigned_archive_info.archive_filepath)
|
||||
unsigned_archive_info.tag_ready()
|
||||
|
||||
# Wait for the signing server to finish signing.
|
||||
logger_builder.info('Waiting signing server to sign the files...')
|
||||
self.wait_for_signed_archive_or_die(request_id)
|
||||
|
||||
# Extract signed files from archive and move files to final location.
|
||||
with TemporaryDirectory(prefix='blender-buildbot-') as temp_dir_str:
|
||||
unpacked_signed_files_dir = Path(temp_dir_str)
|
||||
|
||||
logger_builder.info('Extracting signed files from archive...')
|
||||
extract_files(
|
||||
archive_filepath=signed_archive_info.archive_filepath,
|
||||
extraction_dir=unpacked_signed_files_dir)
|
||||
|
||||
destination_dir = path
|
||||
if destination_dir.is_file():
|
||||
destination_dir = destination_dir.parent
|
||||
self.copy_signed_files_to_directory(
|
||||
unpacked_signed_files_dir, destination_dir)
|
||||
|
||||
logger_builder.info('Removing archive with signed files...')
|
||||
signed_archive_info.clean()
|
||||
|
||||
############################################################################
|
||||
# Signing server side helpers.
|
||||
|
||||
def wait_for_sign_request(self) -> str:
|
||||
"""
|
||||
Wait for the buildbot to request signing of an archive.
|
||||
|
||||
Returns an identifier of signing request.
|
||||
"""
|
||||
|
||||
# TODO(sergey): Support graceful shutdown on Ctrl-C.
|
||||
|
||||
logger_server.info(
|
||||
f'Waiting for a request directory {self.unsigned_storage_dir} to appear.')
|
||||
while not self.unsigned_storage_dir.exists():
|
||||
time.sleep(1)
|
||||
|
||||
logger_server.info(
|
||||
'Waiting for a READY indicator of any signing request.')
|
||||
request_id = None
|
||||
while request_id is None:
|
||||
for file in self.unsigned_storage_dir.iterdir():
|
||||
if file.suffix != '.ready':
|
||||
continue
|
||||
request_id = file.stem
|
||||
logger_server.info(f'Found READY for request ID {request_id}.')
|
||||
if request_id is None:
|
||||
time.sleep(1)
|
||||
|
||||
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
|
||||
request_id)
|
||||
while not unsigned_archive_info.is_ready():
|
||||
time.sleep(1)
|
||||
|
||||
return request_id
|
||||
|
||||
@abc.abstractmethod
|
||||
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
||||
"""
|
||||
Sign all files in the given directory.
|
||||
|
||||
NOTE: Signing should happen in-place.
|
||||
"""
|
||||
|
||||
def run_signing_pipeline(self, request_id: str):
|
||||
"""
|
||||
Run the full signing pipeline starting from the point when buildbot
|
||||
worker has requested signing.
|
||||
"""
|
||||
|
||||
# Make sure storage directory exists.
|
||||
self.signed_storage_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
with TemporaryDirectory(prefix='blender-codesign-') as temp_dir_str:
|
||||
temp_dir = Path(temp_dir_str)
|
||||
|
||||
signed_archive_info = self.signed_archive_info_for_request_id(
|
||||
request_id)
|
||||
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
|
||||
request_id)
|
||||
|
||||
logger_server.info('Extracting unsigned files from archive...')
|
||||
extract_files(
|
||||
archive_filepath=unsigned_archive_info.archive_filepath,
|
||||
extraction_dir=temp_dir)
|
||||
|
||||
logger_server.info('Collecting all files which needs signing...')
|
||||
files = AbsoluteAndRelativeFileName.recursively_from_directory(
|
||||
temp_dir)
|
||||
|
||||
logger_server.info('Signing all requested files...')
|
||||
try:
|
||||
self.sign_all_files(files)
|
||||
except CodeSignException as error:
|
||||
signed_archive_info.tag_ready(error_message=error.message)
|
||||
unsigned_archive_info.clean()
|
||||
logger_server.info('Signing is complete with errors.')
|
||||
return
|
||||
|
||||
logger_server.info('Packing signed files...')
|
||||
pack_files(files=files,
|
||||
archive_filepath=signed_archive_info.archive_filepath)
|
||||
signed_archive_info.tag_ready()
|
||||
|
||||
logger_server.info('Removing signing request...')
|
||||
unsigned_archive_info.clean()
|
||||
|
||||
logger_server.info('Signing is complete.')
|
||||
|
||||
def run_signing_server(self):
|
||||
logger_server.info('Starting new code signing server...')
|
||||
self.cleanup_environment_for_signing_server()
|
||||
logger_server.info('Code signing server is ready')
|
||||
while True:
|
||||
logger_server.info('Waiting for the signing request in %s...',
|
||||
self.unsigned_storage_dir)
|
||||
request_id = self.wait_for_sign_request()
|
||||
|
||||
logger_server.info(
|
||||
f'Begin signing procedure for request ID {request_id}.')
|
||||
self.run_signing_pipeline(request_id)
|
||||
|
||||
############################################################################
|
||||
# Command executing.
|
||||
#
|
||||
# Abstracted to a degree that allows to run commands from a foreign
|
||||
# platform.
|
||||
# The goal with this is to allow performing dry-run tests of code signer
|
||||
# server from other platforms (for example, to test that macOS code signer
|
||||
# does what it is supposed to after doing a refactor on Linux).
|
||||
|
||||
# TODO(sergey): What is the type annotation for the command?
|
||||
def run_command_or_mock(self, command, platform: util.Platform) -> None:
|
||||
"""
|
||||
Run given command if current platform matches given one
|
||||
|
||||
If the platform is different then it will only be printed allowing
|
||||
to verify logic of the code signing process.
|
||||
"""
|
||||
|
||||
if platform != self.platform:
|
||||
logger_server.info(
|
||||
f'Will run command for {platform}: {command}')
|
||||
return
|
||||
|
||||
logger_server.info(f'Running command: {command}')
|
||||
subprocess.run(command)
|
||||
|
||||
# TODO(sergey): What is the type annotation for the command?
|
||||
def check_output_or_mock(self, command,
|
||||
platform: util.Platform,
|
||||
allow_nonzero_exit_code=False) -> str:
|
||||
"""
|
||||
Run given command if current platform matches given one
|
||||
|
||||
If the platform is different then it will only be printed allowing
|
||||
to verify logic of the code signing process.
|
||||
|
||||
If allow_nonzero_exit_code is true then the output will be returned
|
||||
even if application quit with non-zero exit code.
|
||||
Otherwise a subprocess.CalledProcessError exception will be raised
|
||||
in such case.
|
||||
"""
|
||||
|
||||
if platform != self.platform:
|
||||
logger_server.info(
|
||||
f'Will run command for {platform}: {command}')
|
||||
return
|
||||
|
||||
if allow_nonzero_exit_code:
|
||||
process = subprocess.Popen(command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT)
|
||||
output = process.communicate()[0]
|
||||
return output.decode()
|
||||
|
||||
logger_server.info(f'Running command: {command}')
|
||||
return subprocess.check_output(
|
||||
command, stderr=subprocess.STDOUT).decode()
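
A sketch of how the two roles might drive this class, assuming a concrete
subclass such as LinuxCodeSigner (shown further down) and that a configuration
module like codesign.config_builder is passed in as the config object; whether
the real entry-point scripts wire it exactly this way is not shown in this
diff.

```python
# Illustrative wiring only; passing the config module is an assumption.
from pathlib import Path

import codesign.config_builder
from codesign.linux_code_signer import LinuxCodeSigner

signer = LinuxCodeSigner(codesign.config_builder)

# Buildbot worker side: request signing of everything under the install directory.
# signer.run_buildbot_path_sign_pipeline(Path('../install'))

# Signing server side: loops forever, serving one request at a time.
# signer.run_signing_server()
```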
|
build_files/buildbot/codesign/config_builder.py (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Configuration of a code signer which is specific to the code running from
|
||||
# buildbot's worker.
|
||||
|
||||
import sys
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import codesign.util as util
|
||||
|
||||
from codesign.config_common import *
|
||||
|
||||
platform = util.get_current_platform()
|
||||
if platform == util.Platform.LINUX:
|
||||
SHARED_STORAGE_DIR = Path('/data/codesign')
|
||||
elif platform == util.Platform.WINDOWS:
|
||||
SHARED_STORAGE_DIR = Path('Z:\\codesign')
|
||||
elif platform == util.Platform.MACOS:
|
||||
SHARED_STORAGE_DIR = Path('/Volumes/codesign_macos/codesign')
|
||||
|
||||
# https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
|
||||
LOGGING = {
|
||||
'version': 1,
|
||||
'formatters': {
|
||||
'default': {'format': '%(asctime)-15s %(levelname)8s %(name)s %(message)s'}
|
||||
},
|
||||
'handlers': {
|
||||
'console': {
|
||||
'class': 'logging.StreamHandler',
|
||||
'formatter': 'default',
|
||||
'stream': 'ext://sys.stderr',
|
||||
}
|
||||
},
|
||||
'loggers': {
|
||||
'codesign': {'level': 'INFO'},
|
||||
},
|
||||
'root': {
|
||||
'level': 'WARNING',
|
||||
'handlers': [
|
||||
'console',
|
||||
],
|
||||
}
|
||||
}
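
The LOGGING dictionary follows the standard logging.config dictConfig schema,
so applying it is a one-liner; a sketch, assuming the codesign package is on
the import path.

```python
# Sketch: apply the dictConfig-style LOGGING settings defined above.
import logging
import logging.config

from codesign import config_builder

logging.config.dictConfig(config_builder.LOGGING)
logging.getLogger('codesign').info('code signing logging configured')
```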
|
build_files/buildbot/codesign/config_common.py (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
# Timeout in seconds for the signing process.
|
||||
#
|
||||
# This is how long buildbot packing step will wait signing server to
|
||||
# perform signing.
|
||||
#
|
||||
# NOTE: Notarization could take a long time, hence the rather high value
|
||||
# here. Might consider using different timeout for different platforms.
|
||||
TIMEOUT_IN_SECONDS = 45 * 60 * 60
|
||||
|
||||
# Directory which is shared across buildbot worker and signing server.
|
||||
#
|
||||
# This is where worker puts files requested for signing as well as where
|
||||
# server puts signed files.
|
||||
SHARED_STORAGE_DIR: Path
|
build_files/buildbot/codesign/config_server_template.py (new file, 101 lines)
@@ -0,0 +1,101 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Configuration of a code signer which is specific to the code signing server.
|
||||
#
|
||||
# NOTE: DO NOT put any sensitive information here, put it in an actual
|
||||
# configuration on the signing machine.
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from codesign.config_common import *
|
||||
|
||||
CODESIGN_DIRECTORY = Path(__file__).absolute().parent
|
||||
BLENDER_GIT_ROOT_DIRECTORY = CODESIGN_DIRECTORY.parent.parent.parent
|
||||
|
||||
################################################################################
|
||||
# Common configuration.
|
||||
|
||||
# Directory where folders for codesign requests and signed result are stored.
|
||||
# For example, /data/codesign
|
||||
SHARED_STORAGE_DIR: Path
|
||||
|
||||
################################################################################
|
||||
# macOS-specific configuration.
|
||||
|
||||
MACOS_ENTITLEMENTS_FILE = \
|
||||
BLENDER_GIT_ROOT_DIRECTORY / 'release' / 'darwin' / 'entitlements.plist'
|
||||
|
||||
# Identity of the Developer ID Application certificate which is to be used for
|
||||
# codesign tool.
|
||||
# Use `security find-identity -v -p codesigning` to find the identity.
|
||||
#
|
||||
# NOTE: This identity is just an example from release/darwin/README.txt.
|
||||
MACOS_CODESIGN_IDENTITY = 'AE825E26F12D08B692F360133210AF46F4CF7B97'
|
||||
|
||||
# User name (Apple ID) which will be used to request notarization.
|
||||
MACOS_XCRUN_USERNAME = 'me@example.com'
|
||||
|
||||
# One-time application password which will be used to request notarization.
|
||||
MACOS_XCRUN_PASSWORD = '@keychain:altool-password'
|
||||
|
||||
# Timeout in seconds within which the notarial office is supposed to reply.
|
||||
MACOS_NOTARIZE_TIMEOUT_IN_SECONDS = 60 * 60
|
||||
|
||||
################################################################################
|
||||
# Windows-specific configuration.
|
||||
|
||||
# URL to the timestamping authority.
|
||||
WIN_TIMESTAMP_AUTHORITY_URL = 'http://timestamp.digicert.com'
|
||||
|
||||
# Full path to the certificate used for signing.
|
||||
#
|
||||
# The path and expected file format might vary depending on a platform.
|
||||
#
|
||||
# On Windows it is usually is a PKCS #12 key (.pfx), so the path will look
|
||||
# like Path('C:\\Secret\\Blender.pfx').
|
||||
WIN_CERTIFICATE_FILEPATH: Path
|
||||
|
||||
################################################################################
|
||||
# Logging configuration, common for all platforms.
|
||||
|
||||
# https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
|
||||
LOGGING = {
|
||||
'version': 1,
|
||||
'formatters': {
|
||||
'default': {'format': '%(asctime)-15s %(levelname)8s %(name)s %(message)s'}
|
||||
},
|
||||
'handlers': {
|
||||
'console': {
|
||||
'class': 'logging.StreamHandler',
|
||||
'formatter': 'default',
|
||||
'stream': 'ext://sys.stderr',
|
||||
}
|
||||
},
|
||||
'loggers': {
|
||||
'codesign': {'level': 'INFO'},
|
||||
},
|
||||
'root': {
|
||||
'level': 'WARNING',
|
||||
'handlers': [
|
||||
'console',
|
||||
],
|
||||
}
|
||||
}
|
build_files/buildbot/codesign/exception.py (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
class CodeSignException(Exception):
|
||||
message: str
|
||||
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
super().__init__(self.message)
|
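The platform-specific signers later in this change derive their error types from this base class; a tiny usage sketch (the subclass name here is made up for illustration):

from codesign.exception import CodeSignException

class ExampleSigningError(CodeSignException):
    # Real subclasses added in this change: NotarizationException, SigntoolException.
    pass

try:
    raise ExampleSigningError('signing backend returned a non-zero status')
except CodeSignException as error:
    print(f'Code signing failed: {error.message}')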
72
build_files/buildbot/codesign/linux_code_signer.py
Normal file
@@ -0,0 +1,72 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# NOTE: This is a no-op signer (since there isn't really a procedure to sign
|
||||
# Linux binaries yet). Used to debug and verify the code signing routines on
|
||||
# a Linux environment.
|
||||
|
||||
import logging
|
||||
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
|
||||
from codesign.base_code_signer import BaseCodeSigner
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger_server = logger.getChild('server')
|
||||
|
||||
|
||||
class LinuxCodeSigner(BaseCodeSigner):
|
||||
def is_active(self) -> bool:
|
||||
"""
|
||||
Check whether this signer is active.
|
||||
|
||||
If it is inactive, no files will be signed.
|
||||
|
||||
Used to allow debugging the code signing pipeline on Linux, where there
|
||||
is no code signing happening in the actual buildbot and release
|
||||
environment.
|
||||
"""
|
||||
return False
|
||||
|
||||
def check_file_is_to_be_signed(
|
||||
self, file: AbsoluteAndRelativeFileName) -> bool:
|
||||
if file.relative_filepath == Path('blender'):
|
||||
return True
|
||||
if (file.relative_filepath.parts[-3:-1] == ('python', 'bin') and
|
||||
file.relative_filepath.name.startswith('python')):
|
||||
return True
|
||||
if file.relative_filepath.suffix == '.so':
|
||||
return True
|
||||
return False
|
||||
|
||||
def collect_files_to_sign(self, path: Path) \
|
||||
-> List[AbsoluteAndRelativeFileName]:
|
||||
if not self.is_active():
|
||||
return []
|
||||
|
||||
return super().collect_files_to_sign(path)
|
||||
|
||||
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
||||
num_files = len(files)
|
||||
for file_index, file in enumerate(files):
|
||||
logger.info('Server: Signed file [%d/%d] %s',
|
||||
file_index + 1, num_files, file.relative_filepath)
|
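The file-selection rules of this no-op signer can be checked in isolation; a standalone sketch that restates the same path checks (the example relative paths are illustrative):

from pathlib import Path

def would_sign_on_linux(relative_filepath: Path) -> bool:
    # Mirrors LinuxCodeSigner.check_file_is_to_be_signed(): the blender binary,
    # bundled Python executables and shared libraries are selected.
    if relative_filepath == Path('blender'):
        return True
    if (relative_filepath.parts[-3:-1] == ('python', 'bin') and
            relative_filepath.name.startswith('python')):
        return True
    return relative_filepath.suffix == '.so'

assert would_sign_on_linux(Path('blender'))
assert would_sign_on_linux(Path('2.90/python/bin/python3.7m'))
assert not would_sign_on_linux(Path('readme.html'))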
456
build_files/buildbot/codesign/macos_code_signer.py
Normal file
@@ -0,0 +1,456 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import logging
|
||||
import re
|
||||
import stat
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
import codesign.util as util
|
||||
|
||||
from buildbot_utils import Builder
|
||||
|
||||
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
|
||||
from codesign.base_code_signer import BaseCodeSigner
|
||||
from codesign.exception import CodeSignException
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger_server = logger.getChild('server')
|
||||
|
||||
# NOTE: Check is done as filename.endswith(), so keep the dot
|
||||
EXTENSIONS_TO_BE_SIGNED = {'.dylib', '.so', '.dmg'}
|
||||
|
||||
# Prefixes of a file (not directory) name which are to be signed.
|
||||
# Used to sign extra executable files in Contents/Resources.
|
||||
NAME_PREFIXES_TO_BE_SIGNED = {'python'}
|
||||
|
||||
|
||||
class NotarizationException(CodeSignException):
|
||||
pass
|
||||
|
||||
|
||||
def is_file_from_bundle(file: AbsoluteAndRelativeFileName) -> bool:
|
||||
"""
|
||||
Check whether file is coming from an .app bundle
|
||||
"""
|
||||
parts = file.relative_filepath.parts
|
||||
if not parts:
|
||||
return False
|
||||
if not parts[0].endswith('.app'):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def get_bundle_from_file(
|
||||
file: AbsoluteAndRelativeFileName) -> AbsoluteAndRelativeFileName:
|
||||
"""
|
||||
Get AbsoluteAndRelativeFileName descriptor of bundle
|
||||
"""
|
||||
assert(is_file_from_bundle(file))
|
||||
|
||||
parts = file.relative_filepath.parts
|
||||
bundle_name = parts[0]
|
||||
|
||||
base_dir = file.base_dir
|
||||
bundle_filepath = file.base_dir / bundle_name
|
||||
return AbsoluteAndRelativeFileName(base_dir, bundle_filepath)
|
||||
|
||||
|
||||
def is_bundle_executable_file(file: AbsoluteAndRelativeFileName) -> bool:
|
||||
"""
|
||||
Check whether given file is an executable within an app bundle
|
||||
"""
|
||||
if not is_file_from_bundle(file):
|
||||
return False
|
||||
|
||||
parts = file.relative_filepath.parts
|
||||
num_parts = len(parts)
|
||||
if num_parts < 3:
|
||||
return False
|
||||
|
||||
if parts[1:3] != ('Contents', 'MacOS'):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def xcrun_field_value_from_output(field: str, output: str) -> str:
|
||||
"""
|
||||
Get value of a given field from xcrun output.
|
||||
|
||||
If the field is not found, an empty string is returned.
|
||||
"""
|
||||
|
||||
field_prefix = field + ': '
|
||||
for line in output.splitlines():
|
||||
line = line.strip()
|
||||
if line.startswith(field_prefix):
|
||||
return line[len(field_prefix):]
|
||||
return ''
|
||||
|
||||
|
||||
class MacOSCodeSigner(BaseCodeSigner):
|
||||
def check_file_is_to_be_signed(
|
||||
self, file: AbsoluteAndRelativeFileName) -> bool:
|
||||
if file.relative_filepath.name.startswith('.'):
|
||||
return False
|
||||
|
||||
if is_bundle_executable_file(file):
|
||||
return True
|
||||
|
||||
base_name = file.relative_filepath.name
|
||||
if any(base_name.startswith(prefix)
|
||||
for prefix in NAME_PREFIXES_TO_BE_SIGNED):
|
||||
return True
|
||||
|
||||
mode = file.absolute_filepath.lstat().st_mode
|
||||
if mode & stat.S_IXUSR != 0:
|
||||
file_output = subprocess.check_output(
|
||||
("file", file.absolute_filepath)).decode()
|
||||
if "64-bit executable" in file_output:
|
||||
return True
|
||||
|
||||
return file.relative_filepath.suffix in EXTENSIONS_TO_BE_SIGNED
|
||||
|
||||
def collect_files_to_sign(self, path: Path) \
|
||||
-> List[AbsoluteAndRelativeFileName]:
|
||||
# Include all files when signing app or dmg bundle: all the files are
|
||||
# needed to do valid signature of bundle.
|
||||
if path.name.endswith('.app'):
|
||||
return AbsoluteAndRelativeFileName.recursively_from_directory(path)
|
||||
if path.is_dir():
|
||||
files = []
|
||||
for child in path.iterdir():
|
||||
if child.name.endswith('.app'):
|
||||
current_files = AbsoluteAndRelativeFileName.recursively_from_directory(
|
||||
child)
|
||||
else:
|
||||
current_files = super().collect_files_to_sign(child)
|
||||
for current_file in current_files:
|
||||
files.append(AbsoluteAndRelativeFileName(
|
||||
path, current_file.absolute_filepath))
|
||||
return files
|
||||
return super().collect_files_to_sign(path)
|
||||
|
||||
############################################################################
|
||||
# Codesign.
|
||||
|
||||
def codesign_remove_signature(
|
||||
self, file: AbsoluteAndRelativeFileName) -> None:
|
||||
"""
|
||||
Make sure the given file does not have a codesign signature
|
||||
|
||||
This is needed because codesigning is not possible for a file which has a
|
||||
signature already.
|
||||
"""
|
||||
|
||||
logger_server.info(
|
||||
'Removing codesign signature from %s...', file.relative_filepath)
|
||||
|
||||
command = ['codesign', '--remove-signature', file.absolute_filepath]
|
||||
self.run_command_or_mock(command, util.Platform.MACOS)
|
||||
|
||||
def codesign_file(
|
||||
self, file: AbsoluteAndRelativeFileName) -> None:
|
||||
"""
|
||||
Sign given file
|
||||
|
||||
NOTE: File must not have any signatures.
|
||||
"""
|
||||
|
||||
logger_server.info(
|
||||
'Codesigning %s...', file.relative_filepath)
|
||||
|
||||
entitlements_file = self.config.MACOS_ENTITLEMENTS_FILE
|
||||
command = ['codesign',
|
||||
'--timestamp',
|
||||
'--options', 'runtime',
|
||||
f'--entitlements={entitlements_file}',
|
||||
'--sign', self.config.MACOS_CODESIGN_IDENTITY,
|
||||
file.absolute_filepath]
|
||||
self.run_command_or_mock(command, util.Platform.MACOS)
|
||||
|
||||
def codesign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
||||
"""
|
||||
Run codesign tool on all eligible files in the given list.
|
||||
|
||||
Will ignore all files which are not to be signed. For the rest will
|
||||
remove possible existing signature and add a new signature.
|
||||
"""
|
||||
|
||||
num_files = len(files)
|
||||
have_ignored_files = False
|
||||
signed_files = []
|
||||
for file_index, file in enumerate(files):
|
||||
# Ignore file if it is not to be signed.
|
||||
# Allows one to manually construct a ZIP of a bundle and get it signed.
|
||||
if not self.check_file_is_to_be_signed(file):
|
||||
logger_server.info(
|
||||
'Ignoring file [%d/%d] %s',
|
||||
file_index + 1, num_files, file.relative_filepath)
|
||||
have_ignored_files = True
|
||||
continue
|
||||
|
||||
logger_server.info(
|
||||
'Running codesigning routines for file [%d/%d] %s...',
|
||||
file_index + 1, num_files, file.relative_filepath)
|
||||
|
||||
self.codesign_remove_signature(file)
|
||||
self.codesign_file(file)
|
||||
|
||||
signed_files.append(file)
|
||||
|
||||
if have_ignored_files:
|
||||
logger_server.info('Signed %d files:', len(signed_files))
|
||||
num_signed_files = len(signed_files)
|
||||
for file_index, signed_file in enumerate(signed_files):
|
||||
logger_server.info(
|
||||
'- [%d/%d] %s',
|
||||
file_index + 1, num_signed_files,
|
||||
signed_file.relative_filepath)
|
||||
|
||||
def codesign_bundles(
|
||||
self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
||||
"""
|
||||
Codesign all .app bundles in the given list of files.
|
||||
|
||||
The bundle is deduced from the paths of the files, and every bundle is only
|
||||
signed once.
|
||||
"""
|
||||
|
||||
signed_bundles = set()
|
||||
extra_files = []
|
||||
|
||||
for file in files:
|
||||
if not is_file_from_bundle(file):
|
||||
continue
|
||||
bundle = get_bundle_from_file(file)
|
||||
bundle_name = bundle.relative_filepath
|
||||
if bundle_name in signed_bundles:
|
||||
continue
|
||||
|
||||
logger_server.info('Running codesign routines on bundle %s',
|
||||
bundle_name)
|
||||
|
||||
# It is not possible to remove signature from DMG.
|
||||
if bundle.relative_filepath.name.endswith('.app'):
|
||||
self.codesign_remove_signature(bundle)
|
||||
self.codesign_file(bundle)
|
||||
|
||||
signed_bundles.add(bundle_name)
|
||||
|
||||
# Codesign on a bundle adds an extra folder with information.
|
||||
# It needs to be copied to the source.
|
||||
code_signature_directory = \
|
||||
bundle.absolute_filepath / 'Contents' / '_CodeSignature'
|
||||
code_signature_files = \
|
||||
AbsoluteAndRelativeFileName.recursively_from_directory(
|
||||
code_signature_directory)
|
||||
for code_signature_file in code_signature_files:
|
||||
bundle_relative_file = AbsoluteAndRelativeFileName(
|
||||
bundle.base_dir,
|
||||
code_signature_directory /
|
||||
code_signature_file.relative_filepath)
|
||||
extra_files.append(bundle_relative_file)
|
||||
|
||||
files.extend(extra_files)
|
||||
|
||||
############################################################################
|
||||
# Notarization.
|
||||
|
||||
def notarize_get_bundle_id(self, file: AbsoluteAndRelativeFileName) -> str:
|
||||
"""
|
||||
Get bundle ID which will be used to notarize DMG
|
||||
"""
|
||||
name = file.relative_filepath.name
|
||||
app_name = name.split('-', 2)[0].lower()
|
||||
|
||||
app_name_words = app_name.split()
|
||||
if len(app_name_words) > 1:
|
||||
app_name_id = ''.join(word.capitalize() for word in app_name_words)
|
||||
else:
|
||||
app_name_id = app_name_words[0]
|
||||
|
||||
# TODO(sergey): Consider using "alpha" for buildbot builds.
|
||||
return f'org.blenderfoundation.{app_name_id}.release'
|
||||
|
||||
def notarize_request(self, file) -> str:
|
||||
"""
|
||||
Request notarization of the given file.
|
||||
|
||||
Returns the UUID of the notarization request. If an error occurred, None is
|
||||
returned instead of the UUID.
|
||||
"""
|
||||
|
||||
bundle_id = self.notarize_get_bundle_id(file)
|
||||
logger_server.info('Bundle ID: %s', bundle_id)
|
||||
|
||||
logger_server.info('Submitting file to the notarial office.')
|
||||
command = [
|
||||
'xcrun', 'altool', '--notarize-app', '--verbose',
|
||||
'-f', file.absolute_filepath,
|
||||
'--primary-bundle-id', bundle_id,
|
||||
'--username', self.config.MACOS_XCRUN_USERNAME,
|
||||
'--password', self.config.MACOS_XCRUN_PASSWORD]
|
||||
|
||||
output = self.check_output_or_mock(
|
||||
command, util.Platform.MACOS, allow_nonzero_exit_code=True)
|
||||
|
||||
for line in output.splitlines():
|
||||
line = line.strip()
|
||||
if line.startswith('RequestUUID = '):
|
||||
request_uuid = line[14:]
|
||||
return request_uuid
|
||||
|
||||
# Check whether the package has been already submitted.
|
||||
if 'The software asset has already been uploaded.' in line:
|
||||
request_uuid = re.sub(
|
||||
r'.*The upload ID is ([A-Fa-f0-9\-]+).*', r'\1', line)
|
||||
logger_server.warning(
|
||||
f'The package has been already submitted under UUID {request_uuid}')
|
||||
return request_uuid
|
||||
|
||||
logger_server.error(output)
|
||||
logger_server.error('xcrun command did not report RequestUUID')
|
||||
return None
|
||||
|
||||
def notarize_review_status(self, xcrun_output: str) -> bool:
|
||||
"""
|
||||
Review status returned by xcrun's notarization info
|
||||
|
||||
Returns True if the notarization process has finished.
|
||||
If there are errors during notarization, a NotarizationException()
|
||||
exception is thrown with status message from the notarial office.
|
||||
"""
|
||||
|
||||
# Parse status and message
|
||||
status = xcrun_field_value_from_output('Status', xcrun_output)
|
||||
status_message = xcrun_field_value_from_output(
|
||||
'Status Message', xcrun_output)
|
||||
|
||||
if status == 'success':
|
||||
logger_server.info(
|
||||
'Package successfully notarized: %s', status_message)
|
||||
return True
|
||||
|
||||
if status == 'invalid':
|
||||
logger_server.error(xcrun_output)
|
||||
logger_server.error(
|
||||
'Package notarization has failed: %s', status_message)
|
||||
raise NotarizationException(status_message)
|
||||
|
||||
if status == 'in progress':
|
||||
return False
|
||||
|
||||
logger_server.info(
|
||||
'Unknown notarization status %s (%s)', status, status_message)
|
||||
|
||||
return False
|
||||
|
||||
def notarize_wait_result(self, request_uuid: str) -> None:
|
||||
"""
|
||||
Wait until the notarial office has a reply
|
||||
"""
|
||||
|
||||
logger_server.info(
|
||||
'Waiting for a result from the notarization office.')
|
||||
|
||||
command = ['xcrun', 'altool',
|
||||
'--notarization-info', request_uuid,
|
||||
'--username', self.config.MACOS_XCRUN_USERNAME,
|
||||
'--password', self.config.MACOS_XCRUN_PASSWORD]
|
||||
|
||||
time_start = time.monotonic()
|
||||
timeout_in_seconds = self.config.MACOS_NOTARIZE_TIMEOUT_IN_SECONDS
|
||||
|
||||
while True:
|
||||
xcrun_output = self.check_output_or_mock(
|
||||
command, util.Platform.MACOS, allow_nonzero_exit_code=True)
|
||||
|
||||
if self.notarize_review_status(xcrun_output):
|
||||
break
|
||||
|
||||
logger_server.info('Keep waiting for notarization office.')
|
||||
time.sleep(30)
|
||||
|
||||
time_slept_in_seconds = time.monotonic() - time_start
|
||||
if time_slept_in_seconds > timeout_in_seconds:
|
||||
logger_server.error(
|
||||
"Notarial office didn't reply in %f seconds.",
|
||||
timeout_in_seconds)
|
||||
|
||||
def notarize_staple(self, file: AbsoluteAndRelativeFileName) -> bool:
|
||||
"""
|
||||
Staple notarial label on the file
|
||||
"""
|
||||
|
||||
logger_server.info('Stapling notarial stamp.')
|
||||
|
||||
command = ['xcrun', 'stapler', 'staple', '-v', file.absolute_filepath]
|
||||
self.check_output_or_mock(command, util.Platform.MACOS)
|
||||
|
||||
def notarize_dmg(self, file: AbsoluteAndRelativeFileName) -> bool:
|
||||
"""
|
||||
Run entire pipeline to get DMG notarized.
|
||||
"""
|
||||
logger_server.info('Begin notarization routines on %s',
|
||||
file.relative_filepath)
|
||||
|
||||
# Submit file for notarization.
|
||||
request_uuid = self.notarize_request(file)
|
||||
if not request_uuid:
|
||||
return False
|
||||
logger_server.info('Received Request UUID: %s', request_uuid)
|
||||
|
||||
# Wait for the status from the notarization office.
|
||||
if not self.notarize_wait_result(request_uuid):
|
||||
return False
|
||||
|
||||
# Staple.
|
||||
self.notarize_staple(file)
|
||||
|
||||
def notarize_all_dmg(
|
||||
self, files: List[AbsoluteAndRelativeFileName]) -> bool:
|
||||
"""
|
||||
Notarize all DMG images from the input.
|
||||
|
||||
Images are supposed to be codesigned already.
|
||||
"""
|
||||
for file in files:
|
||||
if not file.relative_filepath.name.endswith('.dmg'):
|
||||
continue
|
||||
if not self.check_file_is_to_be_signed(file):
|
||||
continue
|
||||
|
||||
self.notarize_dmg(file)
|
||||
|
||||
############################################################################
|
||||
# Entry point.
|
||||
|
||||
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
||||
# TODO(sergey): Handle errors somehow.
|
||||
|
||||
self.codesign_all_files(files)
|
||||
self.codesign_bundles(files)
|
||||
self.notarize_all_dmg(files)
|
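For reference, this is how the xcrun output parsing above behaves on a typical altool notarization-info reply; the sample text below is illustrative rather than captured from a real run:

SAMPLE_XCRUN_OUTPUT = """
    RequestUUID: 12345678-aaaa-bbbb-cccc-1234567890ab
          Status: success
  Status Message: Package Approved
"""

def field_value(field: str, output: str) -> str:
    # Same logic as xcrun_field_value_from_output() above.
    prefix = field + ': '
    for line in output.splitlines():
        line = line.strip()
        if line.startswith(prefix):
            return line[len(prefix):]
    return ''

assert field_value('Status', SAMPLE_XCRUN_OUTPUT) == 'success'
assert field_value('Status Message', SAMPLE_XCRUN_OUTPUT) == 'Package Approved'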
52
build_files/buildbot/codesign/simple_code_signer.py
Normal file
@@ -0,0 +1,52 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
|
||||
import logging.config
|
||||
import sys
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import codesign.config_builder
|
||||
import codesign.util as util
|
||||
from codesign.base_code_signer import BaseCodeSigner
|
||||
|
||||
|
||||
class SimpleCodeSigner:
|
||||
code_signer: Optional[BaseCodeSigner]
|
||||
|
||||
def __init__(self):
|
||||
platform = util.get_current_platform()
|
||||
if platform == util.Platform.LINUX:
|
||||
from codesign.linux_code_signer import LinuxCodeSigner
|
||||
self.code_signer = LinuxCodeSigner(codesign.config_builder)
|
||||
elif platform == util.Platform.MACOS:
|
||||
from codesign.macos_code_signer import MacOSCodeSigner
|
||||
self.code_signer = MacOSCodeSigner(codesign.config_builder)
|
||||
elif platform == util.Platform.WINDOWS:
|
||||
from codesign.windows_code_signer import WindowsCodeSigner
|
||||
self.code_signer = WindowsCodeSigner(codesign.config_builder)
|
||||
else:
|
||||
self.code_signer = None
|
||||
|
||||
def sign_file_or_directory(self, path: Path) -> None:
|
||||
logging.config.dictConfig(codesign.config_builder.LOGGING)
|
||||
self.code_signer.run_buildbot_path_sign_pipeline(path)
|
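A usage sketch of the dispatcher above (the install path is a hypothetical example):

from pathlib import Path

from codesign.simple_code_signer import SimpleCodeSigner

# Picks LinuxCodeSigner, MacOSCodeSigner or WindowsCodeSigner based on the
# current platform and signs everything eligible under the given path.
signer = SimpleCodeSigner()
signer.sign_file_or_directory(Path('/path/to/install/prefix'))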
54
build_files/buildbot/codesign/util.py
Normal file
@@ -0,0 +1,54 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import sys
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class Platform(Enum):
|
||||
LINUX = 1
|
||||
MACOS = 2
|
||||
WINDOWS = 3
|
||||
|
||||
|
||||
def get_current_platform() -> Platform:
|
||||
if sys.platform == 'linux':
|
||||
return Platform.LINUX
|
||||
elif sys.platform == 'darwin':
|
||||
return Platform.MACOS
|
||||
elif sys.platform == 'win32':
|
||||
return Platform.WINDOWS
|
||||
raise Exception(f'Unknown platform {sys.platform}')
|
||||
|
||||
|
||||
def ensure_file_does_not_exist_or_die(filepath: Path) -> None:
|
||||
"""
|
||||
If the file exists, unlink it.
|
||||
If the file path exists and is not a file, SystemExit is raised.
|
||||
If the file path does not exist, nothing happens.
|
||||
"""
|
||||
if not filepath.exists():
|
||||
return
|
||||
if not filepath.is_file():
|
||||
# TODO(sergey): Provide information about what the filepath actually is.
|
||||
raise SystemExit(f'{filepath} is expected to be a file, but is not')
|
||||
filepath.unlink()
|
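A small sketch of how these helpers are meant to be used (the stamp file path is a hypothetical example):

from pathlib import Path

import codesign.util as util

platform = util.get_current_platform()
if platform == util.Platform.LINUX:
    print('Running on Linux, signing will be a no-op.')

# Unlinks the file if it exists; exits with an error if the path is
# something other than a regular file.
util.ensure_file_does_not_exist_or_die(Path('/tmp/blender-codesign.stamp'))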
117
build_files/buildbot/codesign/windows_code_signer.py
Normal file
@@ -0,0 +1,117 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import logging
import subprocess
|
||||
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
import codesign.util as util
|
||||
|
||||
from buildbot_utils import Builder
|
||||
|
||||
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
|
||||
from codesign.base_code_signer import BaseCodeSigner
|
||||
from codesign.exception import CodeSignException
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger_server = logger.getChild('server')
|
||||
|
||||
# NOTE: Check is done as filename.endswith(), so keep the dot
|
||||
EXTENSIONS_TO_BE_SIGNED = {'.exe', '.dll', '.pyd', '.msi'}
|
||||
|
||||
BLACKLIST_FILE_PREFIXES = (
|
||||
'api-ms-', 'concrt', 'msvcp', 'ucrtbase', 'vcomp', 'vcruntime')
|
||||
|
||||
|
||||
class SigntoolException(CodeSignException):
|
||||
pass
|
||||
|
||||
class WindowsCodeSigner(BaseCodeSigner):
|
||||
def check_file_is_to_be_signed(
|
||||
self, file: AbsoluteAndRelativeFileName) -> bool:
|
||||
base_name = file.relative_filepath.name
|
||||
if any(base_name.startswith(prefix)
|
||||
for prefix in BLACKLIST_FILE_PREFIXES):
|
||||
return False
|
||||
|
||||
return file.relative_filepath.suffix in EXTENSIONS_TO_BE_SIGNED
|
||||
|
||||
|
||||
def get_sign_command_prefix(self) -> List[str]:
|
||||
return [
|
||||
'signtool', 'sign', '/v',
|
||||
'/f', self.config.WIN_CERTIFICATE_FILEPATH,
|
||||
'/tr', self.config.WIN_TIMESTAMP_AUTHORITY_URL]
|
||||
|
||||
|
||||
def run_codesign_tool(self, filepath: Path) -> None:
|
||||
command = self.get_sign_command_prefix() + [filepath]
|
||||
|
||||
try:
|
||||
codesign_output = self.check_output_or_mock(command, util.Platform.WINDOWS)
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise SigntoolException(f'Error running signtool {e}')
|
||||
|
||||
logger_server.info(f'signtool output:\n{codesign_output}')
|
||||
|
||||
got_number_of_success = False
|
||||
|
||||
for line in codesign_output.split('\n'):
|
||||
line_clean = line.strip()
|
||||
line_clean_lower = line_clean.lower()
|
||||
|
||||
if line_clean_lower.startswith('number of warnings') or \
|
||||
line_clean_lower.startswith('number of errors'):
|
||||
number = int(line_clean_lower.split(':')[1])
|
||||
if number != 0:
|
||||
raise SigntoolException('Non-clean success of signtool')
|
||||
|
||||
if line_clean_lower.startswith('number of files successfully signed'):
|
||||
got_number_of_success = True
|
||||
number = int(line_clean_lower.split(':')[1])
|
||||
if number != 1:
|
||||
raise SigntoolException('Signtool did not consider codesign a success')
|
||||
|
||||
if not got_number_of_success:
|
||||
raise SigntoolException('Signtool did not report number of files signed')
|
||||
|
||||
|
||||
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
||||
# NOTE: Sign files one by one to avoid possible command line length
|
||||
# overflow (which could happen if we ever decide to sign every binary
|
||||
# in the install folder, for example).
|
||||
#
|
||||
# TODO(sergey): Consider doing batched signing of handful of files in
|
||||
# one go (but only if this actually known to be much faster).
|
||||
num_files = len(files)
|
||||
for file_index, file in enumerate(files):
|
||||
# Ignore file if it is not to be signed.
|
||||
# Allows one to manually construct a ZIP of the package and get it signed.
|
||||
if not self.check_file_is_to_be_signed(file):
|
||||
logger_server.info(
|
||||
'Ignoring file [%d/%d] %s',
|
||||
file_index + 1, num_files, file.relative_filepath)
|
||||
continue
|
||||
|
||||
logger_server.info(
|
||||
'Running signtool command for file [%d/%d] %s...',
|
||||
file_index + 1, num_files, file.relative_filepath)
|
||||
self.run_codesign_tool(file.absolute_filepath)
|
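The success criteria applied by run_codesign_tool() above can be illustrated on a standalone summary; the sample output below is illustrative and not a verbatim signtool transcript:

SAMPLE_SIGNTOOL_OUTPUT = """
Successfully signed: C:\\build\\blender.exe

Number of files successfully Signed: 1
Number of warnings: 0
Number of errors: 0
"""

def signtool_reported_success(output: str) -> bool:
    # Mirrors the checks above: exactly one file signed, no warnings, no errors.
    signed = warnings = errors = None
    for line in output.splitlines():
        line = line.strip().lower()
        if line.startswith('number of files successfully signed'):
            signed = int(line.split(':')[1])
        elif line.startswith('number of warnings'):
            warnings = int(line.split(':')[1])
        elif line.startswith('number of errors'):
            errors = int(line.split(':')[1])
    return signed == 1 and warnings == 0 and errors == 0

assert signtool_reported_success(SAMPLE_SIGNTOOL_OUTPUT)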
37
build_files/buildbot/codesign_server_linux.py
Executable file
@@ -0,0 +1,37 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# NOTE: This is a no-op signer (since there isn't really a procedure to sign
|
||||
# Linux binaries yet). Used to debug and verify the code signing routines on
|
||||
# a Linux environment.
|
||||
|
||||
import logging.config
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
from codesign.linux_code_signer import LinuxCodeSigner
|
||||
import codesign.config_server
|
||||
|
||||
if __name__ == "__main__":
|
||||
logging.config.dictConfig(codesign.config_server.LOGGING)
|
||||
code_signer = LinuxCodeSigner(codesign.config_server)
|
||||
code_signer.run_signing_server()
|
41
build_files/buildbot/codesign_server_macos.py
Executable file
@@ -0,0 +1,41 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import logging.config
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
from codesign.macos_code_signer import MacOSCodeSigner
|
||||
import codesign.config_server
|
||||
|
||||
if __name__ == "__main__":
|
||||
entitlements_file = codesign.config_server.MACOS_ENTITLEMENTS_FILE
|
||||
if not entitlements_file.exists():
|
||||
raise SystemExit(
|
||||
f'Entitlements file {entitlements_file} does not exist.')
|
||||
if not entitlements_file.is_file():
|
||||
raise SystemExit(
|
||||
f'Entitlements file {entitlements_file} is not a file.')
|
||||
|
||||
logging.config.dictConfig(codesign.config_server.LOGGING)
|
||||
code_signer = MacOSCodeSigner(codesign.config_server)
|
||||
code_signer.run_signing_server()
|
11
build_files/buildbot/codesign_server_windows.bat
Normal file
@@ -0,0 +1,11 @@
|
||||
@echo off
|
||||
|
||||
rem This is an entry point of the codesign server for Windows.
|
||||
rem It makes sure that signtool.exe is within the current PATH and can be
|
||||
rem used by the Python script.
|
||||
|
||||
SETLOCAL
|
||||
|
||||
set PATH=C:\Program Files (x86)\Windows Kits\10\App Certification Kit;%PATH%
|
||||
|
||||
codesign_server_windows.py
|
54
build_files/buildbot/codesign_server_windows.py
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Implementation of codesign server for Windows.
|
||||
#
|
||||
# NOTE: If signtool.exe is not in the PATH use codesign_server_windows.bat
|
||||
|
||||
import logging.config
|
||||
import shutil
|
||||
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
import codesign.util as util
|
||||
|
||||
from codesign.windows_code_signer import WindowsCodeSigner
|
||||
import codesign.config_server
|
||||
|
||||
if __name__ == "__main__":
|
||||
logging.config.dictConfig(codesign.config_server.LOGGING)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger_server = logger.getChild('server')
|
||||
|
||||
# TODO(sergey): Consider moving such sanity checks into
|
||||
# CodeSigner.check_environment_or_die().
|
||||
if not shutil.which('signtool.exe'):
|
||||
if util.get_current_platform() == util.Platform.WINDOWS:
|
||||
raise SystemExit("signtool.exe is not found in %PATH%")
|
||||
logger_server.info(
|
||||
'signtool.exe not found, '
|
||||
'but will not be used on this foreign platform')
|
||||
|
||||
code_signer = WindowsCodeSigner(codesign.config_server)
|
||||
code_signer.run_signing_server()
|
551
build_files/buildbot/worker_bundle_dmg.py
Executable file
@@ -0,0 +1,551 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import argparse
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory, NamedTemporaryFile
|
||||
from typing import List
|
||||
|
||||
BUILDBOT_DIRECTORY = Path(__file__).absolute().parent
|
||||
CODESIGN_SCRIPT = BUILDBOT_DIRECTORY / 'worker_codesign.py'
|
||||
BLENDER_GIT_ROOT_DIRECTORY = BUILDBOT_DIRECTORY.parent.parent
|
||||
DARWIN_DIRECTORY = BLENDER_GIT_ROOT_DIRECTORY / 'release' / 'darwin'
|
||||
|
||||
|
||||
# Extra size which is added on top of actual files size when estimating size
|
||||
# of the destination DMG.
|
||||
EXTRA_DMG_SIZE_IN_BYTES = 800 * 1024 * 1024
|
||||
|
||||
################################################################################
|
||||
# Common utilities
|
||||
|
||||
|
||||
def get_directory_size(root_directory: Path) -> int:
|
||||
"""
|
||||
Get size of directory on disk
|
||||
"""
|
||||
|
||||
total_size = 0
|
||||
for file in root_directory.glob('**/*'):
|
||||
total_size += file.lstat().st_size
|
||||
return total_size
|
||||
|
||||
|
||||
################################################################################
|
||||
# DMG bundling specific logic
|
||||
|
||||
def create_argument_parser():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
'source_dir',
|
||||
type=Path,
|
||||
help='Source directory which points to either an existing .app bundle '
|
||||
'or to a directory with .app bundles.')
|
||||
parser.add_argument(
|
||||
'--background-image',
|
||||
type=Path,
|
||||
help="Optional background picture which will be set on the DMG."
|
||||
"If not provided default Blender's one is used.")
|
||||
parser.add_argument(
|
||||
'--volume-name',
|
||||
type=str,
|
||||
help='Optional name of a volume which will be used for DMG.')
|
||||
parser.add_argument(
|
||||
'--dmg',
|
||||
type=Path,
|
||||
help='Optional argument which points to a final DMG file name.')
|
||||
parser.add_argument(
|
||||
'--applescript',
|
||||
type=Path,
|
||||
help="Optional path to applescript to set up folder looks of DMG."
|
||||
"If not provided default Blender's one is used.")
|
||||
parser.add_argument(
|
||||
'--codesign',
|
||||
action="store_true",
|
||||
help="Code sign and notarize DMG contents.")
|
||||
return parser
|
||||
|
||||
|
||||
def collect_app_bundles(source_dir: Path) -> List[Path]:
|
||||
"""
|
||||
Collect all app bundles which are to be put into DMG
|
||||
|
||||
If the source directory points to FOO.app it will be the only app bundle
|
||||
packed.
|
||||
|
||||
Otherwise all .app bundles from the given directory are placed into a single
|
||||
DMG.
|
||||
"""
|
||||
|
||||
if source_dir.name.endswith('.app'):
|
||||
return [source_dir]
|
||||
|
||||
app_bundles = []
|
||||
for filename in source_dir.glob('*'):
|
||||
if not filename.is_dir():
|
||||
continue
|
||||
if not filename.name.endswith('.app'):
|
||||
continue
|
||||
|
||||
app_bundles.append(filename)
|
||||
|
||||
return app_bundles
|
||||
|
||||
|
||||
def collect_and_log_app_bundles(source_dir: Path) -> List[Path]:
|
||||
app_bundles = collect_app_bundles(source_dir)
|
||||
|
||||
if not app_bundles:
|
||||
print('No app bundles found for packing')
|
||||
return
|
||||
|
||||
print(f'Found {len(app_bundles)} app bundles to pack:')
|
||||
for app_bundle in app_bundles:
|
||||
print(f'- {app_bundle}')
|
||||
|
||||
return app_bundles
|
||||
|
||||
|
||||
def estimate_dmg_size(app_bundles: List[Path]) -> int:
|
||||
"""
|
||||
Estimate size of DMG to hold requested app bundles
|
||||
|
||||
The size is based on actual size of all files in all bundles plus some
|
||||
space to compensate for different size-on-disk plus some space to hold
|
||||
codesign signatures.
|
||||
|
||||
It is better to err on the high side since the empty space is compressed, but
|
||||
lack of space might cause silent failures later on.
|
||||
"""
|
||||
|
||||
app_bundles_size = 0
|
||||
for app_bundle in app_bundles:
|
||||
app_bundles_size += get_directory_size(app_bundle)
|
||||
|
||||
return app_bundles_size + EXTRA_DMG_SIZE_IN_BYTES
|
||||
|
||||
|
||||
def copy_app_bundles_to_directory(app_bundles: List[Path],
|
||||
directory: Path) -> None:
|
||||
"""
|
||||
Copy all bundles to a given directory
|
||||
|
||||
This directory is what the DMG will be created from.
|
||||
"""
|
||||
for app_bundle in app_bundles:
|
||||
print(f'Copying {app_bundle.name}...')
|
||||
shutil.copytree(app_bundle, directory / app_bundle.name)
|
||||
|
||||
|
||||
def get_main_app_bundle(app_bundles: List[Path]) -> Path:
|
||||
"""
|
||||
Get the main application bundle for the installation
|
||||
"""
|
||||
return app_bundles[0]
|
||||
|
||||
|
||||
def create_dmg_image(app_bundles: List[Path],
|
||||
dmg_filepath: Path,
|
||||
volume_name: str) -> None:
|
||||
"""
|
||||
Create DMG disk image and put app bundles in it
|
||||
|
||||
No DMG configuration or codesigning is happening here.
|
||||
"""
|
||||
|
||||
if dmg_filepath.exists():
|
||||
print(f'Removing existing writable DMG {dmg_filepath}...')
|
||||
dmg_filepath.unlink()
|
||||
|
||||
print('Preparing directory with app bundles for the DMG...')
|
||||
with TemporaryDirectory(prefix='blender-dmg-content-') as content_dir_str:
|
||||
# Copy all bundles to a clean directory.
|
||||
content_dir = Path(content_dir_str)
|
||||
copy_app_bundles_to_directory(app_bundles, content_dir)
|
||||
|
||||
# Estimate size of the DMG.
|
||||
dmg_size = estimate_dmg_size(app_bundles)
|
||||
print(f'Estimated DMG size: {dmg_size:,} bytes.')
|
||||
|
||||
# Create the DMG.
|
||||
print(f'Creating writable DMG {dmg_filepath}')
|
||||
command = ('hdiutil',
|
||||
'create',
|
||||
'-size', str(dmg_size),
|
||||
'-fs', 'HFS+',
|
||||
'-srcfolder', content_dir,
|
||||
'-volname', volume_name,
|
||||
'-format', 'UDRW',
|
||||
dmg_filepath)
|
||||
subprocess.run(command)
|
||||
|
||||
|
||||
def get_writable_dmg_filepath(dmg_filepath: Path):
|
||||
"""
|
||||
Get file path for writable DMG image
|
||||
"""
|
||||
parent = dmg_filepath.parent
|
||||
return parent / (dmg_filepath.stem + '-temp.dmg')
|
||||
|
||||
|
||||
def mount_readwrite_dmg(dmg_filepath: Path) -> None:
|
||||
"""
|
||||
Mount writable DMG
|
||||
|
||||
Mounting point would be /Volumes/<volume name>
|
||||
"""
|
||||
|
||||
print(f'Mounting read-write DMG {dmg_filepath}')
|
||||
command = ('hdiutil',
|
||||
'attach', '-readwrite',
|
||||
'-noverify',
|
||||
'-noautoopen',
|
||||
dmg_filepath)
|
||||
subprocess.run(command)
|
||||
|
||||
|
||||
def get_mount_directory_for_volume_name(volume_name: str) -> Path:
|
||||
"""
|
||||
Get directory under which the volume will be mounted
|
||||
"""
|
||||
|
||||
return Path('/Volumes') / volume_name
|
||||
|
||||
|
||||
def eject_volume(volume_name: str) -> None:
|
||||
"""
|
||||
Eject given volume, if mounted
|
||||
"""
|
||||
mount_directory = get_mount_directory_for_volume_name(volume_name)
|
||||
if not mount_directory.exists():
|
||||
return
|
||||
mount_directory_str = str(mount_directory)
|
||||
|
||||
print(f'Ejecting volume {volume_name}')
|
||||
|
||||
# Figure out which device to eject.
|
||||
mount_output = subprocess.check_output(['mount']).decode()
|
||||
device = ''
|
||||
for line in mount_output.splitlines():
|
||||
if f'on {mount_directory_str} (' not in line:
|
||||
continue
|
||||
tokens = line.split(' ', 3)
|
||||
if len(tokens) < 3:
|
||||
continue
|
||||
if tokens[1] != 'on':
|
||||
continue
|
||||
if device:
|
||||
raise Exception(
|
||||
f'Multiple devices found for mounting point {mount_directory}')
|
||||
device = tokens[0]
|
||||
|
||||
if not device:
|
||||
raise Exception(
|
||||
f'No device found for mounting point {mount_directory}')
|
||||
|
||||
print(f'{mount_directory} is mounted as device {device}, ejecting...')
|
||||
subprocess.run(['diskutil', 'eject', device])
|
||||
|
||||
|
||||
def copy_background_if_needed(background_image_filepath: Path,
|
||||
mount_directory: Path) -> None:
|
||||
"""
|
||||
Copy background to the DMG
|
||||
|
||||
If the background image is not specified it will not be copied.
|
||||
"""
|
||||
|
||||
if not background_image_filepath:
|
||||
print('No background image provided.')
|
||||
return
|
||||
|
||||
print(f'Copying background image {background_image_filepath}')
|
||||
|
||||
destination_dir = mount_directory / '.background'
|
||||
destination_dir.mkdir(exist_ok=True)
|
||||
|
||||
destination_filepath = destination_dir / background_image_filepath.name
|
||||
shutil.copy(background_image_filepath, destination_filepath)
|
||||
|
||||
|
||||
def create_applications_link(mount_directory: Path) -> None:
|
||||
"""
|
||||
Create link to /Applications in the given location
|
||||
"""
|
||||
|
||||
print('Creating link to /Applications')
|
||||
|
||||
command = ('ln', '-s', '/Applications', mount_directory / ' ')
|
||||
subprocess.run(command)
|
||||
|
||||
|
||||
def run_applescript(applescript: Path,
|
||||
volume_name: str,
|
||||
app_bundles: List[Path],
|
||||
background_image_filepath: Path) -> None:
|
||||
"""
|
||||
Run given applescript to adjust look and feel of the DMG
|
||||
"""
|
||||
|
||||
main_app_bundle = get_main_app_bundle(app_bundles)
|
||||
|
||||
with NamedTemporaryFile(
|
||||
mode='w', suffix='.applescript') as temp_applescript:
|
||||
print('Adjusting applescript for volume name...')
|
||||
# Adjust script to the specific volume name.
|
||||
with open(applescript, mode='r') as input:
|
||||
for line in input.readlines():
|
||||
stripped_line = line.strip()
|
||||
if stripped_line.startswith('tell disk'):
|
||||
line = re.sub('tell disk ".*"',
|
||||
f'tell disk "{volume_name}"',
|
||||
line)
|
||||
elif stripped_line.startswith('set background picture'):
|
||||
if not background_image_filepath:
|
||||
continue
|
||||
else:
|
||||
background_image_short = \
|
||||
'.background:' + background_image_filepath.name
|
||||
line = re.sub('to file ".*"',
|
||||
f'to file "{background_image_short}"',
|
||||
line)
|
||||
line = line.replace('blender.app', main_app_bundle.name)
|
||||
temp_applescript.write(line)
|
||||
|
||||
temp_applescript.flush()
|
||||
|
||||
print('Running applescript...')
|
||||
command = ('osascript', temp_applescript.name)
|
||||
subprocess.run(command)
|
||||
|
||||
print('Waiting for applescript...')
|
||||
|
||||
# NOTE: This is copied from bundle.sh. The exact reason for the sleep
|
||||
# remains a mystery.
|
||||
time.sleep(5)
|
||||
|
||||
|
||||
def codesign(subject: Path):
|
||||
"""
|
||||
Codesign file or directory
|
||||
|
||||
NOTE: For DMG it will also notarize.
|
||||
"""
|
||||
|
||||
command = (CODESIGN_SCRIPT, subject)
|
||||
subprocess.run(command)
|
||||
|
||||
|
||||
def codesign_app_bundles_in_dmg(mount_directory: str) -> None:
|
||||
"""
|
||||
Code sign all binaries and bundles in the mounted directory
|
||||
"""
|
||||
|
||||
print(f'Codesigning all app bundles in {mount_directory}')
|
||||
codesign(mount_directory)
|
||||
|
||||
|
||||
def codesign_and_notarize_dmg(dmg_filepath: Path) -> None:
|
||||
"""
|
||||
Run codesign and notarization pipeline on the DMG
|
||||
"""
|
||||
|
||||
print(f'Codesigning and notarizing DMG {dmg_filepath}')
|
||||
codesign(dmg_filepath)
|
||||
|
||||
|
||||
def compress_dmg(writable_dmg_filepath: Path,
|
||||
final_dmg_filepath: Path) -> None:
|
||||
"""
|
||||
Compress temporary read-write DMG
|
||||
"""
|
||||
command = ('hdiutil', 'convert',
|
||||
writable_dmg_filepath,
|
||||
'-format', 'UDZO',
|
||||
'-o', final_dmg_filepath)
|
||||
|
||||
if final_dmg_filepath.exists():
|
||||
print(f'Removing old compressed DMG {final_dmg_filepath}')
|
||||
final_dmg_filepath.unlink()
|
||||
|
||||
print('Compressing disk image...')
|
||||
subprocess.run(command)
|
||||
|
||||
|
||||
def create_final_dmg(app_bundles: List[Path],
|
||||
dmg_filepath: Path,
|
||||
background_image_filepath: Path,
|
||||
volume_name: str,
|
||||
applescript: Path,
|
||||
codesign: bool) -> None:
|
||||
"""
|
||||
Create DMG with all app bundles
|
||||
|
||||
Will take care of configuring the background, signing all binaries and app bundles,
|
||||
and notarizing the DMG.
|
||||
"""
|
||||
|
||||
print('Running all routines to create final DMG')
|
||||
|
||||
writable_dmg_filepath = get_writable_dmg_filepath(dmg_filepath)
|
||||
mount_directory = get_mount_directory_for_volume_name(volume_name)
|
||||
|
||||
# Make sure volume is not mounted.
|
||||
# If it is mounted it will prevent removing old DMG files and could make
|
||||
# it so app bundles are copied to the wrong place.
|
||||
eject_volume(volume_name)
|
||||
|
||||
create_dmg_image(app_bundles, writable_dmg_filepath, volume_name)
|
||||
|
||||
mount_readwrite_dmg(writable_dmg_filepath)
|
||||
|
||||
# Run codesign first, prior to copying anything else.
|
||||
#
|
||||
# This allows recursing into the content of bundles without worrying about
|
||||
# possible interference of the Applications symlink.
|
||||
if codesign:
|
||||
codesign_app_bundles_in_dmg(mount_directory)
|
||||
|
||||
copy_background_if_needed(background_image_filepath, mount_directory)
|
||||
create_applications_link(mount_directory)
|
||||
run_applescript(applescript, volume_name, app_bundles,
|
||||
background_image_filepath)
|
||||
|
||||
print('Ejecting read-write DMG image...')
|
||||
eject_volume(volume_name)
|
||||
|
||||
compress_dmg(writable_dmg_filepath, dmg_filepath)
|
||||
writable_dmg_filepath.unlink()
|
||||
|
||||
if codesign:
|
||||
codesign_and_notarize_dmg(dmg_filepath)
|
||||
|
||||
|
||||
def ensure_dmg_extension(filepath: Path) -> Path:
|
||||
"""
|
||||
Make sure the given file has a .dmg extension
|
||||
"""
|
||||
|
||||
if filepath.suffix != '.dmg':
|
||||
return filepath.with_suffix(f'{filepath.suffix}.dmg')
|
||||
return filepath
|
||||
|
||||
|
||||
def get_dmg_filepath(requested_name: Path, app_bundles: List[Path]) -> Path:
|
||||
"""
|
||||
Get full file path for the final DMG image
|
||||
|
||||
Will use the provided one when possible, otherwise will deduce it from
|
||||
app bundles.
|
||||
|
||||
If the name is deduced, the DMG is stored in the current directory.
|
||||
"""
|
||||
|
||||
if requested_name:
|
||||
return ensure_dmg_extension(requested_name.absolute())
|
||||
|
||||
# TODO(sergey): This is not necessarily the main one.
|
||||
main_bundle = app_bundles[0]
|
||||
# Strip .app from the name
|
||||
return Path(main_bundle.name[:-4] + '.dmg').absolute()
|
||||
|
||||
|
||||
def get_background_image(requested_background_image: Path) -> Path:
|
||||
"""
|
||||
Get effective filepath for the background image
|
||||
"""
|
||||
|
||||
if requested_background_image:
|
||||
return requested_background_image.absolute()
|
||||
|
||||
return DARWIN_DIRECTORY / 'background.tif'
|
||||
|
||||
|
||||
def get_applescript(requested_applescript: Path) -> Path:
|
||||
"""
|
||||
Get effective filepath for the applescript
|
||||
"""
|
||||
|
||||
if requested_applescript:
|
||||
return requested_applescript.absolute()
|
||||
|
||||
return DARWIN_DIRECTORY / 'blender.applescript'
|
||||
|
||||
|
||||
def get_volume_name_from_dmg_filepath(dmg_filepath: Path) -> str:
|
||||
"""
|
||||
Deduce the volume name from the DMG path
|
||||
|
||||
Will use the first part of the DMG file name prior to the dash.
|
||||
"""
|
||||
|
||||
tokens = dmg_filepath.stem.split('-')
|
||||
words = tokens[0].split()
|
||||
|
||||
return ' '.join(word.capitalize() for word in words)
|
||||
|
||||
|
||||
def get_volume_name(requested_volume_name: str,
|
||||
dmg_filepath: Path) -> str:
|
||||
"""
|
||||
Get effective name for DMG volume
|
||||
"""
|
||||
|
||||
if requested_volume_name:
|
||||
return requested_volume_name
|
||||
|
||||
return get_volume_name_from_dmg_filepath(dmg_filepath)
|
||||
|
||||
|
||||
def main():
|
||||
parser = create_argument_parser()
|
||||
args = parser.parse_args()
|
||||
|
||||
# Get normalized input parameters.
|
||||
source_dir = args.source_dir.absolute()
|
||||
background_image_filepath = get_background_image(args.background_image)
|
||||
applescript = get_applescript(args.applescript)
|
||||
codesign = args.codesign
|
||||
|
||||
app_bundles = collect_and_log_app_bundles(source_dir)
|
||||
if not app_bundles:
|
||||
return
|
||||
|
||||
dmg_filepath = get_dmg_filepath(args.dmg, app_bundles)
|
||||
volume_name = get_volume_name(args.volume_name, dmg_filepath)
|
||||
|
||||
print(f'Will produce DMG "{dmg_filepath.name}" (without quotes)')
|
||||
|
||||
create_final_dmg(app_bundles,
|
||||
dmg_filepath,
|
||||
background_image_filepath,
|
||||
volume_name,
|
||||
applescript,
|
||||
codesign)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
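As an illustration of the volume naming rule implemented above, a standalone sketch (the DMG file name is an example, not a statement about release naming):

from pathlib import Path

def volume_name_from_dmg(dmg_filepath: Path) -> str:
    # Same rule as get_volume_name_from_dmg_filepath(): take the part of the
    # stem before the first dash and capitalize each word.
    tokens = dmg_filepath.stem.split('-')
    words = tokens[0].split()
    return ' '.join(word.capitalize() for word in words)

print(volume_name_from_dmg(Path('blender-2.90.0-macos.dmg')))  # -> 'Blender'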
44
build_files/buildbot/worker_codesign.cmake
Normal file
@@ -0,0 +1,44 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# This is a script which is used as POST-INSTALL one for regular CMake's
|
||||
# INSTALL target.
|
||||
# It is used by buildbot workers to sign every binary which is going into
|
||||
# the final bundle.
|
||||
|
||||
# On Windows, Python 3 only provides python.exe; there is no python3.exe.
|
||||
#
|
||||
# On other platforms it is possible to have python2 and python3, and a
|
||||
# python symbolic link to either of them. So on those platforms use
|
||||
# an explicit Python version.
|
||||
if(WIN32)
|
||||
set(PYTHON_EXECUTABLE python)
|
||||
else()
|
||||
set(PYTHON_EXECUTABLE python3)
|
||||
endif()
|
||||
|
||||
execute_process(
|
||||
COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_LIST_DIR}/worker_codesign.py"
|
||||
"${CMAKE_INSTALL_PREFIX}"
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
|
||||
RESULT_VARIABLE exit_code
|
||||
)
|
||||
|
||||
if(NOT exit_code EQUAL "0")
|
||||
message(FATAL_ERROR "Non-zero exit code of codesign tool")
|
||||
endif()
|
74
build_files/buildbot/worker_codesign.py
Executable file
@@ -0,0 +1,74 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# Helper script which takes care of signing provided location.
|
||||
#
|
||||
# The location can either be a directory (in which case all eligible binaries
|
||||
# will be signed) or a single file (in which case a single file will be signed).
|
||||
#
|
||||
# This script takes care of all the complexity of communicating between process
|
||||
# which requests file to be signed and the code signing server.
|
||||
#
|
||||
# NOTE: Signing happens in-place.
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from codesign.simple_code_signer import SimpleCodeSigner
|
||||
|
||||
|
||||
def create_argument_parser():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('path_to_sign', type=Path)
|
||||
return parser
|
||||
|
||||
|
||||
def main():
|
||||
parser = create_argument_parser()
|
||||
args = parser.parse_args()
|
||||
path_to_sign = args.path_to_sign.absolute()
|
||||
|
||||
if sys.platform == 'win32':
|
||||
# When WIX packed is used to generate .msi on Windows the CPack will
|
||||
# install two different projects and install them to different
|
||||
# installation prefix:
|
||||
#
|
||||
# - C:\b\build\_CPack_Packages\WIX\Blender
|
||||
# - C:\b\build\_CPack_Packages\WIX\Unspecified
|
||||
#
|
||||
# Annoying part is: CMake's post-install script will only be run
|
||||
# once, with the install prefix which corresponds to a project which
|
||||
# was installed last. But we want to sign binaries from all projects.
|
||||
# So in order to do so we detect that we are running for a CPack's
|
||||
# project used for WIX and force parent directory (which includes both
|
||||
# projects) to be signed.
|
||||
#
|
||||
# Here we force both projects to be signed.
|
||||
if path_to_sign.name == 'Unspecified' and 'WIX' in str(path_to_sign):
|
||||
path_to_sign = path_to_sign.parent
|
||||
|
||||
code_signer = SimpleCodeSigner()
|
||||
code_signer.sign_file_or_directory(path_to_sign)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
135
build_files/buildbot/worker_compile.py
Normal file
135
build_files/buildbot/worker_compile.py
Normal file
@@ -0,0 +1,135 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
import buildbot_utils
|
||||
|
||||
|
||||
def get_cmake_options(builder):
|
||||
codesign_script = os.path.join(
|
||||
builder.blender_dir, 'build_files', 'buildbot', 'worker_codesign.cmake')
|
||||
|
||||
config_file = "build_files/cmake/config/blender_release.cmake"
|
||||
options = ['-DCMAKE_BUILD_TYPE:STRING=Release',
|
||||
'-DWITH_GTESTS=ON']
|
||||
|
||||
if builder.platform == 'mac':
|
||||
options.append('-DCMAKE_OSX_ARCHITECTURES:STRING=x86_64')
|
||||
options.append('-DCMAKE_OSX_DEPLOYMENT_TARGET=10.9')
|
||||
elif builder.platform == 'win':
|
||||
options.extend(['-G', 'Visual Studio 16 2019', '-A', 'x64'])
|
||||
if builder.codesign:
|
||||
options.extend(['-DPOSTINSTALL_SCRIPT:PATH=' + codesign_script])
|
||||
elif builder.platform == 'linux':
|
||||
config_file = "build_files/buildbot/config/blender_linux.cmake"
|
||||
|
||||
optix_sdk_dir = os.path.join(builder.blender_dir, '..', '..', 'NVIDIA-Optix-SDK-7.1')
|
||||
options.append('-DOPTIX_ROOT_DIR:PATH=' + optix_sdk_dir)
|
||||
|
||||
# Workaround to build sm_30 kernels with CUDA 10, since CUDA 11 no longer supports that architecture
|
||||
if builder.platform == 'win':
|
||||
options.append('-DCUDA10_TOOLKIT_ROOT_DIR:PATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v10.1')
|
||||
options.append('-DCUDA10_NVCC_EXECUTABLE:FILEPATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v10.1/bin/nvcc.exe')
|
||||
options.append('-DCUDA11_TOOLKIT_ROOT_DIR:PATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.1')
|
||||
options.append('-DCUDA11_NVCC_EXECUTABLE:FILEPATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.1/bin/nvcc.exe')
|
||||
elif builder.platform == 'linux':
|
||||
options.append('-DCUDA10_TOOLKIT_ROOT_DIR:PATH=/usr/local/cuda-10.1')
|
||||
options.append('-DCUDA10_NVCC_EXECUTABLE:FILEPATH=/usr/local/cuda-10.1/bin/nvcc')
|
||||
options.append('-DCUDA11_TOOLKIT_ROOT_DIR:PATH=/usr/local/cuda-11.1')
|
||||
options.append('-DCUDA11_NVCC_EXECUTABLE:FILEPATH=/usr/local/cuda-11.1/bin/nvcc')
|
||||
|
||||
options.append("-C" + os.path.join(builder.blender_dir, config_file))
|
||||
options.append("-DCMAKE_INSTALL_PREFIX=%s" % (builder.install_dir))
|
||||
|
||||
return options
|
||||
|
||||
|
||||
def update_git(builder):
|
||||
# Do extra git fetch because not all platform/git/buildbot combinations
|
||||
# update the origin remote, causing buildinfo to detect local changes.
|
||||
os.chdir(builder.blender_dir)
|
||||
|
||||
print("Fetching remotes")
|
||||
command = ['git', 'fetch', '--all']
|
||||
buildbot_utils.call(builder.command_prefix + command)
|
||||
|
||||
|
||||
def clean_directories(builder):
|
||||
# Make sure no garbage remained from the previous run
|
||||
if os.path.isdir(builder.install_dir):
|
||||
shutil.rmtree(builder.install_dir)
|
||||
|
||||
# Make sure build directory exists and enter it
|
||||
os.makedirs(builder.build_dir, exist_ok=True)
|
||||
|
||||
# Remove buildinfo files to force buildbot to re-generate them.
|
||||
for buildinfo in ('buildinfo.h', 'buildinfo.h.txt', ):
|
||||
full_path = os.path.join(builder.build_dir, 'source', 'creator', buildinfo)
|
||||
if os.path.exists(full_path):
|
||||
print("Removing {}" . format(buildinfo))
|
||||
os.remove(full_path)
|
||||
|
||||
|
||||
def cmake_configure(builder):
|
||||
# CMake configuration
|
||||
os.chdir(builder.build_dir)
|
||||
|
||||
cmake_cache = os.path.join(builder.build_dir, 'CMakeCache.txt')
|
||||
if os.path.exists(cmake_cache):
|
||||
print("Removing CMake cache")
|
||||
os.remove(cmake_cache)
|
||||
|
||||
print("CMake configure:")
|
||||
cmake_options = get_cmake_options(builder)
|
||||
command = ['cmake', builder.blender_dir] + cmake_options
|
||||
buildbot_utils.call(builder.command_prefix + command)
|
||||
|
||||
|
||||
def cmake_build(builder):
|
||||
# CMake build
|
||||
os.chdir(builder.build_dir)
|
||||
|
||||
# NOTE: CPack will build an INSTALL target, which would mean that code
|
||||
# signing will happen twice when using `make install` and CPack.
|
||||
# The tricky bit here is that it is not possible to know whether INSTALL
|
||||
# target is used by CPack or by a buildbot itaself. Extra level on top of
|
||||
# this is that on Windows it is required to build INSTALL target in order
|
||||
# to have unit test binaries to run.
|
||||
# So on the one hand we do an extra unneeded code sign on Windows, but on
|
||||
# a positive side we don't add complexity and don't make build process more
|
||||
# fragile trying to avoid this. The signing process is way faster than just
|
||||
# a clean build of buildbot, especially with regression tests enabled.
|
||||
if builder.platform == 'win':
|
||||
command = ['cmake', '--build', '.', '--target', 'install', '--config', 'Release']
|
||||
else:
|
||||
command = ['make', '-s', '-j16', 'install']
|
||||
|
||||
print("CMake build:")
|
||||
buildbot_utils.call(builder.command_prefix + command)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
builder = buildbot_utils.create_builder_from_arguments()
|
||||
update_git(builder)
|
||||
clean_directories(builder)
|
||||
cmake_configure(builder)
|
||||
cmake_build(builder)
|
208
build_files/buildbot/worker_pack.py
Normal file
208
build_files/buildbot/worker_pack.py
Normal file
@@ -0,0 +1,208 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Runs on buildbot worker, creating a release package using the build
|
||||
# system and zipping it into buildbot_upload.zip. This is then uploaded
|
||||
# to the master in the next buildbot step.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import buildbot_utils
|
||||
|
||||
|
||||
def get_package_name(builder, platform=None):
|
||||
info = buildbot_utils.VersionInfo(builder)
|
||||
|
||||
package_name = 'blender-' + info.full_version
|
||||
if platform:
|
||||
package_name += '-' + platform
|
||||
if not (builder.branch == 'master' or builder.is_release_branch):
|
||||
if info.is_development_build:
|
||||
package_name = builder.branch + "-" + package_name
|
||||
|
||||
return package_name
|
||||
|
||||
|
||||
def sign_file_or_directory(path):
|
||||
from codesign.simple_code_signer import SimpleCodeSigner
|
||||
code_signer = SimpleCodeSigner()
|
||||
code_signer.sign_file_or_directory(Path(path))
|
||||
|
||||
|
||||
def create_buildbot_upload_zip(builder, package_files):
|
||||
import zipfile
|
||||
|
||||
buildbot_upload_zip = os.path.join(builder.upload_dir, "buildbot_upload.zip")
|
||||
if os.path.exists(buildbot_upload_zip):
|
||||
os.remove(buildbot_upload_zip)
|
||||
|
||||
try:
|
||||
z = zipfile.ZipFile(buildbot_upload_zip, "w", compression=zipfile.ZIP_STORED)
|
||||
for filepath, filename in package_files:
|
||||
print("Packaged", filename)
|
||||
z.write(filepath, arcname=filename)
|
||||
z.close()
|
||||
except Exception as ex:
|
||||
sys.stderr.write('Create buildbot_upload.zip failed: ' + str(ex) + '\n')
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def create_tar_xz(src, dest, package_name):
|
||||
# One extra to remove leading os.sep when cleaning root for package_root
|
||||
ln = len(src) + 1
|
||||
flist = list()
|
||||
|
||||
# Create list of tuples containing file and archive name
|
||||
for root, dirs, files in os.walk(src):
|
||||
package_root = os.path.join(package_name, root[ln:])
|
||||
flist.extend([(os.path.join(root, file), os.path.join(package_root, file)) for file in files])
|
||||
|
||||
import tarfile
|
||||
|
||||
# Set UID/GID of archived files to 0, otherwise they'd be owned by whatever
|
||||
# user compiled the package. If root then unpacks it to /usr/local/ you get
|
||||
# a security issue.
|
||||
def _fakeroot(tarinfo):
|
||||
tarinfo.gid = 0
|
||||
tarinfo.gname = "root"
|
||||
tarinfo.uid = 0
|
||||
tarinfo.uname = "root"
|
||||
return tarinfo
|
||||
|
||||
package = tarfile.open(dest, 'w:xz', preset=9)
|
||||
for entry in flist:
|
||||
package.add(entry[0], entry[1], recursive=False, filter=_fakeroot)
|
||||
package.close()
|
||||
|
||||
|
||||
def cleanup_files(dirpath, extension):
|
||||
for f in os.listdir(dirpath):
|
||||
filepath = os.path.join(dirpath, f)
|
||||
if os.path.isfile(filepath) and f.endswith(extension):
|
||||
os.remove(filepath)
|
||||
|
||||
|
||||
def pack_mac(builder):
|
||||
info = buildbot_utils.VersionInfo(builder)
|
||||
|
||||
os.chdir(builder.build_dir)
|
||||
cleanup_files(builder.build_dir, '.dmg')
|
||||
|
||||
package_name = get_package_name(builder, 'macOS')
|
||||
package_filename = package_name + '.dmg'
|
||||
package_filepath = os.path.join(builder.build_dir, package_filename)
|
||||
|
||||
release_dir = os.path.join(builder.blender_dir, 'release', 'darwin')
|
||||
buildbot_dir = os.path.join(builder.blender_dir, 'build_files', 'buildbot')
|
||||
bundle_script = os.path.join(buildbot_dir, 'worker_bundle_dmg.py')
|
||||
|
||||
command = [bundle_script]
|
||||
command += ['--dmg', package_filepath]
|
||||
if info.is_development_build:
|
||||
background_image = os.path.join(release_dir, 'buildbot', 'background.tif')
|
||||
command += ['--background-image', background_image]
|
||||
if builder.codesign:
|
||||
command += ['--codesign']
|
||||
command += [builder.install_dir]
|
||||
buildbot_utils.call(command)
|
||||
|
||||
create_buildbot_upload_zip(builder, [(package_filepath, package_filename)])
|
||||
|
||||
|
||||
def pack_win(builder):
|
||||
info = buildbot_utils.VersionInfo(builder)
|
||||
|
||||
os.chdir(builder.build_dir)
|
||||
cleanup_files(builder.build_dir, '.zip')
|
||||
|
||||
# CPack will add the platform name
|
||||
cpack_name = get_package_name(builder, None)
|
||||
package_name = get_package_name(builder, 'windows' + str(builder.bits))
|
||||
|
||||
command = ['cmake', '-DCPACK_OVERRIDE_PACKAGENAME:STRING=' + cpack_name, '.']
|
||||
buildbot_utils.call(builder.command_prefix + command)
|
||||
command = ['cpack', '-G', 'ZIP']
|
||||
buildbot_utils.call(builder.command_prefix + command)
|
||||
|
||||
package_filename = package_name + '.zip'
|
||||
package_filepath = os.path.join(builder.build_dir, package_filename)
|
||||
package_files = [(package_filepath, package_filename)]
|
||||
|
||||
if info.version_cycle == 'release':
|
||||
# Installer only for final release builds, otherwise will get
|
||||
# 'this product is already installed' messages.
|
||||
command = ['cpack', '-G', 'WIX']
|
||||
buildbot_utils.call(builder.command_prefix + command)
|
||||
|
||||
package_filename = package_name + '.msi'
|
||||
package_filepath = os.path.join(builder.build_dir, package_filename)
|
||||
if builder.codesign:
|
||||
sign_file_or_directory(package_filepath)
|
||||
|
||||
package_files += [(package_filepath, package_filename)]
|
||||
|
||||
create_buildbot_upload_zip(builder, package_files)
|
||||
|
||||
|
||||
def pack_linux(builder):
|
||||
blender_executable = os.path.join(builder.install_dir, 'blender')
|
||||
|
||||
info = buildbot_utils.VersionInfo(builder)
|
||||
|
||||
# Strip all unused symbols from the binaries
|
||||
print("Stripping binaries...")
|
||||
buildbot_utils.call(builder.command_prefix + ['strip', '--strip-all', blender_executable])
|
||||
|
||||
print("Stripping python...")
|
||||
py_target = os.path.join(builder.install_dir, info.short_version)
|
||||
buildbot_utils.call(
|
||||
builder.command_prefix + [
|
||||
'find', py_target, '-iname', '*.so', '-exec', 'strip', '-s', '{}', ';',
|
||||
],
|
||||
)
|
||||
|
||||
# Construct package name
|
||||
platform_name = 'linux64'
|
||||
package_name = get_package_name(builder, platform_name)
|
||||
package_filename = package_name + ".tar.xz"
|
||||
|
||||
print("Creating .tar.xz archive")
|
||||
package_filepath = builder.install_dir + '.tar.xz'
|
||||
create_tar_xz(builder.install_dir, package_filepath, package_name)
|
||||
|
||||
# Create buildbot_upload.zip
|
||||
create_buildbot_upload_zip(builder, [(package_filepath, package_filename)])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
builder = buildbot_utils.create_builder_from_arguments()
|
||||
|
||||
# Make sure install directory always exists
|
||||
os.makedirs(builder.install_dir, exist_ok=True)
|
||||
|
||||
if builder.platform == 'mac':
|
||||
pack_mac(builder)
|
||||
elif builder.platform == 'win':
|
||||
pack_win(builder)
|
||||
elif builder.platform == 'linux':
|
||||
pack_linux(builder)
|
42
build_files/buildbot/worker_test.py
Normal file
42
build_files/buildbot/worker_test.py
Normal file
@@ -0,0 +1,42 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import buildbot_utils
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def get_ctest_arguments(builder):
|
||||
args = ['--output-on-failure']
|
||||
if builder.platform == 'win':
|
||||
args += ['-C', 'Release']
|
||||
return args
|
||||
|
||||
|
||||
def test(builder):
|
||||
os.chdir(builder.build_dir)
|
||||
|
||||
command = builder.command_prefix + ['ctest'] + get_ctest_arguments(builder)
|
||||
buildbot_utils.call(command)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
builder = buildbot_utils.create_builder_from_arguments()
|
||||
test(builder)
|
31
build_files/buildbot/worker_update.py
Normal file
31
build_files/buildbot/worker_update.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import buildbot_utils
|
||||
import os
|
||||
import sys
|
||||
|
||||
if __name__ == "__main__":
|
||||
builder = buildbot_utils.create_builder_from_arguments()
|
||||
os.chdir(builder.blender_dir)
|
||||
|
||||
# Run make update which handles all libraries and submodules.
|
||||
make_update = os.path.join(builder.blender_dir, "build_files", "utils", "make_update.py")
|
||||
buildbot_utils.call([sys.executable, make_update, '--no-blender', "--use-tests", "--use-centos-libraries"])
|
@@ -20,24 +20,8 @@ if(NOT CLANG_ROOT_DIR AND NOT $ENV{CLANG_ROOT_DIR} STREQUAL "")
|
||||
set(CLANG_ROOT_DIR $ENV{CLANG_ROOT_DIR})
|
||||
endif()
|
||||
|
||||
if(NOT LLVM_ROOT_DIR)
|
||||
if(DEFINED LLVM_VERSION)
|
||||
message(running llvm-config-${LLVM_VERSION})
|
||||
find_program(LLVM_CONFIG llvm-config-${LLVM_VERSION})
|
||||
endif()
|
||||
if(NOT LLVM_CONFIG)
|
||||
find_program(LLVM_CONFIG llvm-config)
|
||||
endif()
|
||||
|
||||
execute_process(COMMAND ${LLVM_CONFIG} --prefix
|
||||
OUTPUT_VARIABLE LLVM_ROOT_DIR
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
set(LLVM_ROOT_DIR ${LLVM_ROOT_DIR} CACHE PATH "Path to the LLVM installation")
|
||||
endif()
|
||||
|
||||
set(_CLANG_SEARCH_DIRS
|
||||
${CLANG_ROOT_DIR}
|
||||
${LLVM_ROOT_DIR}
|
||||
/opt/lib/clang
|
||||
)
|
||||
|
||||
|
@@ -56,6 +56,10 @@ set(WITH_TBB ON CACHE BOOL "" FORCE)
|
||||
set(WITH_USD ON CACHE BOOL "" FORCE)
|
||||
|
||||
set(WITH_MEM_JEMALLOC ON CACHE BOOL "" FORCE)
|
||||
set(WITH_CYCLES_CUDA_BINARIES ON CACHE BOOL "" FORCE)
|
||||
set(WITH_CYCLES_CUBIN_COMPILER OFF CACHE BOOL "" FORCE)
|
||||
set(CYCLES_CUDA_BINARIES_ARCH sm_30;sm_35;sm_37;sm_50;sm_52;sm_60;sm_61;sm_70;sm_75;sm_86;compute_75 CACHE STRING "" FORCE)
|
||||
set(WITH_CYCLES_DEVICE_OPTIX ON CACHE BOOL "" FORCE)
|
||||
|
||||
# platform dependent options
|
||||
if(APPLE)
|
||||
@@ -76,8 +80,4 @@ if(UNIX AND NOT APPLE)
|
||||
endif()
|
||||
if(NOT APPLE)
|
||||
set(WITH_XR_OPENXR ON CACHE BOOL "" FORCE)
|
||||
|
||||
set(WITH_CYCLES_DEVICE_OPTIX ON CACHE BOOL "" FORCE)
|
||||
set(WITH_CYCLES_CUDA_BINARIES ON CACHE BOOL "" FORCE)
|
||||
set(WITH_CYCLES_CUBIN_COMPILER OFF CACHE BOOL "" FORCE)
|
||||
endif()
|
||||
|
@@ -596,6 +596,14 @@ function(SETUP_LIBDIRS)
|
||||
link_directories(${GMP_LIBPATH})
|
||||
endif()
|
||||
|
||||
if(WITH_GHOST_WAYLAND)
|
||||
link_directories(
|
||||
${wayland-client_LIBRARY_DIRS}
|
||||
${wayland-egl_LIBRARY_DIRS}
|
||||
${xkbcommon_LIBRARY_DIRS}
|
||||
${wayland-cursor_LIBRARY_DIRS})
|
||||
endif()
|
||||
|
||||
if(WIN32 AND NOT UNIX)
|
||||
link_directories(${PTHREADS_LIBPATH})
|
||||
endif()
|
||||
|
@@ -104,8 +104,8 @@ if(WIN32)
|
||||
set(CPACK_WIX_LIGHT_EXTRA_FLAGS -dcl:medium)
|
||||
endif()
|
||||
|
||||
set(CPACK_PACKAGE_EXECUTABLES "blender-launcher" "blender")
|
||||
set(CPACK_CREATE_DESKTOP_LINKS "blender-launcher" "blender")
|
||||
set(CPACK_PACKAGE_EXECUTABLES "blender" "blender")
|
||||
set(CPACK_CREATE_DESKTOP_LINKS "blender" "blender")
|
||||
|
||||
include(CPack)
|
||||
|
||||
|
@@ -575,17 +575,17 @@ if(WITH_GHOST_WAYLAND)
|
||||
pkg_check_modules(wayland-scanner REQUIRED wayland-scanner)
|
||||
pkg_check_modules(xkbcommon REQUIRED xkbcommon)
|
||||
pkg_check_modules(wayland-cursor REQUIRED wayland-cursor)
|
||||
pkg_check_modules(dbus REQUIRED dbus-1)
|
||||
|
||||
set(WITH_GL_EGL ON)
|
||||
|
||||
list(APPEND PLATFORM_LINKLIBS
|
||||
${wayland-client_LINK_LIBRARIES}
|
||||
${wayland-egl_LINK_LIBRARIES}
|
||||
${xkbcommon_LINK_LIBRARIES}
|
||||
${wayland-cursor_LINK_LIBRARIES}
|
||||
${dbus_LINK_LIBRARIES}
|
||||
)
|
||||
if(WITH_GHOST_WAYLAND)
|
||||
list(APPEND PLATFORM_LINKLIBS
|
||||
${wayland-client_LIBRARIES}
|
||||
${wayland-egl_LIBRARIES}
|
||||
${xkbcommon_LIBRARIES}
|
||||
${wayland-cursor_LIBRARIES}
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WITH_GHOST_X11)
|
||||
|
@@ -119,7 +119,7 @@ string(APPEND CMAKE_MODULE_LINKER_FLAGS " /SAFESEH:NO /ignore:4099")
|
||||
list(APPEND PLATFORM_LINKLIBS
|
||||
ws2_32 vfw32 winmm kernel32 user32 gdi32 comdlg32 Comctl32 version
|
||||
advapi32 shfolder shell32 ole32 oleaut32 uuid psapi Dbghelp Shlwapi
|
||||
pathcch Shcore
|
||||
pathcch
|
||||
)
|
||||
|
||||
if(WITH_INPUT_IME)
|
||||
@@ -144,8 +144,8 @@ add_definitions(-D_ALLOW_KEYWORD_MACROS)
|
||||
# that both /GR and /GR- are specified.
|
||||
remove_cc_flag("/GR")
|
||||
|
||||
# Make the Windows 8.1 API available for use.
|
||||
add_definitions(-D_WIN32_WINNT=0x603)
|
||||
# We want to support Windows 7 level ABI
|
||||
add_definitions(-D_WIN32_WINNT=0x601)
|
||||
include(build_files/cmake/platform/platform_win32_bundle_crt.cmake)
|
||||
remove_cc_flag("/MDd" "/MD" "/Zi")
|
||||
|
||||
@@ -675,11 +675,10 @@ if(WITH_SYSTEM_AUDASPACE)
|
||||
endif()
|
||||
|
||||
if(WITH_TBB)
|
||||
set(TBB_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbb.lib debug ${LIBDIR}/tbb/lib/tbb_debug.lib)
|
||||
set(TBB_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbb.lib debug ${LIBDIR}/tbb/lib/debug/tbb_debug.lib)
|
||||
set(TBB_INCLUDE_DIR ${LIBDIR}/tbb/include)
|
||||
set(TBB_INCLUDE_DIRS ${TBB_INCLUDE_DIR})
|
||||
if(WITH_TBB_MALLOC_PROXY)
|
||||
set(TBB_MALLOC_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbbmalloc.lib debug ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib)
|
||||
add_definitions(-DWITH_TBB_MALLOC)
|
||||
endif()
|
||||
endif()
|
||||
|
@@ -15,15 +15,6 @@ if(WITH_WINDOWS_BUNDLE_CRT)
|
||||
|
||||
include(InstallRequiredSystemLibraries)
|
||||
|
||||
# ucrtbase(d).dll cannot be in the manifest, due to the way windows 10 handles
|
||||
# redirects for this dll, for details see T88813.
|
||||
foreach(lib ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS})
|
||||
string(FIND ${lib} "ucrtbase" pos)
|
||||
if(NOT pos EQUAL -1)
|
||||
list(REMOVE_ITEM CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS ${lib})
|
||||
install(FILES ${lib} DESTINATION . COMPONENT Libraries)
|
||||
endif()
|
||||
endforeach()
|
||||
# Install the CRT to the blender.crt Sub folder.
|
||||
install(FILES ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS} DESTINATION ./blender.crt COMPONENT Libraries)
|
||||
|
||||
|
@@ -1,8 +0,0 @@
|
||||
Pipeline Config
|
||||
===============
|
||||
|
||||
This configuration file is used by buildbot new pipeline for the `update-code` step.
|
||||
|
||||
It will soon be used by the ../utils/make_update.py script.
|
||||
|
||||
Both buildbot and developers will eventually use the same configuration file.
|
@@ -1,87 +0,0 @@
|
||||
{
|
||||
"update-code":
|
||||
{
|
||||
"git" :
|
||||
{
|
||||
"submodules":
|
||||
[
|
||||
{ "path": "release/scripts/addons", "branch": "master", "commit_id": "HEAD" },
|
||||
{ "path": "release/scripts/addons_contrib", "branch": "master", "commit_id": "HEAD" },
|
||||
{ "path": "release/datafiles/locale", "branch": "master", "commit_id": "HEAD" },
|
||||
{ "path": "source/tools", "branch": "master", "commit_id": "HEAD" }
|
||||
]
|
||||
},
|
||||
"svn":
|
||||
{
|
||||
"tests": { "path": "lib/tests", "branch": "trunk", "commit_id": "HEAD" },
|
||||
"libraries":
|
||||
{
|
||||
"darwin-x86_64": { "path": "lib/darwin", "branch": "trunk", "commit_id": "HEAD" },
|
||||
"darwin-arm64": { "path": "lib/darwin_arm64", "branch": "trunk", "commit_id": "HEAD" },
|
||||
"linux-x86_64": { "path": "lib/linux_centos7_x86_64", "branch": "trunk", "commit_id": "HEAD" },
|
||||
"windows-amd64": { "path": "lib/win64_vc15", "branch": "trunk", "commit_id": "HEAD" }
|
||||
}
|
||||
}
|
||||
},
|
||||
"buildbot":
|
||||
{
|
||||
"gcc":
|
||||
{
|
||||
"version": "9.0"
|
||||
},
|
||||
"sdks":
|
||||
{
|
||||
"optix":
|
||||
{
|
||||
"version": "7.1.0"
|
||||
},
|
||||
"cuda10":
|
||||
{
|
||||
"version": "10.1"
|
||||
},
|
||||
"cuda11":
|
||||
{
|
||||
"version": "11.3"
|
||||
}
|
||||
},
|
||||
"cmake":
|
||||
{
|
||||
"default":
|
||||
{
|
||||
"version": "any",
|
||||
"overrides":
|
||||
{
|
||||
|
||||
}
|
||||
},
|
||||
"darwin-x86_64":
|
||||
{
|
||||
"overrides":
|
||||
{
|
||||
|
||||
}
|
||||
},
|
||||
"darwin-arm64":
|
||||
{
|
||||
"overrides":
|
||||
{
|
||||
|
||||
}
|
||||
},
|
||||
"linux-x86_64":
|
||||
{
|
||||
"overrides":
|
||||
{
|
||||
|
||||
}
|
||||
},
|
||||
"windows-amd64":
|
||||
{
|
||||
"overrides":
|
||||
{
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,5 +0,0 @@
|
||||
Make Utility Scripts
|
||||
====================
|
||||
|
||||
Scripts used only by developers for now
|
||||
|
@@ -85,7 +85,7 @@ def openBlendFile(filename):
|
||||
'''
|
||||
handle = open(filename, 'rb')
|
||||
magic = ReadString(handle, 7)
|
||||
if magic in {"BLENDER", "BULLETf"}:
|
||||
if magic in ("BLENDER", "BULLETf"):
|
||||
log.debug("normal blendfile detected")
|
||||
handle.seek(0, os.SEEK_SET)
|
||||
return handle
|
||||
@@ -137,7 +137,7 @@ class BlendFile:
|
||||
fileblock = BlendFileBlock(handle, self)
|
||||
found_dna_block = False
|
||||
while not found_dna_block:
|
||||
if fileblock.Header.Code in {"DNA1", "SDNA"}:
|
||||
if fileblock.Header.Code in ("DNA1", "SDNA"):
|
||||
self.Catalog = DNACatalog(self.Header, handle)
|
||||
found_dna_block = True
|
||||
else:
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Doxyfile 1.9.1
|
||||
# Doxyfile 1.8.16
|
||||
|
||||
# This file describes the settings to be used by the documentation system
|
||||
# doxygen (www.doxygen.org) for a project.
|
||||
@@ -38,7 +38,7 @@ PROJECT_NAME = Blender
|
||||
# could be handy for archiving the generated documentation or if some version
|
||||
# control system is used.
|
||||
|
||||
PROJECT_NUMBER = V3.0
|
||||
PROJECT_NUMBER = "V3.0"
|
||||
|
||||
# Using the PROJECT_BRIEF tag one can provide an optional one line description
|
||||
# for a project that appears at the top of each page and should give viewer a
|
||||
@@ -227,14 +227,6 @@ QT_AUTOBRIEF = NO
|
||||
|
||||
MULTILINE_CPP_IS_BRIEF = NO
|
||||
|
||||
# By default Python docstrings are displayed as preformatted text and doxygen's
|
||||
# special commands cannot be used. By setting PYTHON_DOCSTRING to NO the
|
||||
# doxygen's special commands can be used and the contents of the docstring
|
||||
# documentation blocks is shown as doxygen documentation.
|
||||
# The default value is: YES.
|
||||
|
||||
PYTHON_DOCSTRING = YES
|
||||
|
||||
# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
|
||||
# documentation from any documented member that it re-implements.
|
||||
# The default value is: YES.
|
||||
@@ -271,6 +263,12 @@ TAB_SIZE = 4
|
||||
|
||||
ALIASES =
|
||||
|
||||
# This tag can be used to specify a number of word-keyword mappings (TCL only).
|
||||
# A mapping has the form "name=value". For example adding "class=itcl::class"
|
||||
# will allow you to use the command class in the itcl::class meaning.
|
||||
|
||||
TCL_SUBST =
|
||||
|
||||
# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
|
||||
# only. Doxygen will then generate output that is more tailored for C. For
|
||||
# instance, some of the names that are used will be different. The list of all
|
||||
@@ -311,22 +309,19 @@ OPTIMIZE_OUTPUT_SLICE = NO
|
||||
# parses. With this tag you can assign which parser to use for a given
|
||||
# extension. Doxygen has a built-in mapping, but you can override or extend it
|
||||
# using this tag. The format is ext=language, where ext is a file extension, and
|
||||
# language is one of the parsers supported by doxygen: IDL, Java, JavaScript,
|
||||
# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL,
|
||||
# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
|
||||
# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice,
|
||||
# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
|
||||
# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser
|
||||
# tries to guess whether the code is fixed or free formatted code, this is the
|
||||
# default for Fortran type files). For instance to make doxygen treat .inc files
|
||||
# as Fortran files (default is PHP), and .f files as C (default is Fortran),
|
||||
# use: inc=Fortran f=C.
|
||||
# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat
|
||||
# .inc files as Fortran files (default is PHP), and .f files as C (default is
|
||||
# Fortran), use: inc=Fortran f=C.
|
||||
#
|
||||
# Note: For files without extension you can use no_extension as a placeholder.
|
||||
#
|
||||
# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
|
||||
# the files are not read by doxygen. When specifying no_extension you should add
|
||||
# * to the FILE_PATTERNS.
|
||||
#
|
||||
# Note see also the list of default file extension mappings.
|
||||
# the files are not read by doxygen.
|
||||
|
||||
EXTENSION_MAPPING =
|
||||
|
||||
@@ -460,19 +455,6 @@ TYPEDEF_HIDES_STRUCT = NO
|
||||
|
||||
LOOKUP_CACHE_SIZE = 3
|
||||
|
||||
# The NUM_PROC_THREADS specifies the number threads doxygen is allowed to use
|
||||
# during processing. When set to 0 doxygen will based this on the number of
|
||||
# cores available in the system. You can set it explicitly to a value larger
|
||||
# than 0 to get more control over the balance between CPU load and processing
|
||||
# speed. At this moment only the input processing can be done using multiple
|
||||
# threads. Since this is still an experimental feature the default is set to 1,
|
||||
# which efficively disables parallel processing. Please report any issues you
|
||||
# encounter. Generating dot graphs in parallel is controlled by the
|
||||
# DOT_NUM_THREADS setting.
|
||||
# Minimum value: 0, maximum value: 32, default value: 1.
|
||||
|
||||
NUM_PROC_THREADS = 1
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Build related configuration options
|
||||
#---------------------------------------------------------------------------
|
||||
@@ -536,13 +518,6 @@ EXTRACT_LOCAL_METHODS = NO
|
||||
|
||||
EXTRACT_ANON_NSPACES = NO
|
||||
|
||||
# If this flag is set to YES, the name of an unnamed parameter in a declaration
|
||||
# will be determined by the corresponding definition. By default unnamed
|
||||
# parameters remain unnamed in the output.
|
||||
# The default value is: YES.
|
||||
|
||||
RESOLVE_UNNAMED_PARAMS = YES
|
||||
|
||||
# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
|
||||
# undocumented members inside documented classes or files. If set to NO these
|
||||
# members will be included in the various overviews, but no documentation
|
||||
@@ -560,8 +535,8 @@ HIDE_UNDOC_MEMBERS = NO
|
||||
HIDE_UNDOC_CLASSES = NO
|
||||
|
||||
# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
|
||||
# declarations. If set to NO, these declarations will be included in the
|
||||
# documentation.
|
||||
# (class|struct|union) declarations. If set to NO, these declarations will be
|
||||
# included in the documentation.
|
||||
# The default value is: NO.
|
||||
|
||||
HIDE_FRIEND_COMPOUNDS = NO
|
||||
@@ -580,18 +555,11 @@ HIDE_IN_BODY_DOCS = NO
|
||||
|
||||
INTERNAL_DOCS = YES
|
||||
|
||||
# With the correct setting of option CASE_SENSE_NAMES doxygen will better be
|
||||
# able to match the capabilities of the underlying filesystem. In case the
|
||||
# filesystem is case sensitive (i.e. it supports files in the same directory
|
||||
# whose names only differ in casing), the option must be set to YES to properly
|
||||
# deal with such files in case they appear in the input. For filesystems that
|
||||
# are not case sensitive the option should be be set to NO to properly deal with
|
||||
# output files written for symbols that only differ in casing, such as for two
|
||||
# classes, one named CLASS and the other named Class, and to also support
|
||||
# references to files without having to specify the exact matching casing. On
|
||||
# Windows (including Cygwin) and MacOS, users should typically set this option
|
||||
# to NO, whereas on Linux or other Unix flavors it should typically be set to
|
||||
# YES.
|
||||
# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
|
||||
# names in lower-case letters. If set to YES, upper-case letters are also
|
||||
# allowed. This is useful if you have classes or files whose names only differ
|
||||
# in case and if your file system supports case sensitive file names. Windows
|
||||
# (including Cygwin) ands Mac users are advised to set this option to NO.
|
||||
# The default value is: system dependent.
|
||||
|
||||
CASE_SENSE_NAMES = YES
|
||||
@@ -830,10 +798,7 @@ WARN_IF_DOC_ERROR = YES
|
||||
WARN_NO_PARAMDOC = NO
|
||||
|
||||
# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
|
||||
# a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS
|
||||
# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but
|
||||
# at the end of the doxygen process doxygen will return with a non-zero status.
|
||||
# Possible values are: NO, YES and FAIL_ON_WARNINGS.
|
||||
# a warning is encountered.
|
||||
# The default value is: NO.
|
||||
|
||||
WARN_AS_ERROR = NO
|
||||
@@ -875,8 +840,8 @@ INPUT = doxygen.main.h \
|
||||
# This tag can be used to specify the character encoding of the source files
|
||||
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
|
||||
# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
|
||||
# documentation (see:
|
||||
# https://www.gnu.org/software/libiconv/) for the list of possible encodings.
|
||||
# documentation (see: https://www.gnu.org/software/libiconv/) for the list of
|
||||
# possible encodings.
|
||||
# The default value is: UTF-8.
|
||||
|
||||
INPUT_ENCODING = UTF-8
|
||||
@@ -889,15 +854,11 @@ INPUT_ENCODING = UTF-8
|
||||
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
|
||||
# read by doxygen.
|
||||
#
|
||||
# Note the list of default checked file patterns might differ from the list of
|
||||
# default file extension mappings.
|
||||
#
|
||||
# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
|
||||
# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
|
||||
# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
|
||||
# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment),
|
||||
# *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, *.vhdl,
|
||||
# *.ucf, *.qsf and *.ice.
|
||||
# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
|
||||
# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice.
|
||||
|
||||
FILE_PATTERNS =
|
||||
|
||||
@@ -1125,6 +1086,13 @@ VERBATIM_HEADERS = YES
|
||||
|
||||
ALPHABETICAL_INDEX = YES
|
||||
|
||||
# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
|
||||
# which the alphabetical index list will be split.
|
||||
# Minimum value: 1, maximum value: 20, default value: 5.
|
||||
# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
|
||||
|
||||
COLS_IN_ALPHA_INDEX = 5
|
||||
|
||||
# In case all classes in a project start with a common prefix, all classes will
|
||||
# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
|
||||
# can be used to specify a prefix (or a list of prefixes) that should be ignored
|
||||
@@ -1263,9 +1231,9 @@ HTML_TIMESTAMP = YES
|
||||
|
||||
# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
|
||||
# documentation will contain a main index with vertical navigation menus that
|
||||
# are dynamically created via JavaScript. If disabled, the navigation index will
|
||||
# are dynamically created via Javascript. If disabled, the navigation index will
|
||||
# consists of multiple levels of tabs that are statically embedded in every HTML
|
||||
# page. Disable this option to support browsers that do not have JavaScript,
|
||||
# page. Disable this option to support browsers that do not have Javascript,
|
||||
# like the Qt help browser.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
@@ -1295,11 +1263,10 @@ HTML_INDEX_NUM_ENTRIES = 100
|
||||
|
||||
# If the GENERATE_DOCSET tag is set to YES, additional index files will be
|
||||
# generated that can be used as input for Apple's Xcode 3 integrated development
|
||||
# environment (see:
|
||||
# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To
|
||||
# create a documentation set, doxygen will generate a Makefile in the HTML
|
||||
# output directory. Running make will produce the docset in that directory and
|
||||
# running make install will install the docset in
|
||||
# environment (see: https://developer.apple.com/xcode/), introduced with OSX
|
||||
# 10.5 (Leopard). To create a documentation set, doxygen will generate a
|
||||
# Makefile in the HTML output directory. Running make will produce the docset in
|
||||
# that directory and running make install will install the docset in
|
||||
# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
|
||||
# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy
|
||||
# genXcode/_index.html for more information.
|
||||
@@ -1341,8 +1308,8 @@ DOCSET_PUBLISHER_NAME = Publisher
|
||||
# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
|
||||
# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
|
||||
# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
|
||||
# (see:
|
||||
# https://www.microsoft.com/en-us/download/details.aspx?id=21138) on Windows.
|
||||
# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on
|
||||
# Windows.
|
||||
#
|
||||
# The HTML Help Workshop contains a compiler that can convert all HTML output
|
||||
# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
|
||||
@@ -1372,7 +1339,7 @@ CHM_FILE = blender.chm
|
||||
HHC_LOCATION = "C:/Program Files (x86)/HTML Help Workshop/hhc.exe"
|
||||
|
||||
# The GENERATE_CHI flag controls if a separate .chi index file is generated
|
||||
# (YES) or that it should be included in the main .chm file (NO).
|
||||
# (YES) or that it should be included in the master .chm file (NO).
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
|
||||
|
||||
@@ -1417,8 +1384,7 @@ QCH_FILE =
|
||||
|
||||
# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
|
||||
# Project output. For more information please see Qt Help Project / Namespace
|
||||
# (see:
|
||||
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
|
||||
# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
|
||||
# The default value is: org.doxygen.Project.
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
@@ -1426,8 +1392,8 @@ QHP_NAMESPACE = org.doxygen.Project
|
||||
|
||||
# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
|
||||
# Help Project output. For more information please see Qt Help Project / Virtual
|
||||
# Folders (see:
|
||||
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders).
|
||||
# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-
|
||||
# folders).
|
||||
# The default value is: doc.
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
@@ -1435,16 +1401,16 @@ QHP_VIRTUAL_FOLDER = doc
|
||||
|
||||
# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
|
||||
# filter to add. For more information please see Qt Help Project / Custom
|
||||
# Filters (see:
|
||||
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
|
||||
# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
|
||||
# filters).
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
QHP_CUST_FILTER_NAME =
|
||||
|
||||
# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
|
||||
# custom filter to add. For more information please see Qt Help Project / Custom
|
||||
# Filters (see:
|
||||
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
|
||||
# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
|
||||
# filters).
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
QHP_CUST_FILTER_ATTRS =
|
||||
@@ -1456,9 +1422,9 @@ QHP_CUST_FILTER_ATTRS =
|
||||
|
||||
QHP_SECT_FILTER_ATTRS =
|
||||
|
||||
# The QHG_LOCATION tag can be used to specify the location (absolute path
|
||||
# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to
|
||||
# run qhelpgenerator on the generated .qhp file.
|
||||
# The QHG_LOCATION tag can be used to specify the location of Qt's
|
||||
# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
|
||||
# generated .qhp file.
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
QHG_LOCATION =
|
||||
@@ -1535,17 +1501,6 @@ TREEVIEW_WIDTH = 246
|
||||
|
||||
EXT_LINKS_IN_WINDOW = NO
|
||||
|
||||
# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg
|
||||
# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see
|
||||
# https://inkscape.org) to generate formulas as SVG images instead of PNGs for
|
||||
# the HTML output. These images will generally look nicer at scaled resolutions.
|
||||
# Possible values are: png (the default) and svg (looks nicer but requires the
|
||||
# pdf2svg or inkscape tool).
|
||||
# The default value is: png.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_FORMULA_FORMAT = png
|
||||
|
||||
# Use this tag to change the font size of LaTeX formulas included as images in
|
||||
# the HTML documentation. When you change the font size after a successful
|
||||
# doxygen run you need to manually remove any form_*.png images from the HTML
|
||||
@@ -1566,14 +1521,8 @@ FORMULA_FONTSIZE = 10
|
||||
|
||||
FORMULA_TRANSPARENT = YES
|
||||
|
||||
# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands
|
||||
# to create new LaTeX commands to be used in formulas as building blocks. See
|
||||
# the section "Including formulas" for details.
|
||||
|
||||
FORMULA_MACROFILE =
|
||||
|
||||
# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
|
||||
# https://www.mathjax.org) which uses client side JavaScript for the rendering
|
||||
# https://www.mathjax.org) which uses client side Javascript for the rendering
|
||||
# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
|
||||
# installed or if you want to formulas look prettier in the HTML output. When
|
||||
# enabled you may also need to install MathJax separately and configure the path
|
||||
@@ -1585,7 +1534,7 @@ USE_MATHJAX = NO
|
||||
|
||||
# When MathJax is enabled you can set the default output format to be used for
|
||||
# the MathJax output. See the MathJax site (see:
|
||||
# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details.
|
||||
# http://docs.mathjax.org/en/latest/output.html) for more details.
|
||||
# Possible values are: HTML-CSS (which is slower, but has the best
|
||||
# compatibility), NativeMML (i.e. MathML) and SVG.
|
||||
# The default value is: HTML-CSS.
|
||||
@@ -1601,7 +1550,7 @@ MATHJAX_FORMAT = HTML-CSS
|
||||
# Content Delivery Network so you can quickly see the result without installing
|
||||
# MathJax. However, it is strongly recommended to install a local copy of
|
||||
# MathJax from https://www.mathjax.org before deployment.
|
||||
# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2.
|
||||
# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/.
|
||||
# This tag requires that the tag USE_MATHJAX is set to YES.
|
||||
|
||||
MATHJAX_RELPATH = http://www.mathjax.org/mathjax
|
||||
@@ -1615,8 +1564,7 @@ MATHJAX_EXTENSIONS =
|
||||
|
||||
# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
|
||||
# of code that will be used on startup of the MathJax code. See the MathJax site
|
||||
# (see:
|
||||
# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an
|
||||
# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
|
||||
# example see the documentation.
|
||||
# This tag requires that the tag USE_MATHJAX is set to YES.
|
||||
|
||||
@@ -1644,7 +1592,7 @@ MATHJAX_CODEFILE =
|
||||
SEARCHENGINE = NO
|
||||
|
||||
# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
|
||||
# implemented using a web server instead of a web client using JavaScript. There
|
||||
# implemented using a web server instead of a web client using Javascript. There
|
||||
# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
|
||||
# setting. When disabled, doxygen will generate a PHP script for searching and
|
||||
# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
|
||||
@@ -1663,8 +1611,7 @@ SERVER_BASED_SEARCH = NO
|
||||
#
|
||||
# Doxygen ships with an example indexer (doxyindexer) and search engine
|
||||
# (doxysearch.cgi) which are based on the open source search engine library
|
||||
# Xapian (see:
|
||||
# https://xapian.org/).
|
||||
# Xapian (see: https://xapian.org/).
|
||||
#
|
||||
# See the section "External Indexing and Searching" for details.
|
||||
# The default value is: NO.
|
||||
@@ -1677,9 +1624,8 @@ EXTERNAL_SEARCH = NO
|
||||
#
|
||||
# Doxygen ships with an example indexer (doxyindexer) and search engine
|
||||
# (doxysearch.cgi) which are based on the open source search engine library
|
||||
# Xapian (see:
|
||||
# https://xapian.org/). See the section "External Indexing and Searching" for
|
||||
# details.
|
||||
# Xapian (see: https://xapian.org/). See the section "External Indexing and
|
||||
# Searching" for details.
|
||||
# This tag requires that the tag SEARCHENGINE is set to YES.
|
||||
|
||||
SEARCHENGINE_URL =
|
||||
@@ -1843,11 +1789,9 @@ LATEX_EXTRA_FILES =
|
||||
|
||||
PDF_HYPERLINKS = NO
|
||||
|
||||
# If the USE_PDFLATEX tag is set to YES, doxygen will use the engine as
|
||||
# specified with LATEX_CMD_NAME to generate the PDF file directly from the LaTeX
|
||||
# files. Set this option to YES, to get a higher quality PDF documentation.
|
||||
#
|
||||
# See also section LATEX_CMD_NAME for selecting the engine.
|
||||
# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
|
||||
# the PDF file directly from the LaTeX files. Set this option to YES, to get a
|
||||
# higher quality PDF documentation.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
@@ -2182,8 +2126,7 @@ INCLUDE_FILE_PATTERNS =
|
||||
# recursively expanded use the := operator instead of the = operator.
|
||||
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||
|
||||
PREDEFINED = BUILD_DATE \
|
||||
DOXYGEN=1
|
||||
PREDEFINED = BUILD_DATE
|
||||
|
||||
# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
|
||||
# tag can be used to specify a list of macro names that should be expanded. The
|
||||
@@ -2360,31 +2303,9 @@ UML_LOOK = YES
|
||||
# but if the number exceeds 15, the total amount of fields shown is limited to
|
||||
# 10.
|
||||
# Minimum value: 0, maximum value: 100, default value: 10.
|
||||
# This tag requires that the tag UML_LOOK is set to YES.
|
||||
|
||||
UML_LIMIT_NUM_FIELDS = 10
|
||||
|
||||
# If the DOT_UML_DETAILS tag is set to NO, doxygen will show attributes and
|
||||
# methods without types and arguments in the UML graphs. If the DOT_UML_DETAILS
|
||||
# tag is set to YES, doxygen will add type and arguments for attributes and
|
||||
# methods in the UML graphs. If the DOT_UML_DETAILS tag is set to NONE, doxygen
|
||||
# will not generate fields with class member information in the UML graphs. The
|
||||
# class diagrams will look similar to the default class diagrams but using UML
|
||||
# notation for the relationships.
|
||||
# Possible values are: NO, YES and NONE.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag UML_LOOK is set to YES.
|
||||
|
||||
DOT_UML_DETAILS = NO
|
||||
|
||||
# The DOT_WRAP_THRESHOLD tag can be used to set the maximum number of characters
|
||||
# to display on a single line. If the actual line length exceeds this threshold
|
||||
# significantly it will wrapped across multiple lines. Some heuristics are apply
|
||||
# to avoid ugly line breaks.
|
||||
# Minimum value: 0, maximum value: 1000, default value: 17.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_WRAP_THRESHOLD = 17
|
||||
UML_LIMIT_NUM_FIELDS = 10
|
||||
|
||||
# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
|
||||
# collaboration graphs will show the relations between templates and their
|
||||
@@ -2575,11 +2496,9 @@ DOT_MULTI_TARGETS = YES
|
||||
|
||||
GENERATE_LEGEND = YES
|
||||
|
||||
# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate
|
||||
# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
|
||||
# files that are used to generate the various graphs.
|
||||
#
|
||||
# Note: This setting is not only used for dot files but also for msc and
|
||||
# plantuml temporary files.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_CLEANUP = YES
|
||||
|
@@ -29,7 +29,7 @@ with offscreen.bind():
|
||||
amount = 10
|
||||
for i in range(-amount, amount + 1):
|
||||
x_pos = i / amount
|
||||
draw_circle_2d((x_pos, 0.0), (1, 1, 1, 1), 0.5, segments=200)
|
||||
draw_circle_2d((x_pos, 0.0), (1, 1, 1, 1), 0.5, 200)
|
||||
|
||||
|
||||
# Drawing the generated texture in 3D space
|
||||
|
@@ -34,9 +34,7 @@ with offscreen.bind():
|
||||
for i in range(RING_AMOUNT):
|
||||
draw_circle_2d(
|
||||
(random.uniform(-1, 1), random.uniform(-1, 1)),
|
||||
(1, 1, 1, 1), random.uniform(0.1, 1),
|
||||
segments=20,
|
||||
)
|
||||
(1, 1, 1, 1), random.uniform(0.1, 1), 20)
|
||||
|
||||
buffer = fb.read_color(0, 0, WIDTH, HEIGHT, 4, 0, 'UBYTE')
|
||||
|
||||
|
@@ -954,7 +954,7 @@ def pymodule2sphinx(basepath, module_name, module, title, module_all_extra):
|
||||
# constant, not much fun we can do here except to list it.
|
||||
# TODO, figure out some way to document these!
|
||||
fw(".. data:: %s\n\n" % attribute)
|
||||
write_indented_lines(" ", fw, "Constant value %s" % repr(value), False)
|
||||
write_indented_lines(" ", fw, "constant value %s" % repr(value), False)
|
||||
fw("\n")
|
||||
else:
|
||||
BPY_LOGGER.debug("\tnot documenting %s.%s of %r type" % (module_name, attribute, value_type.__name__))
|
||||
@@ -1246,7 +1246,7 @@ def pyrna_enum2sphinx(prop, use_empty_descriptions=False):
|
||||
"%s.\n" % (
|
||||
identifier,
|
||||
# Account for multi-line enum descriptions, allowing this to be a block of text.
|
||||
indent(" -- ".join(escape_rst(val) for val in (name, description) if val) or "Undocumented", " "),
|
||||
indent(", ".join(escape_rst(val) for val in (name, description) if val) or "Undocumented", " "),
|
||||
)
|
||||
for identifier, name, description in prop.enum_items
|
||||
])
|
||||
|
@@ -75,7 +75,6 @@ void FFMPEGWriter::encode()
|
||||
m_frame->nb_samples = m_input_samples;
|
||||
m_frame->format = m_codecCtx->sample_fmt;
|
||||
m_frame->channel_layout = m_codecCtx->channel_layout;
|
||||
m_frame->channels = m_specs.channels;
|
||||
|
||||
if(avcodec_fill_audio_frame(m_frame, m_specs.channels, m_codecCtx->sample_fmt, reinterpret_cast<data_t*>(data), m_input_buffer.getSize(), 0) < 0)
|
||||
AUD_THROW(FileException, "File couldn't be written, filling the audio frame failed with ffmpeg.");
|
||||
|
@@ -132,7 +132,7 @@ def init():
|
||||
_workaround_buggy_drivers()
|
||||
|
||||
path = os.path.dirname(__file__)
|
||||
user_path = os.path.dirname(os.path.abspath(bpy.utils.user_resource('CONFIG', path='')))
|
||||
user_path = os.path.dirname(os.path.abspath(bpy.utils.user_resource('CONFIG', '')))
|
||||
|
||||
_cycles.init(path, user_path, bpy.app.background)
|
||||
_parse_command_line()
|
||||
|
@@ -83,8 +83,6 @@ struct BlenderCamera {
BoundBox2D pano_viewplane;
BoundBox2D viewport_camera_border;

float passepartout_alpha;

Transform matrix;

float offscreen_dicing_scale;

@@ -127,7 +125,6 @@ static void blender_camera_init(BlenderCamera *bcam, BL::RenderSettings &b_rende
bcam->pano_viewplane.top = 1.0f;
bcam->viewport_camera_border.right = 1.0f;
bcam->viewport_camera_border.top = 1.0f;
bcam->passepartout_alpha = 0.5f;
bcam->offscreen_dicing_scale = 1.0f;
bcam->matrix = transform_identity();

@@ -215,8 +212,6 @@ static void blender_camera_from_object(BlenderCamera *bcam,

bcam->lens = b_camera.lens();

bcam->passepartout_alpha = b_camera.show_passepartout() ? b_camera.passepartout_alpha() : 0.0f;

if (b_camera.dof().use_dof()) {
/* allow f/stop number to change aperture_size but still
* give manual control over aperture radius */

@@ -839,19 +834,15 @@ static void blender_camera_border(BlenderCamera *bcam,
full_border,
&bcam->viewport_camera_border);

if (b_render.use_border()) {
bcam->border.left = b_render.border_min_x();
bcam->border.right = b_render.border_max_x();
bcam->border.bottom = b_render.border_min_y();
bcam->border.top = b_render.border_max_y();
}
else if (bcam->passepartout_alpha == 1.0f) {
bcam->border = full_border;
}
else {
if (!b_render.use_border()) {
return;
}

bcam->border.left = b_render.border_min_x();
bcam->border.right = b_render.border_max_x();
bcam->border.bottom = b_render.border_min_y();
bcam->border.top = b_render.border_max_y();

/* Determine viewport subset matching camera border. */
blender_camera_border_subset(b_engine,
b_render,

@@ -894,7 +885,8 @@ void BlenderSync::sync_view(BL::SpaceView3D &b_v3d,
}
}

BufferParams BlenderSync::get_buffer_params(BL::SpaceView3D &b_v3d,
BufferParams BlenderSync::get_buffer_params(BL::RenderSettings &b_render,
BL::SpaceView3D &b_v3d,
BL::RegionView3D &b_rv3d,
Camera *cam,
int width,

@@ -910,8 +902,7 @@ BufferParams BlenderSync::get_buffer_params(BL::SpaceView3D &b_v3d,
if (b_v3d && b_rv3d && b_rv3d.view_perspective() != BL::RegionView3D::view_perspective_CAMERA)
use_border = b_v3d.use_render_border();
else
/* the camera can always have a passepartout */
use_border = true;
use_border = b_render.use_border();

if (use_border) {
/* border render */

@@ -34,17 +34,12 @@ void BlenderSync::sync_light(BL::Object &b_parent,
bool *use_portal)
{
/* test if we need to sync */
Light *light;
ObjectKey key(b_parent, persistent_id, b_ob_instance, false);
BL::Light b_light(b_ob.data());

Light *light = light_map.find(key);

/* Check if the transform was modified, in case a linked collection is moved we do not get a
* specific depsgraph update (T88515). This also mimics the behavior for Objects. */
const bool tfm_updated = (light && light->get_tfm() != tfm);

/* Update if either object or light data changed. */
if (!tfm_updated && !light_map.add_or_update(&light, b_ob, b_parent, key)) {
if (!light_map.add_or_update(&light, b_ob, b_parent, key)) {
Shader *shader;
if (!shader_map.add_or_update(&shader, b_light)) {
if (light->get_is_portal())

@@ -155,7 +155,7 @@ void BlenderSession::create_session()

/* set buffer parameters */
BufferParams buffer_params = BlenderSync::get_buffer_params(
b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
b_render, b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
session->reset(buffer_params, session_params.samples);

b_engine.use_highlight_tiles(session_params.progressive_refine == false);

@@ -242,7 +242,8 @@ void BlenderSession::reset_session(BL::BlendData &b_data, BL::Depsgraph &b_depsg

BL::SpaceView3D b_null_space_view3d(PointerRNA_NULL);
BL::RegionView3D b_null_region_view3d(PointerRNA_NULL);
BufferParams buffer_params = BlenderSync::get_buffer_params(b_null_space_view3d,
BufferParams buffer_params = BlenderSync::get_buffer_params(b_render,
b_null_space_view3d,
b_null_region_view3d,
scene->camera,
width,

@@ -485,7 +486,7 @@ void BlenderSession::render(BL::Depsgraph &b_depsgraph_)
SessionParams session_params = BlenderSync::get_session_params(
b_engine, b_userpref, b_scene, background, b_view_layer);
BufferParams buffer_params = BlenderSync::get_buffer_params(
b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
b_render, b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);

/* temporary render result to find needed passes and views */
BL::RenderResult b_rr = begin_render_result(

@@ -809,7 +810,7 @@ void BlenderSession::synchronize(BL::Depsgraph &b_depsgraph_)

/* get buffer parameters */
BufferParams buffer_params = BlenderSync::get_buffer_params(
b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
b_render, b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);

if (!buffer_params.denoising_data_pass) {
session_params.denoising.use = false;

@@ -888,7 +889,7 @@ bool BlenderSession::draw(int w, int h)
SessionParams session_params = BlenderSync::get_session_params(
b_engine, b_userpref, b_scene, background);
BufferParams buffer_params = BlenderSync::get_buffer_params(
b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
b_render, b_v3d, b_rv3d, scene->camera, width, height, session_params.denoising.use);
bool session_pause = BlenderSync::get_session_pause(b_scene, background);

if (session_pause == false) {

@@ -906,7 +907,7 @@ bool BlenderSession::draw(int w, int h)

/* draw */
BufferParams buffer_params = BlenderSync::get_buffer_params(
b_v3d, b_rv3d, scene->camera, width, height, session->params.denoising.use);
b_render, b_v3d, b_rv3d, scene->camera, width, height, session->params.denoising.use);
DeviceDrawParams draw_params;

if (session->params.display_buffer_linear) {

@@ -1373,7 +1373,7 @@ void BlenderSync::sync_world(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d,
BlenderViewportParameters new_viewport_parameters(b_v3d);

if (world_recalc || update_all || b_world.ptr.data != world_map ||
viewport_parameters.shader_modified(new_viewport_parameters)) {
viewport_parameters.modified(new_viewport_parameters)) {
Shader *shader = scene->default_background;
ShaderGraph *graph = new ShaderGraph();

@@ -1501,8 +1501,8 @@ void BlenderSync::sync_world(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d,
background->set_transparent_roughness_threshold(0.0f);
}

background->set_use_shader(view_layer.use_background_shader ||
viewport_parameters.use_custom_shader());
background->set_use_shader(view_layer.use_background_shader |
viewport_parameters.custom_viewport_parameters());
background->set_use_ao(background->get_use_ao() && view_layer.use_background_ao);

background->tag_update(scene);

@@ -224,13 +224,9 @@ void BlenderSync::sync_recalc(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d

if (b_v3d) {
BlenderViewportParameters new_viewport_parameters(b_v3d);

if (viewport_parameters.shader_modified(new_viewport_parameters)) {
if (viewport_parameters.modified(new_viewport_parameters)) {
world_recalc = true;
has_updates_ = true;
}

has_updates_ |= viewport_parameters.modified(new_viewport_parameters);
}
}

@@ -250,7 +246,7 @@ void BlenderSync::sync_data(BL::RenderSettings &b_render,

BL::ViewLayer b_view_layer = b_depsgraph.view_layer_eval();

sync_view_layer(b_view_layer);
sync_view_layer(b_v3d, b_view_layer);
sync_integrator();
sync_film(b_v3d);
sync_shaders(b_depsgraph, b_v3d);

@@ -445,7 +441,7 @@ void BlenderSync::sync_film(BL::SpaceView3D &b_v3d)

/* Render Layer */

void BlenderSync::sync_view_layer(BL::ViewLayer &b_view_layer)
void BlenderSync::sync_view_layer(BL::SpaceView3D & /*b_v3d*/, BL::ViewLayer &b_view_layer)
{
view_layer.name = b_view_layer.name();

@@ -73,7 +73,7 @@ class BlenderSync {
int width,
int height,
void **python_thread_state);
void sync_view_layer(BL::ViewLayer &b_view_layer);
void sync_view_layer(BL::SpaceView3D &b_v3d, BL::ViewLayer &b_view_layer);
vector<Pass> sync_render_passes(BL::Scene &b_scene,
BL::RenderLayer &b_render_layer,
BL::ViewLayer &b_view_layer,

@@ -104,7 +104,8 @@ class BlenderSync {
bool background,
BL::ViewLayer b_view_layer = BL::ViewLayer(PointerRNA_NULL));
static bool get_session_pause(BL::Scene &b_scene, bool background);
static BufferParams get_buffer_params(BL::SpaceView3D &b_v3d,
static BufferParams get_buffer_params(BL::RenderSettings &b_render,
BL::SpaceView3D &b_v3d,
BL::RegionView3D &b_rv3d,
Camera *cam,
int width,

@@ -13,7 +13,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#include "blender_viewport.h"

#include "blender_util.h"

@@ -26,39 +25,29 @@ BlenderViewportParameters::BlenderViewportParameters()
studiolight_rotate_z(0.0f),
studiolight_intensity(1.0f),
studiolight_background_alpha(1.0f),
display_pass(PASS_COMBINED)
studiolight_path(ustring())
{
}

BlenderViewportParameters::BlenderViewportParameters(BL::SpaceView3D &b_v3d)
: BlenderViewportParameters()
{
if (!b_v3d) {
return;
}

BL::View3DShading shading = b_v3d.shading();
PointerRNA cshading = RNA_pointer_get(&shading.ptr, "cycles");

/* We only copy the shading parameters if we are in look dev mode. otherwise
/* We only copy the parameters if we are in look dev mode. otherwise
* defaults are being used. These defaults mimic normal render settings */
if (shading.type() == BL::View3DShading::type_RENDERED) {
use_scene_world = shading.use_scene_world_render();
use_scene_lights = shading.use_scene_lights_render();

if (b_v3d && b_v3d.shading().type() == BL::View3DShading::type_RENDERED) {
use_scene_world = b_v3d.shading().use_scene_world_render();
use_scene_lights = b_v3d.shading().use_scene_lights_render();
if (!use_scene_world) {
studiolight_rotate_z = shading.studiolight_rotate_z();
studiolight_intensity = shading.studiolight_intensity();
studiolight_background_alpha = shading.studiolight_background_alpha();
studiolight_path = shading.selected_studio_light().path();
studiolight_rotate_z = b_v3d.shading().studiolight_rotate_z();
studiolight_intensity = b_v3d.shading().studiolight_intensity();
studiolight_background_alpha = b_v3d.shading().studiolight_background_alpha();
studiolight_path = b_v3d.shading().selected_studio_light().path();
}
}

/* Film. */
display_pass = (PassType)get_enum(cshading, "render_pass", -1, -1);
}

bool BlenderViewportParameters::shader_modified(const BlenderViewportParameters &other) const
/* Check if two instances are different. */
const bool BlenderViewportParameters::modified(const BlenderViewportParameters &other) const
{
return use_scene_world != other.use_scene_world || use_scene_lights != other.use_scene_lights ||
studiolight_rotate_z != other.studiolight_rotate_z ||

@@ -67,26 +56,26 @@ bool BlenderViewportParameters::shader_modified(const BlenderViewportParameters
studiolight_path != other.studiolight_path;
}

bool BlenderViewportParameters::film_modified(const BlenderViewportParameters &other) const
{
return display_pass != other.display_pass;
}

bool BlenderViewportParameters::modified(const BlenderViewportParameters &other) const
{
return shader_modified(other) || film_modified(other);
}

bool BlenderViewportParameters::use_custom_shader() const
const bool BlenderViewportParameters::custom_viewport_parameters() const
{
return !(use_scene_world && use_scene_lights);
}

PassType BlenderViewportParameters::get_viewport_display_render_pass(BL::SpaceView3D &b_v3d)
{
PassType display_pass = PASS_NONE;
if (b_v3d) {
BL::View3DShading b_view3dshading = b_v3d.shading();
PointerRNA cshading = RNA_pointer_get(&b_view3dshading.ptr, "cycles");
display_pass = (PassType)get_enum(cshading, "render_pass", -1, -1);
}
return display_pass;
}

PassType update_viewport_display_passes(BL::SpaceView3D &b_v3d, vector<Pass> &passes)
{
if (b_v3d) {
const BlenderViewportParameters viewport_parameters(b_v3d);
const PassType display_pass = viewport_parameters.display_pass;
PassType display_pass = BlenderViewportParameters::get_viewport_display_render_pass(b_v3d);

passes.clear();
Pass::add(display_pass, passes);

@@ -18,18 +18,17 @@
#define __BLENDER_VIEWPORT_H__

#include "MEM_guardedalloc.h"

#include "RNA_access.h"
#include "RNA_blender_cpp.h"
#include "RNA_types.h"

#include "render/film.h"
#include "util/util_param.h"

CCL_NAMESPACE_BEGIN

class BlenderViewportParameters {
public:
/* Shader. */
private:
bool use_scene_world;
bool use_scene_lights;
float studiolight_rotate_z;

@@ -37,24 +36,17 @@ class BlenderViewportParameters {
float studiolight_background_alpha;
ustring studiolight_path;

/* Film. */
PassType display_pass;

BlenderViewportParameters();
explicit BlenderViewportParameters(BL::SpaceView3D &b_v3d);
BlenderViewportParameters(BL::SpaceView3D &b_v3d);

/* Check whether any of shading related settings are different from the given parameters. */
bool shader_modified(const BlenderViewportParameters &other) const;
const bool modified(const BlenderViewportParameters &other) const;
const bool custom_viewport_parameters() const;
friend class BlenderSync;

/* Check whether any of film related settings are different from the given parameters. */
bool film_modified(const BlenderViewportParameters &other) const;

/* Check whether any of settings are different from the given parameters. */
bool modified(const BlenderViewportParameters &other) const;

/* Returns truth when a custom shader defined by the viewport is to be used instead of the
* regular background shader or scene light. */
bool use_custom_shader() const;
public:
/* Retrieve the render pass that needs to be displayed on the given `SpaceView3D`
* When the `b_v3d` parameter is not given `PASS_NONE` will be returned. */
static PassType get_viewport_display_render_pass(BL::SpaceView3D &b_v3d);
};

PassType update_viewport_display_passes(BL::SpaceView3D &b_v3d, vector<Pass> &passes);

@@ -726,11 +726,7 @@ class OptiXDevice : public CUDADevice {
}
}
else if (task.type == DeviceTask::SHADER) {
// CUDA kernels are used when doing baking
if (optix_module == NULL)
CUDADevice::shader(task);
else
launch_shader_eval(task, thread_index);
launch_shader_eval(task, thread_index);
}
else if (task.type == DeviceTask::DENOISE_BUFFER) {
// Set up a single tile that covers the whole task and denoise it

@@ -195,108 +195,31 @@ ccl_device float2 regular_polygon_sample(float corners, float rotation, float u,

ccl_device float3 ensure_valid_reflection(float3 Ng, float3 I, float3 N)
{
float3 R = 2 * dot(N, I) * N - I;
float3 R;
float NI = dot(N, I);
float NgR, threshold;

/* Reflection rays may always be at least as shallow as the incoming ray. */
float threshold = min(0.9f * dot(Ng, I), 0.01f);
if (dot(Ng, R) >= threshold) {
return N;
}
/* Check if the incident ray is coming from behind normal N. */
if (NI > 0) {
/* Normal reflection */
R = (2 * NI) * N - I;
NgR = dot(Ng, R);

/* Form coordinate system with Ng as the Z axis and N inside the X-Z-plane.
* The X axis is found by normalizing the component of N that's orthogonal to Ng.
* The Y axis isn't actually needed.
*/
float NdotNg = dot(N, Ng);
float3 X = normalize(N - NdotNg * Ng);

/* Keep math expressions. */
/* clang-format off */
/* Calculate N.z and N.x in the local coordinate system.
*
* The goal of this computation is to find a N' that is rotated towards Ng just enough
* to lift R' above the threshold (here called t), therefore dot(R', Ng) = t.
*
* According to the standard reflection equation,
* this means that we want dot(2*dot(N', I)*N' - I, Ng) = t.
*
* Since the Z axis of our local coordinate system is Ng, dot(x, Ng) is just x.z, so we get
* 2*dot(N', I)*N'.z - I.z = t.
*
* The rotation is simple to express in the coordinate system we formed -
* since N lies in the X-Z-plane, we know that N' will also lie in the X-Z-plane,
* so N'.y = 0 and therefore dot(N', I) = N'.x*I.x + N'.z*I.z .
*
* Furthermore, we want N' to be normalized, so N'.x = sqrt(1 - N'.z^2).
*
* With these simplifications,
* we get the final equation 2*(sqrt(1 - N'.z^2)*I.x + N'.z*I.z)*N'.z - I.z = t.
*
* The only unknown here is N'.z, so we can solve for that.
*
* The equation has four solutions in general:
*
* N'.z = +-sqrt(0.5*(+-sqrt(I.x^2*(I.x^2 + I.z^2 - t^2)) + t*I.z + I.x^2 + I.z^2)/(I.x^2 + I.z^2))
* We can simplify this expression a bit by grouping terms:
*
* a = I.x^2 + I.z^2
* b = sqrt(I.x^2 * (a - t^2))
* c = I.z*t + a
* N'.z = +-sqrt(0.5*(+-b + c)/a)
*
* Two solutions can immediately be discarded because they're negative so N' would lie in the
* lower hemisphere.
*/
/* clang-format on */

float Ix = dot(I, X), Iz = dot(I, Ng);
float Ix2 = sqr(Ix), Iz2 = sqr(Iz);
float a = Ix2 + Iz2;

float b = safe_sqrtf(Ix2 * (a - sqr(threshold)));
float c = Iz * threshold + a;

/* Evaluate both solutions.
* In many cases one can be immediately discarded (if N'.z would be imaginary or larger than
* one), so check for that first. If no option is viable (might happen in extreme cases like N
* being in the wrong hemisphere), give up and return Ng. */
float fac = 0.5f / a;
float N1_z2 = fac * (b + c), N2_z2 = fac * (-b + c);
bool valid1 = (N1_z2 > 1e-5f) && (N1_z2 <= (1.0f + 1e-5f));
bool valid2 = (N2_z2 > 1e-5f) && (N2_z2 <= (1.0f + 1e-5f));

float2 N_new;
if (valid1 && valid2) {
/* If both are possible, do the expensive reflection-based check. */
float2 N1 = make_float2(safe_sqrtf(1.0f - N1_z2), safe_sqrtf(N1_z2));
float2 N2 = make_float2(safe_sqrtf(1.0f - N2_z2), safe_sqrtf(N2_z2));

float R1 = 2 * (N1.x * Ix + N1.y * Iz) * N1.y - Iz;
float R2 = 2 * (N2.x * Ix + N2.y * Iz) * N2.y - Iz;

valid1 = (R1 >= 1e-5f);
valid2 = (R2 >= 1e-5f);
if (valid1 && valid2) {
/* If both solutions are valid, return the one with the shallower reflection since it will be
* closer to the input (if the original reflection wasn't shallow, we would not be in this
* part of the function). */
N_new = (R1 < R2) ? N1 : N2;
/* Reflection rays may always be at least as shallow as the incoming ray. */
threshold = min(0.9f * dot(Ng, I), 0.01f);
if (NgR >= threshold) {
return N;
}
else {
/* If only one reflection is valid (= positive), pick that one. */
N_new = (R1 > R2) ? N1 : N2;
}
}
else if (valid1 || valid2) {
/* Only one solution passes the N'.z criterium, so pick that one. */
float Nz2 = valid1 ? N1_z2 : N2_z2;
N_new = make_float2(safe_sqrtf(1.0f - Nz2), safe_sqrtf(Nz2));
}
else {
return Ng;
/* Bad incident */
R = -I;
NgR = dot(Ng, R);
threshold = 0.01f;
}

return N_new.x * X + N_new.y * Ng;
R = R + Ng * (threshold - NgR); /* Lift the reflection above the threshold. */
return normalize(I * len(R) + R * len(I)); /* Find a bisector. */
}

CCL_NAMESPACE_END

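As a reading aid for the hunk above: both variants keep the reflection R of I around the shading normal N at least as shallow (relative to the geometric normal Ng) as the incoming ray; the shorter variant lifts R above the threshold and returns the bisector of I and the lifted R instead of solving the quadratic for N'.z. The following is a minimal self-contained sketch of that shorter variant; it uses a stand-in float3 type rather than Cycles' kernel types and is illustrative only, not the kernel source.

#include <algorithm>
#include <cmath>

struct float3 { float x, y, z; };

static float dot(const float3 &a, const float3 &b) { return a.x * b.x + a.y * b.y + a.z * b.z; }
static float3 operator*(float s, const float3 &v) { return {s * v.x, s * v.y, s * v.z}; }
static float3 operator*(const float3 &v, float s) { return {s * v.x, s * v.y, s * v.z}; }
static float3 operator+(const float3 &a, const float3 &b) { return {a.x + b.x, a.y + b.y, a.z + b.z}; }
static float3 operator-(const float3 &a, const float3 &b) { return {a.x - b.x, a.y - b.y, a.z - b.z}; }
static float3 operator-(const float3 &v) { return {-v.x, -v.y, -v.z}; }
static float len(const float3 &v) { return std::sqrt(dot(v, v)); }
static float3 normalize(const float3 &v) { float l = len(v); return {v.x / l, v.y / l, v.z / l}; }

/* Return a shading normal whose reflection of I stays above a small threshold
 * with respect to Ng (clamp-and-bisector variant, as in the hunk above). */
static float3 ensure_valid_reflection_sketch(float3 Ng, float3 I, float3 N)
{
  float3 R;
  float NgR, threshold;
  const float NI = dot(N, I);

  if (NI > 0.0f) {
    /* Normal reflection of I around N. */
    R = (2.0f * NI) * N - I;
    NgR = dot(Ng, R);
    /* Reflection rays may always be at least as shallow as the incoming ray. */
    threshold = std::min(0.9f * dot(Ng, I), 0.01f);
    if (NgR >= threshold) {
      return N; /* Already valid, keep the shading normal. */
    }
  }
  else {
    /* Incident ray comes from behind N. */
    R = -I;
    NgR = dot(Ng, R);
    threshold = 0.01f;
  }

  R = R + Ng * (threshold - NgR);             /* Lift the reflection above the threshold. */
  return normalize(I * len(R) + R * len(I));  /* Bisector of I and the lifted R. */
}
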
@@ -52,9 +52,6 @@ shader node_vector_math(string math_type = "add",
else if (math_type == "faceforward") {
Vector = compatible_faceforward(Vector1, Vector2, Vector3);
}
else if (math_type == "multiply_add") {
Vector = Vector1 * Vector2 + Vector3;
}
else if (math_type == "dot_product") {
Value = dot(Vector1, Vector2);
}

@@ -84,67 +84,30 @@ closure color principled_hair(normal N,
closure color henyey_greenstein(float g) BUILTIN;
closure color absorption() BUILTIN;

normal ensure_valid_reflection(normal Ng, vector I, normal N)
normal ensure_valid_reflection(normal Ng, normal I, normal N)
{
/* The implementation here mirrors the one in kernel_montecarlo.h,
* check there for an explanation of the algorithm. */
vector R;
float NI = dot(N, I);
float NgR, threshold;

float sqr(float x)
{
return x * x;
}

vector R = 2 * dot(N, I) * N - I;

float threshold = min(0.9 * dot(Ng, I), 0.01);
if (dot(Ng, R) >= threshold) {
return N;
}

float NdotNg = dot(N, Ng);
vector X = normalize(N - NdotNg * Ng);

float Ix = dot(I, X), Iz = dot(I, Ng);
float Ix2 = sqr(Ix), Iz2 = sqr(Iz);
float a = Ix2 + Iz2;

float b = sqrt(Ix2 * (a - sqr(threshold)));
float c = Iz * threshold + a;

float fac = 0.5 / a;
float N1_z2 = fac * (b + c), N2_z2 = fac * (-b + c);
int valid1 = (N1_z2 > 1e-5) && (N1_z2 <= (1.0 + 1e-5));
int valid2 = (N2_z2 > 1e-5) && (N2_z2 <= (1.0 + 1e-5));

float N_new_x, N_new_z;
if (valid1 && valid2) {
float N1_x = sqrt(1.0 - N1_z2), N1_z = sqrt(N1_z2);
float N2_x = sqrt(1.0 - N2_z2), N2_z = sqrt(N2_z2);

float R1 = 2 * (N1_x * Ix + N1_z * Iz) * N1_z - Iz;
float R2 = 2 * (N2_x * Ix + N2_z * Iz) * N2_z - Iz;

valid1 = (R1 >= 1e-5);
valid2 = (R2 >= 1e-5);
if (valid1 && valid2) {
N_new_x = (R1 < R2) ? N1_x : N2_x;
N_new_z = (R1 < R2) ? N1_z : N2_z;
if (NI > 0) {
R = (2 * NI) * N - I;
NgR = dot(Ng, R);
threshold = min(0.9 * dot(Ng, I), 0.01);
if (NgR >= threshold) {
return N;
}
else {
N_new_x = (R1 > R2) ? N1_x : N2_x;
N_new_z = (R1 > R2) ? N1_z : N2_z;
}
}
else if (valid1 || valid2) {
float Nz2 = valid1 ? N1_z2 : N2_z2;
N_new_x = sqrt(1.0 - Nz2);
N_new_z = sqrt(Nz2);
}
else {
return Ng;
R = -I;
NgR = dot(Ng, R);
threshold = 0.01;
}

return N_new_x * X + N_new_z * Ng;
R = R + Ng * (threshold - NgR);
return normalize(I * length(R) + R * length(I));
}

#endif /* CCL_STDOSL_H */

@@ -58,8 +58,7 @@ ccl_device void svm_node_vector_math(KernelGlobals *kg,
float3 vector;

/* 3 Vector Operators */
if (type == NODE_VECTOR_MATH_WRAP || type == NODE_VECTOR_MATH_FACEFORWARD ||
type == NODE_VECTOR_MATH_MULTIPLY_ADD) {
if (type == NODE_VECTOR_MATH_WRAP || type == NODE_VECTOR_MATH_FACEFORWARD) {
uint4 extra_node = read_node(kg, offset);
c = stack_load_float3(stack, extra_node.x);
}

@@ -52,9 +52,6 @@ ccl_device void svm_vector_math(float *value,
case NODE_VECTOR_MATH_FACEFORWARD:
*vector = faceforward(a, b, c);
break;
case NODE_VECTOR_MATH_MULTIPLY_ADD:
*vector = a * b + c;
break;
case NODE_VECTOR_MATH_DOT_PRODUCT:
*value = dot(a, b);
break;

@@ -341,7 +341,6 @@ typedef enum NodeVectorMathType {
NODE_VECTOR_MATH_TANGENT,
NODE_VECTOR_MATH_REFRACT,
NODE_VECTOR_MATH_FACEFORWARD,
NODE_VECTOR_MATH_MULTIPLY_ADD,
} NodeVectorMathType;

typedef enum NodeClampType {

@@ -654,7 +654,8 @@ static void update_attributes(AttributeSet &attributes, CachedData &cached_data,
list<Attribute>::iterator it;
for (it = attributes.attributes.begin(); it != attributes.attributes.end();) {
if (cached_attributes.find(&(*it)) == cached_attributes.end()) {
attributes.remove(it++);
attributes.attributes.erase(it++);
attributes.modified = true;
continue;
}

@@ -606,8 +606,7 @@ void read_geometry_data(AlembicProcedural *proc,

template<typename T> struct value_type_converter {
using cycles_type = float;
/* Use `TypeDesc::FLOAT` instead of `TypeFloat` to work around a compiler bug in gcc 11. */
static constexpr TypeDesc type_desc = TypeDesc::FLOAT;
static constexpr TypeDesc type_desc = TypeFloat;
static constexpr const char *type_name = "float (default)";

static cycles_type convert_value(T value)

@@ -383,23 +383,6 @@ AttributeStandard Attribute::name_standard(const char *name)
|
||||
return ATTR_STD_NONE;
|
||||
}
|
||||
|
||||
AttrKernelDataType Attribute::kernel_type(const Attribute &attr)
|
||||
{
|
||||
if (attr.element == ATTR_ELEMENT_CORNER) {
|
||||
return AttrKernelDataType::UCHAR4;
|
||||
}
|
||||
|
||||
if (attr.type == TypeDesc::TypeFloat) {
|
||||
return AttrKernelDataType::FLOAT;
|
||||
}
|
||||
|
||||
if (attr.type == TypeFloat2) {
|
||||
return AttrKernelDataType::FLOAT2;
|
||||
}
|
||||
|
||||
return AttrKernelDataType::FLOAT3;
|
||||
}
|
||||
|
||||
void Attribute::get_uv_tiles(Geometry *geom,
|
||||
AttributePrimitive prim,
|
||||
unordered_set<int> &tiles) const
|
||||
@@ -434,7 +417,7 @@ void Attribute::get_uv_tiles(Geometry *geom,
|
||||
/* Attribute Set */
|
||||
|
||||
AttributeSet::AttributeSet(Geometry *geometry, AttributePrimitive prim)
|
||||
: modified_flag(~0u), geometry(geometry), prim(prim)
|
||||
: geometry(geometry), prim(prim)
|
||||
{
|
||||
}
|
||||
|
||||
@@ -457,7 +440,7 @@ Attribute *AttributeSet::add(ustring name, TypeDesc type, AttributeElement eleme
|
||||
|
||||
Attribute new_attr(name, type, element, geometry, prim);
|
||||
attributes.emplace_back(std::move(new_attr));
|
||||
tag_modified(attributes.back());
|
||||
modified = true;
|
||||
return &attributes.back();
|
||||
}
|
||||
|
||||
@@ -479,7 +462,8 @@ void AttributeSet::remove(ustring name)
|
||||
|
||||
for (it = attributes.begin(); it != attributes.end(); it++) {
|
||||
if (&*it == attr) {
|
||||
remove(it);
|
||||
modified = true;
|
||||
attributes.erase(it);
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -624,7 +608,8 @@ void AttributeSet::remove(AttributeStandard std)
|
||||
|
||||
for (it = attributes.begin(); it != attributes.end(); it++) {
|
||||
if (&*it == attr) {
|
||||
remove(it);
|
||||
modified = true;
|
||||
attributes.erase(it);
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -649,12 +634,6 @@ void AttributeSet::remove(Attribute *attribute)
|
||||
}
|
||||
}
|
||||
|
||||
void AttributeSet::remove(list<Attribute>::iterator it)
|
||||
{
|
||||
tag_modified(*it);
|
||||
attributes.erase(it);
|
||||
}
|
||||
|
||||
void AttributeSet::resize(bool reserve_only)
|
||||
{
|
||||
foreach (Attribute &attr, attributes) {
|
||||
@@ -695,13 +674,15 @@ void AttributeSet::update(AttributeSet &&new_attributes)
|
||||
for (it = attributes.begin(); it != attributes.end();) {
|
||||
if (it->std != ATTR_STD_NONE) {
|
||||
if (new_attributes.find(it->std) == nullptr) {
|
||||
remove(it++);
|
||||
modified = true;
|
||||
attributes.erase(it++);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
else if (it->name != "") {
|
||||
if (new_attributes.find(it->name) == nullptr) {
|
||||
remove(it++);
|
||||
modified = true;
|
||||
attributes.erase(it++);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@@ -718,27 +699,7 @@ void AttributeSet::clear_modified()
|
||||
foreach (Attribute &attr, attributes) {
|
||||
attr.modified = false;
|
||||
}
|
||||
|
||||
modified_flag = 0;
|
||||
}
|
||||
|
||||
void AttributeSet::tag_modified(const Attribute &attr)
|
||||
{
|
||||
/* Some attributes are not stored in the various kernel attribute arrays
|
||||
* (DeviceScene::attribute_*), so the modified flags are only set if the associated standard
|
||||
* corresponds to an attribute which will be stored in the kernel's attribute arrays. */
|
||||
const bool modifies_device_array = (attr.std != ATTR_STD_FACE_NORMAL &&
|
||||
attr.std != ATTR_STD_VERTEX_NORMAL);
|
||||
|
||||
if (modifies_device_array) {
|
||||
AttrKernelDataType kernel_type = Attribute::kernel_type(attr);
|
||||
modified_flag |= (1u << kernel_type);
|
||||
}
|
||||
}
|
||||
|
||||
bool AttributeSet::modified(AttrKernelDataType kernel_type) const
|
||||
{
|
||||
return (modified_flag & (1u << kernel_type)) != 0;
|
||||
modified = false;
|
||||
}
|
||||
|
||||
/* AttributeRequest */
|
||||
|
@@ -39,21 +39,6 @@ class Hair;
|
||||
class Mesh;
|
||||
struct Transform;
|
||||
|
||||
/* AttrKernelDataType.
|
||||
*
|
||||
* The data type of the device arrays storing the attribute's data. Those data types are different
|
||||
* than the ones for attributes as some attribute types are stored in the same array, e.g. Point,
|
||||
* Vector, and Transform are all stored as float3 in the kernel.
|
||||
*
|
||||
* The values of this enumeration are also used as flags to detect changes in AttributeSet. */
|
||||
|
||||
enum AttrKernelDataType {
|
||||
FLOAT = 0,
|
||||
FLOAT2 = 1,
|
||||
FLOAT3 = 2,
|
||||
UCHAR4 = 3,
|
||||
};
|
||||
|
||||
/* Attribute
|
||||
*
|
||||
* Arbitrary data layers on meshes.
|
||||
@@ -182,8 +167,6 @@ class Attribute {
|
||||
static const char *standard_name(AttributeStandard std);
|
||||
static AttributeStandard name_standard(const char *name);
|
||||
|
||||
static AttrKernelDataType kernel_type(const Attribute &attr);
|
||||
|
||||
void get_uv_tiles(Geometry *geom, AttributePrimitive prim, unordered_set<int> &tiles) const;
|
||||
};
|
||||
|
||||
@@ -192,12 +175,11 @@ class Attribute {
|
||||
* Set of attributes on a mesh. */
|
||||
|
||||
class AttributeSet {
|
||||
uint32_t modified_flag;
|
||||
|
||||
public:
|
||||
Geometry *geometry;
|
||||
AttributePrimitive prim;
|
||||
list<Attribute> attributes;
|
||||
bool modified = true;
|
||||
|
||||
AttributeSet(Geometry *geometry, AttributePrimitive prim);
|
||||
AttributeSet(AttributeSet &&) = default;
|
||||
@@ -215,8 +197,6 @@ class AttributeSet {
|
||||
|
||||
void remove(Attribute *attribute);
|
||||
|
||||
void remove(list<Attribute>::iterator it);
|
||||
|
||||
void resize(bool reserve_only = false);
|
||||
void clear(bool preserve_voxel_data = false);
|
||||
|
||||
@@ -224,18 +204,7 @@ class AttributeSet {
|
||||
* and remove any attribute not found on the new set from this. */
|
||||
void update(AttributeSet &&new_attributes);
|
||||
|
||||
/* Return whether the attributes of the given kernel_type are modified, where "modified" means
|
||||
* that some attributes of the given type were added or removed from this AttributeSet. This does
|
||||
* not mean that the data of the remaining attributes in this AttributeSet were also modified. To
|
||||
* check this, use Attribute.modified. */
|
||||
bool modified(AttrKernelDataType kernel_type) const;
|
||||
|
||||
void clear_modified();
|
||||
|
||||
private:
|
||||
/* Set the relevant modified flag for the attribute. Only attributes that are stored in device
|
||||
* arrays will be considered for tagging this AttributeSet as modified. */
|
||||
void tag_modified(const Attribute &attr);
|
||||
};
|
||||
|
||||
/* AttributeRequest
|
||||
|
@@ -830,13 +830,10 @@ void GeometryManager::device_update_attributes(Device *device,
|
||||
dscene->attributes_float3.alloc(attr_float3_size);
|
||||
dscene->attributes_uchar4.alloc(attr_uchar4_size);
|
||||
|
||||
/* The order of those flags needs to match that of AttrKernelDataType. */
|
||||
const bool attributes_need_realloc[4] = {
|
||||
dscene->attributes_float.need_realloc(),
|
||||
dscene->attributes_float2.need_realloc(),
|
||||
dscene->attributes_float3.need_realloc(),
|
||||
dscene->attributes_uchar4.need_realloc(),
|
||||
};
|
||||
const bool copy_all_data = dscene->attributes_float.need_realloc() ||
|
||||
dscene->attributes_float2.need_realloc() ||
|
||||
dscene->attributes_float3.need_realloc() ||
|
||||
dscene->attributes_uchar4.need_realloc();
|
||||
|
||||
size_t attr_float_offset = 0;
|
||||
size_t attr_float2_offset = 0;
|
||||
@@ -855,7 +852,7 @@ void GeometryManager::device_update_attributes(Device *device,
|
||||
|
||||
if (attr) {
|
||||
/* force a copy if we need to reallocate all the data */
|
||||
attr->modified |= attributes_need_realloc[Attribute::kernel_type(*attr)];
|
||||
attr->modified |= copy_all_data;
|
||||
}
|
||||
|
||||
update_attribute_element_offset(geom,
|
||||
@@ -878,7 +875,7 @@ void GeometryManager::device_update_attributes(Device *device,
|
||||
|
||||
if (subd_attr) {
|
||||
/* force a copy if we need to reallocate all the data */
|
||||
subd_attr->modified |= attributes_need_realloc[Attribute::kernel_type(*subd_attr)];
|
||||
subd_attr->modified |= copy_all_data;
|
||||
}
|
||||
|
||||
update_attribute_element_offset(mesh,
|
||||
@@ -909,10 +906,6 @@ void GeometryManager::device_update_attributes(Device *device,
|
||||
foreach (AttributeRequest &req, attributes.requests) {
|
||||
Attribute *attr = values.find(req);
|
||||
|
||||
if (attr) {
|
||||
attr->modified |= attributes_need_realloc[Attribute::kernel_type(*attr)];
|
||||
}
|
||||
|
||||
update_attribute_element_offset(object->geometry,
|
||||
dscene->attributes_float,
|
||||
attr_float_offset,
|
||||
@@ -948,10 +941,10 @@ void GeometryManager::device_update_attributes(Device *device,
|
||||
/* copy to device */
|
||||
progress.set_status("Updating Mesh", "Copying Attributes to device");
|
||||
|
||||
dscene->attributes_float.copy_to_device_if_modified();
|
||||
dscene->attributes_float2.copy_to_device_if_modified();
|
||||
dscene->attributes_float3.copy_to_device_if_modified();
|
||||
dscene->attributes_uchar4.copy_to_device_if_modified();
|
||||
dscene->attributes_float.copy_to_device();
|
||||
dscene->attributes_float2.copy_to_device();
|
||||
dscene->attributes_float3.copy_to_device();
|
||||
dscene->attributes_uchar4.copy_to_device();
|
||||
|
||||
if (progress.get_cancel())
|
||||
return;
|
||||
@@ -1438,43 +1431,21 @@ static void update_device_flags_attribute(uint32_t &device_update_flags,
|
||||
continue;
|
||||
}
|
||||
|
||||
AttrKernelDataType kernel_type = Attribute::kernel_type(attr);
|
||||
|
||||
switch (kernel_type) {
|
||||
case AttrKernelDataType::FLOAT: {
|
||||
device_update_flags |= ATTR_FLOAT_MODIFIED;
|
||||
break;
|
||||
}
|
||||
case AttrKernelDataType::FLOAT2: {
|
||||
device_update_flags |= ATTR_FLOAT2_MODIFIED;
|
||||
break;
|
||||
}
|
||||
case AttrKernelDataType::FLOAT3: {
|
||||
device_update_flags |= ATTR_FLOAT3_MODIFIED;
|
||||
break;
|
||||
}
|
||||
case AttrKernelDataType::UCHAR4: {
|
||||
device_update_flags |= ATTR_UCHAR4_MODIFIED;
|
||||
break;
|
||||
}
|
||||
if (attr.element == ATTR_ELEMENT_CORNER) {
|
||||
device_update_flags |= ATTR_UCHAR4_MODIFIED;
|
||||
}
|
||||
else if (attr.type == TypeDesc::TypeFloat) {
|
||||
device_update_flags |= ATTR_FLOAT_MODIFIED;
|
||||
}
|
||||
else if (attr.type == TypeFloat2) {
|
||||
device_update_flags |= ATTR_FLOAT2_MODIFIED;
|
||||
}
|
||||
else if (attr.type == TypeDesc::TypeMatrix) {
|
||||
device_update_flags |= ATTR_FLOAT3_MODIFIED;
|
||||
}
|
||||
else if (attr.element != ATTR_ELEMENT_VOXEL) {
|
||||
device_update_flags |= ATTR_FLOAT3_MODIFIED;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static void update_attribute_realloc_flags(uint32_t &device_update_flags,
|
||||
const AttributeSet &attributes)
|
||||
{
|
||||
if (attributes.modified(AttrKernelDataType::FLOAT)) {
|
||||
device_update_flags |= ATTR_FLOAT_NEEDS_REALLOC;
|
||||
}
|
||||
if (attributes.modified(AttrKernelDataType::FLOAT2)) {
|
||||
device_update_flags |= ATTR_FLOAT2_NEEDS_REALLOC;
|
||||
}
|
||||
if (attributes.modified(AttrKernelDataType::FLOAT3)) {
|
||||
device_update_flags |= ATTR_FLOAT3_NEEDS_REALLOC;
|
||||
}
|
||||
if (attributes.modified(AttrKernelDataType::UCHAR4)) {
|
||||
device_update_flags |= ATTR_UCHAR4_NEEDS_REALLOC;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1500,11 +1471,16 @@ void GeometryManager::device_update_preprocess(Device *device, Scene *scene, Pro
|
||||
foreach (Geometry *geom, scene->geometry) {
|
||||
geom->has_volume = false;
|
||||
|
||||
update_attribute_realloc_flags(device_update_flags, geom->attributes);
|
||||
if (geom->attributes.modified) {
|
||||
device_update_flags |= ATTRS_NEED_REALLOC;
|
||||
}
|
||||
|
||||
if (geom->is_mesh()) {
|
||||
Mesh *mesh = static_cast<Mesh *>(geom);
|
||||
update_attribute_realloc_flags(device_update_flags, mesh->subd_attributes);
|
||||
|
||||
if (mesh->subd_attributes.modified) {
|
||||
device_update_flags |= ATTRS_NEED_REALLOC;
|
||||
}
|
||||
}
|
||||
|
||||
foreach (Node *node, geom->get_used_shaders()) {
|
||||
@@ -2063,7 +2039,7 @@ void GeometryManager::device_update(Device *device,
|
||||
* for meshes with correct bounding boxes.
|
||||
*
|
||||
* This wouldn't cause wrong results, just true
|
||||
* displacement might be less optimal to calculate.
|
||||
* displacement might be less optimal ot calculate.
|
||||
*/
|
||||
scene->object_manager->need_flags_update = old_need_object_flags_update;
|
||||
}
|
||||
|
@@ -6093,7 +6093,6 @@ NODE_DEFINE(VectorMathNode)
|
||||
type_enum.insert("reflect", NODE_VECTOR_MATH_REFLECT);
|
||||
type_enum.insert("refract", NODE_VECTOR_MATH_REFRACT);
|
||||
type_enum.insert("faceforward", NODE_VECTOR_MATH_FACEFORWARD);
|
||||
type_enum.insert("multiply_add", NODE_VECTOR_MATH_MULTIPLY_ADD);
|
||||
|
||||
type_enum.insert("dot_product", NODE_VECTOR_MATH_DOT_PRODUCT);
|
||||
|
||||
@@ -6166,8 +6165,7 @@ void VectorMathNode::compile(SVMCompiler &compiler)
|
||||
int vector_stack_offset = compiler.stack_assign_if_linked(vector_out);
|
||||
|
||||
/* 3 Vector Operators */
|
||||
if (math_type == NODE_VECTOR_MATH_WRAP || math_type == NODE_VECTOR_MATH_FACEFORWARD ||
|
||||
math_type == NODE_VECTOR_MATH_MULTIPLY_ADD) {
|
||||
if (math_type == NODE_VECTOR_MATH_WRAP || math_type == NODE_VECTOR_MATH_FACEFORWARD) {
|
||||
ShaderInput *vector3_in = input("Vector3");
|
||||
int vector3_stack_offset = compiler.stack_assign(vector3_in);
|
||||
compiler.add_node(
|
||||
|
@@ -32,7 +32,7 @@ typedef function<void(void)> TaskRunFunction;

/* Task Pool
*
* Pool of tasks that will be executed by the central TaskScheduler. For each
* Pool of tasks that will be executed by the central TaskScheduler.For each
* pool, we can wait for all tasks to be done, or cancel them before they are
* done.
*

@@ -77,7 +77,7 @@ class TaskPool {

/* Task Scheduler
*
* Central scheduler that holds running threads ready to execute tasks. A single
* Central scheduler that holds running threads ready to execute tasks. A singe
* queue holds the task from all pools. */

class TaskScheduler {

@@ -43,71 +43,41 @@
# define FFMPEG_INLINE static inline
#endif

#if (LIBAVFORMAT_VERSION_MAJOR < 58) || \
((LIBAVFORMAT_VERSION_MAJOR == 58) && (LIBAVFORMAT_VERSION_MINOR < 76))
# define FFMPEG_USE_DURATION_WORKAROUND 1

/* Before ffmpeg 4.4, package duration calculation used depricated variables to calculate the
* packet duration. Use the function from commit
* github.com/FFmpeg/FFmpeg/commit/1c0885334dda9ee8652e60c586fa2e3674056586
* to calculate the correct framerate for ffmpeg < 4.4.
*/

FFMPEG_INLINE
void my_guess_pkt_duration(AVFormatContext *s, AVStream *st, AVPacket *pkt)
void my_update_cur_dts(AVFormatContext *s, AVStream *ref_st, int64_t timestamp)
{
if (pkt->duration < 0 && st->codecpar->codec_type != AVMEDIA_TYPE_SUBTITLE) {
av_log(s,
AV_LOG_WARNING,
"Packet with invalid duration %" PRId64 " in stream %d\n",
pkt->duration,
pkt->stream_index);
pkt->duration = 0;
}
int i;

if (pkt->duration) {
return;
}
for (i = 0; i < s->nb_streams; i++) {
AVStream *st = s->streams[i];

switch (st->codecpar->codec_type) {
case AVMEDIA_TYPE_VIDEO:
if (st->avg_frame_rate.num > 0 && st->avg_frame_rate.den > 0) {
pkt->duration = av_rescale_q(1, av_inv_q(st->avg_frame_rate), st->time_base);
}
else if (st->time_base.num * 1000LL > st->time_base.den) {
pkt->duration = 1;
}
break;
case AVMEDIA_TYPE_AUDIO: {
int frame_size = av_get_audio_frame_duration2(st->codecpar, pkt->size);
if (frame_size && st->codecpar->sample_rate) {
pkt->duration = av_rescale_q(
frame_size, (AVRational){1, st->codecpar->sample_rate}, st->time_base);
}
break;
}
default:
break;
st->cur_dts = av_rescale(timestamp,
st->time_base.den * (int64_t)ref_st->time_base.num,
st->time_base.num * (int64_t)ref_st->time_base.den);
}
}
#endif

FFMPEG_INLINE
int64_t timestamp_from_pts_or_dts(int64_t pts, int64_t dts)
void av_update_cur_dts(AVFormatContext *s, AVStream *ref_st, int64_t timestamp)
{
/* Some videos do not have any pts values, use dts instead in those cases if
* possible. Usually when this happens dts can act as pts because as all frames
* should then be presented in their decoded in order. IE pts == dts. */
my_update_cur_dts(s, ref_st, timestamp);
}

FFMPEG_INLINE
int64_t av_get_pts_from_frame(AVFormatContext *avctx, AVFrame *picture)
{
int64_t pts;
pts = picture->pts;

if (pts == AV_NOPTS_VALUE) {
return dts;
pts = picture->pkt_dts;
}
if (pts == AV_NOPTS_VALUE) {
pts = 0;
}
return pts;
}

FFMPEG_INLINE
int64_t av_get_pts_from_frame(AVFrame *picture)
{
return timestamp_from_pts_or_dts(picture->pts, picture->pkt_dts);
(void)avctx;
return pts;
}

/* -------------------------------------------------------------------- */

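A brief usage sketch for the compatibility layer above, illustrative only: on FFmpeg builds older than 4.4 the FFMPEG_USE_DURATION_WORKAROUND guard is defined and my_guess_pkt_duration() fills in pkt->duration before muxing, while newer libavformat does this itself. The wrapper name below is hypothetical and assumes the compat header shown above is already included.

/* Hypothetical write helper; assumes the compat header shown above is included first. */
extern "C" {
#include <libavformat/avformat.h>
}

static int mux_packet(AVFormatContext *fmt_ctx, AVStream *stream, AVPacket *pkt)
{
#ifdef FFMPEG_USE_DURATION_WORKAROUND
  /* ffmpeg < 4.4: compute pkt->duration ourselves before writing. */
  my_guess_pkt_duration(fmt_ctx, stream, pkt);
#else
  (void)stream; /* only needed by the workaround path */
#endif
  return av_interleaved_write_frame(fmt_ctx, pkt);
}
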
@@ -282,7 +282,6 @@ elseif(WITH_GHOST_X11 OR WITH_GHOST_WAYLAND)
${wayland-egl_INCLUDE_DIRS}
${xkbcommon_INCLUDE_DIRS}
${wayland-cursor_INCLUDE_DIRS}
${dbus_INCLUDE_DIRS}
)

list(APPEND SRC

@@ -322,11 +321,6 @@ elseif(WITH_GHOST_X11 OR WITH_GHOST_WAYLAND)
xdg-shell
"${WAYLAND_PROTOCOLS_DIR}/stable/xdg-shell/xdg-shell.xml"
)
# xdg-decoration.
generate_protocol_bindings(
xdg-decoration
"${WAYLAND_PROTOCOLS_DIR}/unstable/xdg-decoration/xdg-decoration-unstable-v1.xml"
)
# Pointer-constraints.
generate_protocol_bindings(
pointer-constraints

@@ -40,7 +40,6 @@
|
||||
#include <unordered_map>
|
||||
#include <unordered_set>
|
||||
|
||||
#include "GHOST_WaylandCursorSettings.h"
|
||||
#include <pointer-constraints-client-protocol.h>
|
||||
#include <relative-pointer-client-protocol.h>
|
||||
#include <wayland-cursor.h>
|
||||
@@ -53,6 +52,15 @@
|
||||
|
||||
#include <cstring>
|
||||
|
||||
struct output_t {
|
||||
struct wl_output *output;
|
||||
int32_t width, height;
|
||||
int transform;
|
||||
int scale;
|
||||
std::string make;
|
||||
std::string model;
|
||||
};
|
||||
|
||||
struct buffer_t {
|
||||
void *data;
|
||||
size_t size;
|
||||
@@ -64,12 +72,6 @@ struct cursor_t {
|
||||
struct wl_buffer *buffer;
|
||||
struct wl_cursor_image image;
|
||||
struct buffer_t *file_buffer = nullptr;
|
||||
struct wl_cursor_theme *theme = nullptr;
|
||||
int size;
|
||||
std::string theme_name;
|
||||
// outputs on which the cursor is visible
|
||||
std::unordered_set<const output_t *> outputs;
|
||||
int scale = 1;
|
||||
};
|
||||
|
||||
struct data_offer_t {
|
||||
@@ -140,14 +142,10 @@ struct display_t {
|
||||
struct wl_display *display;
|
||||
struct wl_compositor *compositor = nullptr;
|
||||
struct xdg_wm_base *xdg_shell = nullptr;
|
||||
struct zxdg_decoration_manager_v1 *xdg_decoration_manager = nullptr;
|
||||
struct wl_shm *shm = nullptr;
|
||||
std::vector<output_t *> outputs;
|
||||
std::vector<input_t *> inputs;
|
||||
struct {
|
||||
std::string theme;
|
||||
int size;
|
||||
} cursor;
|
||||
struct wl_cursor_theme *cursor_theme = nullptr;
|
||||
struct wl_data_device_manager *data_device_manager = nullptr;
|
||||
struct zwp_relative_pointer_manager_v1 *relative_pointer_manager = nullptr;
|
||||
struct zwp_pointer_constraints_v1 *pointer_constraints = nullptr;
|
||||
@@ -156,8 +154,6 @@ struct display_t {
|
||||
std::vector<struct wl_egl_window *> os_egl_windows;
|
||||
};
|
||||
|
||||
static GHOST_WindowManager *window_manager = nullptr;
|
||||
|
||||
static void display_destroy(display_t *d)
|
||||
{
|
||||
if (d->data_device_manager) {
|
||||
@@ -192,9 +188,6 @@ static void display_destroy(display_t *d)
|
||||
if (input->cursor.surface) {
|
||||
wl_surface_destroy(input->cursor.surface);
|
||||
}
|
||||
if (input->cursor.theme) {
|
||||
wl_cursor_theme_destroy(input->cursor.theme);
|
||||
}
|
||||
if (input->pointer) {
|
||||
wl_pointer_destroy(input->pointer);
|
||||
}
|
||||
@@ -217,6 +210,10 @@ static void display_destroy(display_t *d)
|
||||
delete input;
|
||||
}
|
||||
|
||||
if (d->cursor_theme) {
|
||||
wl_cursor_theme_destroy(d->cursor_theme);
|
||||
}
|
||||
|
||||
if (d->shm) {
|
||||
wl_shm_destroy(d->shm);
|
||||
}
|
||||
@@ -241,10 +238,6 @@ static void display_destroy(display_t *d)
|
||||
wl_compositor_destroy(d->compositor);
|
||||
}
|
||||
|
||||
if (d->xdg_decoration_manager) {
|
||||
zxdg_decoration_manager_v1_destroy(d->xdg_decoration_manager);
|
||||
}
|
||||
|
||||
if (d->xdg_shell) {
|
||||
xdg_wm_base_destroy(d->xdg_shell);
|
||||
}
|
||||
@@ -485,9 +478,7 @@ static void dnd_events(const input_t *const input, const GHOST_TEventType event)
|
||||
static std::string read_pipe(data_offer_t *data_offer, const std::string mime_receive)
|
||||
{
|
||||
int pipefd[2];
|
||||
if (pipe(pipefd) != 0) {
|
||||
return {};
|
||||
}
|
||||
pipe(pipefd);
|
||||
wl_data_offer_receive(data_offer->id, mime_receive.c_str(), pipefd[1]);
|
||||
close(pipefd[1]);
|
||||
|
||||
@@ -522,9 +513,7 @@ static void data_source_send(void *data,
|
||||
int32_t fd)
|
||||
{
|
||||
const char *const buffer = static_cast<char *>(data);
|
||||
if (write(fd, buffer, strlen(buffer) + 1) < 0) {
|
||||
GHOST_PRINT("error writing to clipboard: " << std::strerror(errno) << std::endl);
|
||||
}
|
||||
write(fd, buffer, strlen(buffer) + 1);
|
||||
close(fd);
|
||||
}
|
||||
|
||||
@@ -800,80 +789,13 @@ static void cursor_buffer_release(void *data, struct wl_buffer *wl_buffer)
|
||||
cursor_t *cursor = static_cast<cursor_t *>(data);
|
||||
|
||||
wl_buffer_destroy(wl_buffer);
|
||||
|
||||
if (wl_buffer == cursor->buffer) {
|
||||
/* the mapped buffer was from a custom cursor */
|
||||
cursor->buffer = nullptr;
|
||||
}
|
||||
cursor->buffer = nullptr;
|
||||
}
|
||||
|
||||
const struct wl_buffer_listener cursor_buffer_listener = {
|
||||
cursor_buffer_release,
|
||||
};
|
||||
|
||||
static GHOST_IWindow *get_window(struct wl_surface *surface)
|
||||
{
|
||||
if (!surface) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
for (GHOST_IWindow *win : window_manager->getWindows()) {
|
||||
if (surface == static_cast<const GHOST_WindowWayland *>(win)->surface()) {
|
||||
return win;
|
||||
}
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
static bool update_cursor_scale(cursor_t &cursor, wl_shm *shm)
|
||||
{
|
||||
int scale = 0;
|
||||
for (const output_t *output : cursor.outputs) {
|
||||
if (output->scale > scale)
|
||||
scale = output->scale;
|
||||
}
|
||||
|
||||
if (scale > 0 && cursor.scale != scale) {
|
||||
cursor.scale = scale;
|
||||
wl_surface_set_buffer_scale(cursor.surface, scale);
|
||||
wl_cursor_theme_destroy(cursor.theme);
|
||||
cursor.theme = wl_cursor_theme_load(cursor.theme_name.c_str(), scale * cursor.size, shm);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static void cursor_surface_enter(void *data,
|
||||
struct wl_surface * /*wl_surface*/,
|
||||
struct wl_output *output)
|
||||
{
|
||||
input_t *input = static_cast<input_t *>(data);
|
||||
for (const output_t *reg_output : input->system->outputs()) {
|
||||
if (reg_output->output == output) {
|
||||
input->cursor.outputs.insert(reg_output);
|
||||
}
|
||||
}
|
||||
update_cursor_scale(input->cursor, input->system->shm());
|
||||
}
|
||||
|
||||
static void cursor_surface_leave(void *data,
|
||||
struct wl_surface * /*wl_surface*/,
|
||||
struct wl_output *output)
|
||||
{
|
||||
input_t *input = static_cast<input_t *>(data);
|
||||
for (const output_t *reg_output : input->system->outputs()) {
|
||||
if (reg_output->output == output) {
|
||||
input->cursor.outputs.erase(reg_output);
|
||||
}
|
||||
}
|
||||
update_cursor_scale(input->cursor, input->system->shm());
|
||||
}
|
||||
|
||||
struct wl_surface_listener cursor_surface_listener = {
|
||||
cursor_surface_enter,
|
||||
cursor_surface_leave,
|
||||
};
|
||||
|
||||
static void pointer_enter(void *data,
|
||||
struct wl_pointer * /*wl_pointer*/,
|
||||
uint32_t serial,
|
||||
@@ -881,28 +803,22 @@ static void pointer_enter(void *data,
|
||||
wl_fixed_t surface_x,
|
||||
wl_fixed_t surface_y)
|
||||
{
|
||||
GHOST_WindowWayland *win = static_cast<GHOST_WindowWayland *>(get_window(surface));
|
||||
|
||||
if (!win) {
|
||||
if (!surface) {
|
||||
return;
|
||||
}
|
||||
|
||||
win->activate();
|
||||
|
||||
input_t *input = static_cast<input_t *>(data);
|
||||
input->pointer_serial = serial;
|
||||
input->x = win->scale() * wl_fixed_to_int(surface_x);
|
||||
input->y = win->scale() * wl_fixed_to_int(surface_y);
|
||||
input->x = wl_fixed_to_int(surface_x);
|
||||
input->y = wl_fixed_to_int(surface_y);
|
||||
input->focus_pointer = surface;
|
||||
|
||||
win->setCursorShape(win->getCursorShape());
|
||||
|
||||
input->system->pushEvent(new GHOST_EventCursor(input->system->getMilliSeconds(),
|
||||
GHOST_kEventCursorMove,
|
||||
static_cast<GHOST_WindowWayland *>(win),
|
||||
input->x,
|
||||
input->y,
|
||||
GHOST_TABLET_DATA_NONE));
|
||||
input->system->pushEvent(
|
||||
new GHOST_EventCursor(input->system->getMilliSeconds(),
|
||||
GHOST_kEventCursorMove,
|
||||
static_cast<GHOST_WindowWayland *>(wl_surface_get_user_data(surface)),
|
||||
input->x,
|
||||
input->y,
|
||||
GHOST_TABLET_DATA_NONE));
|
||||
}
|
||||
|
||||
static void pointer_leave(void *data,
|
||||
@@ -910,14 +826,9 @@ static void pointer_leave(void *data,
|
||||
uint32_t /*serial*/,
|
||||
struct wl_surface *surface)
|
||||
{
|
||||
GHOST_IWindow *win = get_window(surface);
|
||||
|
||||
if (!win) {
|
||||
return;
|
||||
if (surface != nullptr) {
|
||||
static_cast<input_t *>(data)->focus_pointer = nullptr;
|
||||
}
|
||||
|
||||
static_cast<input_t *>(data)->focus_pointer = nullptr;
|
||||
static_cast<GHOST_WindowWayland *>(win)->deactivate();
|
||||
}
|
||||
|
||||
static void pointer_motion(void *data,
|
||||
@@ -928,20 +839,21 @@ static void pointer_motion(void *data,
|
||||
{
|
||||
input_t *input = static_cast<input_t *>(data);
|
||||
|
||||
GHOST_WindowWayland *win = static_cast<GHOST_WindowWayland *>(get_window(input->focus_pointer));
|
||||
GHOST_IWindow *win = static_cast<GHOST_WindowWayland *>(
|
||||
wl_surface_get_user_data(input->focus_pointer));
|
||||
|
||||
if (!win) {
|
||||
return;
|
||||
}
|
||||
|
||||
input->x = win->scale() * wl_fixed_to_int(surface_x);
|
||||
input->y = win->scale() * wl_fixed_to_int(surface_y);
|
||||
input->x = wl_fixed_to_int(surface_x);
|
||||
input->y = wl_fixed_to_int(surface_y);
|
||||
|
||||
input->system->pushEvent(new GHOST_EventCursor(input->system->getMilliSeconds(),
|
||||
GHOST_kEventCursorMove,
|
||||
win,
|
||||
input->x,
|
||||
input->y,
|
||||
wl_fixed_to_int(surface_x),
|
||||
wl_fixed_to_int(surface_y),
|
||||
GHOST_TABLET_DATA_NONE));
|
||||
}
|
||||
|
||||
@@ -952,14 +864,6 @@ static void pointer_button(void *data,
|
||||
uint32_t button,
|
||||
uint32_t state)
|
||||
{
|
||||
input_t *input = static_cast<input_t *>(data);
|
||||
|
||||
GHOST_IWindow *win = get_window(input->focus_pointer);
|
||||
|
||||
if (!win) {
|
||||
return;
|
||||
}
|
||||
|
||||
GHOST_TEventType etype = GHOST_kEventUnknown;
|
||||
switch (state) {
|
||||
case WL_POINTER_BUTTON_STATE_RELEASED:
|
||||
@@ -983,6 +887,9 @@ static void pointer_button(void *data,
|
||||
break;
|
||||
}
|
||||
|
||||
input_t *input = static_cast<input_t *>(data);
|
||||
GHOST_IWindow *win = static_cast<GHOST_WindowWayland *>(
|
||||
wl_surface_get_user_data(input->focus_pointer));
|
||||
input->data_source->source_serial = serial;
|
||||
input->buttons.set(ebutton, state == WL_POINTER_BUTTON_STATE_PRESSED);
|
||||
input->system->pushEvent(new GHOST_EventButton(
|
||||
@@ -995,18 +902,12 @@ static void pointer_axis(void *data,
|
||||
uint32_t axis,
|
||||
wl_fixed_t value)
|
||||
{
|
||||
input_t *input = static_cast<input_t *>(data);
|
||||
|
||||
GHOST_IWindow *win = get_window(input->focus_pointer);
|
||||
|
||||
if (!win) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (axis != WL_POINTER_AXIS_VERTICAL_SCROLL) {
|
||||
return;
|
||||
}
|
||||
|
||||
input_t *input = static_cast<input_t *>(data);
|
||||
GHOST_IWindow *win = static_cast<GHOST_WindowWayland *>(
|
||||
wl_surface_get_user_data(input->focus_pointer));
|
||||
input->system->pushEvent(
|
||||
new GHOST_EventWheel(input->system->getMilliSeconds(), win, std::signbit(value) ? +1 : -1));
|
||||
}
|
||||
@@ -1236,12 +1137,7 @@ static void seat_capabilities(void *data, struct wl_seat *wl_seat, uint32_t capa
|
||||
input->cursor.visible = true;
|
||||
input->cursor.buffer = nullptr;
|
||||
input->cursor.file_buffer = new buffer_t;
|
||||
if (!get_cursor_settings(input->cursor.theme_name, input->cursor.size)) {
|
||||
input->cursor.theme_name = std::string();
|
||||
input->cursor.size = default_cursor_size;
|
||||
}
|
||||
wl_pointer_add_listener(input->pointer, &pointer_listener, data);
|
||||
wl_surface_add_listener(input->cursor.surface, &cursor_surface_listener, data);
|
||||
}
|
||||
|
||||
if (capabilities & WL_SEAT_CAPABILITY_KEYBOARD) {
|
||||
@@ -1264,8 +1160,8 @@ static void output_geometry(void *data,
|
||||
struct wl_output * /*wl_output*/,
|
||||
int32_t /*x*/,
|
||||
int32_t /*y*/,
|
||||
int32_t physical_width,
|
||||
int32_t physical_height,
|
||||
int32_t /*physical_width*/,
|
||||
int32_t /*physical_height*/,
|
||||
int32_t /*subpixel*/,
|
||||
const char *make,
|
||||
const char *model,
|
||||
@@ -1275,8 +1171,6 @@ static void output_geometry(void *data,
|
||||
output->transform = transform;
|
||||
output->make = std::string(make);
|
||||
output->model = std::string(model);
|
||||
output->width_mm = physical_width;
|
||||
output->height_mm = physical_height;
|
||||
}
|
||||
|
||||
static void output_mode(void *data,
|
||||
@@ -1287,8 +1181,8 @@ static void output_mode(void *data,
|
||||
int32_t /*refresh*/)
|
||||
{
|
||||
output_t *output = static_cast<output_t *>(data);
|
||||
output->width_pxl = width;
|
||||
output->height_pxl = height;
|
||||
output->width = width;
|
||||
output->height = height;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1333,17 +1227,13 @@ static void global_add(void *data,
|
||||
struct display_t *display = static_cast<struct display_t *>(data);
|
||||
if (!strcmp(interface, wl_compositor_interface.name)) {
|
||||
display->compositor = static_cast<wl_compositor *>(
|
||||
wl_registry_bind(wl_registry, name, &wl_compositor_interface, 3));
|
||||
wl_registry_bind(wl_registry, name, &wl_compositor_interface, 1));
|
||||
}
|
||||
else if (!strcmp(interface, xdg_wm_base_interface.name)) {
|
||||
display->xdg_shell = static_cast<xdg_wm_base *>(
|
||||
wl_registry_bind(wl_registry, name, &xdg_wm_base_interface, 1));
|
||||
xdg_wm_base_add_listener(display->xdg_shell, &shell_listener, nullptr);
|
||||
}
|
||||
else if (!strcmp(interface, zxdg_decoration_manager_v1_interface.name)) {
|
||||
display->xdg_decoration_manager = static_cast<zxdg_decoration_manager_v1 *>(
|
||||
wl_registry_bind(wl_registry, name, &zxdg_decoration_manager_v1_interface, 1));
|
||||
}
|
||||
else if (!strcmp(interface, wl_output_interface.name)) {
|
||||
output_t *output = new output_t;
|
||||
output->scale = 1;
|
||||
@@ -1445,6 +1335,16 @@ GHOST_SystemWayland::GHOST_SystemWayland() : GHOST_System(), d(new display_t)
|
||||
wl_data_device_add_listener(input->data_device, &data_device_listener, input);
|
||||
}
|
||||
}
|
||||
|
||||
const char *theme = std::getenv("XCURSOR_THEME");
|
||||
const char *size = std::getenv("XCURSOR_SIZE");
|
||||
const int sizei = size ? std::stoi(size) : default_cursor_size;
|
||||
|
||||
d->cursor_theme = wl_cursor_theme_load(theme, sizei, d->shm);
|
||||
if (!d->cursor_theme) {
|
||||
display_destroy(d);
|
||||
throw std::runtime_error("Wayland: unable to access cursor themes!");
|
||||
}
|
||||
}
|
||||
|
||||
GHOST_SystemWayland::~GHOST_SystemWayland()
|
||||
@@ -1571,8 +1471,8 @@ void GHOST_SystemWayland::getMainDisplayDimensions(GHOST_TUns32 &width, GHOST_TU
|
||||
{
|
||||
if (getNumDisplays() > 0) {
|
||||
/* We assume first output as main. */
|
||||
width = uint32_t(d->outputs[0]->width_pxl) / d->outputs[0]->scale;
|
||||
height = uint32_t(d->outputs[0]->height_pxl) / d->outputs[0]->scale;
|
||||
width = uint32_t(d->outputs[0]->width);
|
||||
height = uint32_t(d->outputs[0]->height);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1581,7 +1481,7 @@ void GHOST_SystemWayland::getAllDisplayDimensions(GHOST_TUns32 &width, GHOST_TUn
|
||||
getMainDisplayDimensions(width, height);
|
||||
}
|
||||
|
||||
GHOST_IContext *GHOST_SystemWayland::createOffscreenContext(GHOST_GLSettings /*glSettings*/)
|
||||
GHOST_IContext *GHOST_SystemWayland::createOffscreenContext(GHOST_GLSettings glSettings)
|
||||
{
|
||||
/* Create new off-screen window. */
|
||||
wl_surface *os_surface = wl_compositor_create_surface(compositor());
|
||||
@@ -1630,11 +1530,6 @@ GHOST_IWindow *GHOST_SystemWayland::createWindow(const char *title,
|
||||
const bool is_dialog,
|
||||
const GHOST_IWindow *parentWindow)
|
||||
{
|
||||
/* globally store pointer to window manager */
|
||||
if (!window_manager) {
|
||||
window_manager = getWindowManager();
|
||||
}
|
||||
|
||||
GHOST_WindowWayland *window = new GHOST_WindowWayland(
|
||||
this,
|
||||
title,
|
||||
@@ -1679,21 +1574,6 @@ xdg_wm_base *GHOST_SystemWayland::shell()
|
||||
return d->xdg_shell;
|
||||
}
|
||||
|
||||
zxdg_decoration_manager_v1 *GHOST_SystemWayland::decoration_manager()
|
||||
{
|
||||
return d->xdg_decoration_manager;
|
||||
}
|
||||
|
||||
const std::vector<output_t *> &GHOST_SystemWayland::outputs() const
|
||||
{
|
||||
return d->outputs;
|
||||
}
|
||||
|
||||
wl_shm *GHOST_SystemWayland::shm() const
|
||||
{
|
||||
return d->shm;
|
||||
}
|
||||
|
||||
void GHOST_SystemWayland::setSelection(const std::string &selection)
|
||||
{
|
||||
this->selection = selection;
|
||||
@@ -1701,20 +1581,23 @@ void GHOST_SystemWayland::setSelection(const std::string &selection)
|
||||
|
||||
static void set_cursor_buffer(input_t *input, wl_buffer *buffer)
|
||||
{
|
||||
cursor_t *c = &input->cursor;
|
||||
input->cursor.visible = (buffer != nullptr);
|
||||
|
||||
c->visible = (buffer != nullptr);
|
||||
wl_surface_attach(input->cursor.surface, buffer, 0, 0);
|
||||
wl_surface_commit(input->cursor.surface);
|
||||
|
||||
wl_surface_attach(c->surface, buffer, 0, 0);
|
||||
|
||||
wl_surface_damage(c->surface, 0, 0, int32_t(c->image.width), int32_t(c->image.height));
|
||||
wl_pointer_set_cursor(input->pointer,
|
||||
input->pointer_serial,
|
||||
c->visible ? c->surface : nullptr,
|
||||
int32_t(c->image.hotspot_x) / c->scale,
|
||||
int32_t(c->image.hotspot_y) / c->scale);
|
||||
|
||||
wl_surface_commit(c->surface);
|
||||
if (input->cursor.visible) {
|
||||
wl_surface_damage(input->cursor.surface,
|
||||
0,
|
||||
0,
|
||||
int32_t(input->cursor.image.width),
|
||||
int32_t(input->cursor.image.height));
|
||||
wl_pointer_set_cursor(input->pointer,
|
||||
input->pointer_serial,
|
||||
input->cursor.surface,
|
||||
int32_t(input->cursor.image.hotspot_x),
|
||||
int32_t(input->cursor.image.hotspot_y));
|
||||
}
|
||||
}
|
||||
|
||||
GHOST_TSuccess GHOST_SystemWayland::setCursorShape(GHOST_TStandardCursor shape)
|
||||
@@ -1725,15 +1608,7 @@ GHOST_TSuccess GHOST_SystemWayland::setCursorShape(GHOST_TStandardCursor shape)
|
||||
const std::string cursor_name = cursors.count(shape) ? cursors.at(shape) :
|
||||
cursors.at(GHOST_kStandardCursorDefault);
|
||||
|
||||
input_t *input = d->inputs[0];
|
||||
cursor_t *c = &input->cursor;
|
||||
|
||||
if (!c->theme) {
|
||||
/* The cursor surface hasn't entered an output yet. Initialize theme with scale 1. */
|
||||
c->theme = wl_cursor_theme_load(c->theme_name.c_str(), c->size, d->inputs[0]->system->shm());
|
||||
}
|
||||
|
||||
wl_cursor *cursor = wl_cursor_theme_get_cursor(c->theme, cursor_name.c_str());
|
||||
wl_cursor *cursor = wl_cursor_theme_get_cursor(d->cursor_theme, cursor_name.c_str());
|
||||
|
||||
if (!cursor) {
|
||||
GHOST_PRINT("cursor '" << cursor_name << "' does not exist" << std::endl);
|
||||
@@ -1745,11 +1620,11 @@ GHOST_TSuccess GHOST_SystemWayland::setCursorShape(GHOST_TStandardCursor shape)
|
||||
if (!buffer) {
|
||||
return GHOST_kFailure;
|
||||
}
|
||||
|
||||
cursor_t *c = &d->inputs[0]->cursor;
|
||||
c->buffer = buffer;
|
||||
c->image = *image;
|
||||
|
||||
set_cursor_buffer(input, buffer);
|
||||
set_cursor_buffer(d->inputs[0], buffer);
|
||||
|
||||
return GHOST_kSuccess;
|
||||
}
|
||||
@@ -1860,11 +1735,6 @@ GHOST_TSuccess GHOST_SystemWayland::setCursorVisibility(bool visible)
|
||||
GHOST_TSuccess GHOST_SystemWayland::setCursorGrab(const GHOST_TGrabCursorMode mode,
|
||||
wl_surface *surface)
|
||||
{
|
||||
/* ignore, if the required protocols are not supported */
|
||||
if (!d->relative_pointer_manager || !d->pointer_constraints) {
|
||||
return GHOST_kFailure;
|
||||
}
|
||||
|
||||
if (d->inputs.empty()) {
|
||||
return GHOST_kFailure;
|
||||
}
|
||||
@@ -1884,7 +1754,6 @@ GHOST_TSuccess GHOST_SystemWayland::setCursorGrab(const GHOST_TGrabCursorMode mo
|
||||
break;
|
||||
|
||||
case GHOST_kGrabNormal:
|
||||
break;
|
||||
case GHOST_kGrabWrap:
|
||||
input->relative_pointer = zwp_relative_pointer_manager_v1_get_relative_pointer(
|
||||
d->relative_pointer_manager, input->pointer);
|
||||
|
@@ -26,7 +26,6 @@
|
||||
#include "GHOST_WindowWayland.h"
|
||||
|
||||
#include <wayland-client.h>
|
||||
#include <xdg-decoration-client-protocol.h>
|
||||
#include <xdg-shell-client-protocol.h>
|
||||
|
||||
#include <string>
|
||||
@@ -35,16 +34,6 @@ class GHOST_WindowWayland;
|
||||
|
||||
struct display_t;
|
||||
|
||||
struct output_t {
|
||||
struct wl_output *output;
|
||||
int32_t width_pxl, height_pxl; // dimensions in pixel
|
||||
int32_t width_mm, height_mm; // dimensions in millimeter
|
||||
int transform;
|
||||
int scale;
|
||||
std::string make;
|
||||
std::string model;
|
||||
};
|
||||
|
||||
class GHOST_SystemWayland : public GHOST_System {
|
||||
public:
|
||||
GHOST_SystemWayland();
|
||||
@@ -95,12 +84,6 @@ class GHOST_SystemWayland : public GHOST_System {
|
||||
|
||||
xdg_wm_base *shell();
|
||||
|
||||
zxdg_decoration_manager_v1 *decoration_manager();
|
||||
|
||||
const std::vector<output_t *> &outputs() const;
|
||||
|
||||
wl_shm *shm() const;
|
||||
|
||||
void setSelection(const std::string &selection);
|
||||
|
||||
GHOST_TSuccess setCursorShape(GHOST_TStandardCursor shape);
|
||||
|
@@ -32,7 +32,6 @@
|
||||
#include <commctrl.h>
|
||||
#include <psapi.h>
|
||||
#include <shellapi.h>
|
||||
#include <shellscalingapi.h>
|
||||
#include <shlobj.h>
|
||||
#include <tlhelp32.h>
|
||||
#include <windowsx.h>
|
||||
@@ -98,6 +97,41 @@
|
||||
# define VK_GR_LESS 0xE2
|
||||
#endif // VK_GR_LESS
|
||||
|
||||
#ifndef VK_MEDIA_NEXT_TRACK
|
||||
# define VK_MEDIA_NEXT_TRACK 0xB0
|
||||
#endif // VK_MEDIA_NEXT_TRACK
|
||||
#ifndef VK_MEDIA_PREV_TRACK
|
||||
# define VK_MEDIA_PREV_TRACK 0xB1
|
||||
#endif // VK_MEDIA_PREV_TRACK
|
||||
#ifndef VK_MEDIA_STOP
|
||||
# define VK_MEDIA_STOP 0xB2
|
||||
#endif // VK_MEDIA_STOP
|
||||
#ifndef VK_MEDIA_PLAY_PAUSE
|
||||
# define VK_MEDIA_PLAY_PAUSE 0xB3
|
||||
#endif // VK_MEDIA_PLAY_PAUSE
|
||||
|
||||
// Window message newer than Windows 7
|
||||
#ifndef WM_DPICHANGED
|
||||
# define WM_DPICHANGED 0x02E0
|
||||
#endif // WM_DPICHANGED
|
||||
|
||||
// WM_POINTER API messages minimum Windows 7
|
||||
#ifndef WM_POINTERENTER
|
||||
# define WM_POINTERENTER 0x0249
|
||||
#endif // WM_POINTERENTER
|
||||
#ifndef WM_POINTERDOWN
|
||||
# define WM_POINTERDOWN 0x0246
|
||||
#endif // WM_POINTERDOWN
|
||||
#ifndef WM_POINTERUPDATE
|
||||
# define WM_POINTERUPDATE 0x0245
|
||||
#endif // WM_POINTERUPDATE
|
||||
#ifndef WM_POINTERUP
|
||||
# define WM_POINTERUP 0x0247
|
||||
#endif // WM_POINTERUP
|
||||
#ifndef WM_POINTERLEAVE
|
||||
# define WM_POINTERLEAVE 0x024A
|
||||
#endif // WM_POINTERLEAVE
|
||||
|
||||
/* Workaround for some laptop touchpads, some of which seems to
|
||||
* have driver issues which makes it so window function receives
|
||||
* the message, but PeekMessage doesn't pick those messages for
|
||||
@@ -139,6 +173,24 @@ static void initRawInput()
|
||||
#undef DEVICE_COUNT
|
||||
}
|
||||
|
||||
#ifndef DPI_ENUMS_DECLARED
|
||||
typedef enum PROCESS_DPI_AWARENESS {
|
||||
PROCESS_DPI_UNAWARE = 0,
|
||||
PROCESS_SYSTEM_DPI_AWARE = 1,
|
||||
PROCESS_PER_MONITOR_DPI_AWARE = 2
|
||||
} PROCESS_DPI_AWARENESS;
|
||||
|
||||
typedef enum MONITOR_DPI_TYPE {
|
||||
MDT_EFFECTIVE_DPI = 0,
|
||||
MDT_ANGULAR_DPI = 1,
|
||||
MDT_RAW_DPI = 2,
|
||||
MDT_DEFAULT = MDT_EFFECTIVE_DPI
|
||||
} MONITOR_DPI_TYPE;
|
||||
|
||||
# define USER_DEFAULT_SCREEN_DPI 96
|
||||
|
||||
# define DPI_ENUMS_DECLARED
|
||||
#endif
|
||||
typedef HRESULT(API *GHOST_WIN32_SetProcessDpiAwareness)(PROCESS_DPI_AWARENESS);
|
||||
typedef BOOL(API *GHOST_WIN32_EnableNonClientDpiScaling)(HWND);
|
||||
|
||||
@@ -153,7 +205,15 @@ GHOST_SystemWin32::GHOST_SystemWin32()
|
||||
|
||||
// Tell Windows we are per monitor DPI aware. This disables the default
|
||||
// blurry scaling and enables WM_DPICHANGED to allow us to draw at proper DPI.
|
||||
SetProcessDpiAwareness(PROCESS_PER_MONITOR_DPI_AWARE);
|
||||
HMODULE m_shcore = ::LoadLibrary("Shcore.dll");
|
||||
if (m_shcore) {
|
||||
GHOST_WIN32_SetProcessDpiAwareness fpSetProcessDpiAwareness =
|
||||
(GHOST_WIN32_SetProcessDpiAwareness)::GetProcAddress(m_shcore, "SetProcessDpiAwareness");
|
||||
|
||||
if (fpSetProcessDpiAwareness) {
|
||||
fpSetProcessDpiAwareness(PROCESS_PER_MONITOR_DPI_AWARE);
|
||||
}
|
||||
}
|
||||
|
||||
// Check if current keyboard layout uses AltGr and save keylayout ID for
|
||||
// specialized handling if keys like VK_OEM_*. I.e. french keylayout
|
||||
@@ -521,7 +581,14 @@ GHOST_TSuccess GHOST_SystemWin32::init()
|
||||
InitCommonControls();
|
||||
|
||||
/* Disable scaling on high DPI displays on Vista */
|
||||
SetProcessDPIAware();
|
||||
HMODULE
|
||||
user32 = ::LoadLibraryA("user32.dll");
|
||||
typedef BOOL(WINAPI * LPFNSETPROCESSDPIAWARE)();
|
||||
LPFNSETPROCESSDPIAWARE SetProcessDPIAware = (LPFNSETPROCESSDPIAWARE)GetProcAddress(
|
||||
user32, "SetProcessDPIAware");
|
||||
if (SetProcessDPIAware)
|
||||
SetProcessDPIAware();
|
||||
FreeLibrary(user32);
|
||||
initRawInput();
|
||||
|
||||
m_lfstart = ::GetTickCount();
|
||||
@@ -1462,7 +1529,14 @@ LRESULT WINAPI GHOST_SystemWin32::s_wndProc(HWND hwnd, UINT msg, WPARAM wParam,
|
||||
* since DefWindowProc propagates it up the parent chain
|
||||
* until it finds a window that processes it.
|
||||
*/
|
||||
processWheelEvent(window, wParam, lParam);
|
||||
|
||||
/* Get the window under the mouse and send event to its queue. */
|
||||
POINT mouse_pos = {GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam)};
|
||||
HWND mouse_hwnd = ChildWindowFromPoint(HWND_DESKTOP, mouse_pos);
|
||||
GHOST_WindowWin32 *mouse_window = (GHOST_WindowWin32 *)::GetWindowLongPtr(mouse_hwnd,
|
||||
GWLP_USERDATA);
|
||||
|
||||
processWheelEvent(mouse_window ? mouse_window : window, wParam, lParam);
|
||||
eventHandled = true;
|
||||
#ifdef BROKEN_PEEK_TOUCHPAD
|
||||
PostMessage(hwnd, WM_USER, 0, 0);
|
||||
|
@@ -1,130 +0,0 @@
|
||||
/*
|
||||
* This program is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU General Public License
|
||||
* as published by the Free Software Foundation; either version 2
|
||||
* of the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
*/
|
||||
|
||||
/** \file
|
||||
* \ingroup GHOST
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
#include <dbus/dbus.h>
|
||||
#include <string>
|
||||
|
||||
static DBusMessage *get_setting_sync(DBusConnection *const connection,
|
||||
const char *key,
|
||||
const char *value)
|
||||
{
|
||||
DBusError error;
|
||||
dbus_bool_t success;
|
||||
DBusMessage *message;
|
||||
DBusMessage *reply;
|
||||
|
||||
dbus_error_init(&error);
|
||||
|
||||
message = dbus_message_new_method_call("org.freedesktop.portal.Desktop",
|
||||
"/org/freedesktop/portal/desktop",
|
||||
"org.freedesktop.portal.Settings",
|
||||
"Read");
|
||||
|
||||
success = dbus_message_append_args(
|
||||
message, DBUS_TYPE_STRING, &key, DBUS_TYPE_STRING, &value, DBUS_TYPE_INVALID);
|
||||
|
||||
if (!success) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
reply = dbus_connection_send_with_reply_and_block(
|
||||
connection, message, DBUS_TIMEOUT_USE_DEFAULT, &error);
|
||||
|
||||
dbus_message_unref(message);
|
||||
|
||||
if (dbus_error_is_set(&error)) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return reply;
|
||||
}
|
||||
|
||||
static bool parse_type(DBusMessage *const reply, const int type, void *value)
|
||||
{
|
||||
DBusMessageIter iter[3];
|
||||
|
||||
dbus_message_iter_init(reply, &iter[0]);
|
||||
if (dbus_message_iter_get_arg_type(&iter[0]) != DBUS_TYPE_VARIANT) {
|
||||
return false;
|
||||
}
|
||||
|
||||
dbus_message_iter_recurse(&iter[0], &iter[1]);
|
||||
if (dbus_message_iter_get_arg_type(&iter[1]) != DBUS_TYPE_VARIANT) {
|
||||
return false;
|
||||
}
|
||||
|
||||
dbus_message_iter_recurse(&iter[1], &iter[2]);
|
||||
if (dbus_message_iter_get_arg_type(&iter[2]) != type) {
|
||||
return false;
|
||||
}
|
||||
|
||||
dbus_message_iter_get_basic(&iter[2], value);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool get_cursor_settings(std::string &theme, int &size)
|
||||
{
|
||||
static const char name[] = "org.gnome.desktop.interface";
|
||||
static const char key_theme[] = "cursor-theme";
|
||||
static const char key_size[] = "cursor-size";
|
||||
|
||||
DBusError error;
|
||||
DBusConnection *connection;
|
||||
DBusMessage *reply;
|
||||
const char *value_theme = NULL;
|
||||
|
||||
dbus_error_init(&error);
|
||||
|
||||
connection = dbus_bus_get(DBUS_BUS_SESSION, &error);
|
||||
|
||||
if (dbus_error_is_set(&error)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
reply = get_setting_sync(connection, name, key_theme);
|
||||
if (!reply) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!parse_type(reply, DBUS_TYPE_STRING, &value_theme)) {
|
||||
dbus_message_unref(reply);
|
||||
return false;
|
||||
}
|
||||
|
||||
theme = std::string(value_theme);
|
||||
|
||||
dbus_message_unref(reply);
|
||||
|
||||
reply = get_setting_sync(connection, name, key_size);
|
||||
if (!reply) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!parse_type(reply, DBUS_TYPE_INT32, &size)) {
|
||||
dbus_message_unref(reply);
|
||||
return false;
|
||||
}
|
||||
|
||||
dbus_message_unref(reply);
|
||||
|
||||
return true;
|
||||
}
|
@@ -29,19 +29,11 @@
|
||||
|
||||
#include <wayland-egl.h>
|
||||
|
||||
static constexpr size_t base_dpi = 96;
|
||||
|
||||
struct window_t {
|
||||
GHOST_WindowWayland *w;
|
||||
wl_surface *surface;
|
||||
// outputs on which the window is currently shown on
|
||||
std::unordered_set<const output_t *> outputs;
|
||||
GHOST_TUns16 dpi = 0;
|
||||
int scale = 1;
|
||||
struct xdg_surface *xdg_surface;
|
||||
struct xdg_toplevel *xdg_toplevel;
|
||||
struct zxdg_toplevel_decoration_v1 *xdg_toplevel_decoration = nullptr;
|
||||
enum zxdg_toplevel_decoration_v1_mode decoration_mode;
|
||||
wl_egl_window *egl_window;
|
||||
int32_t pending_width, pending_height;
|
||||
bool is_maximised;
|
||||
@@ -101,30 +93,17 @@ static const xdg_toplevel_listener toplevel_listener = {
|
||||
toplevel_close,
|
||||
};
|
||||
|
||||
static void toplevel_decoration_configure(
|
||||
void *data,
|
||||
struct zxdg_toplevel_decoration_v1 * /*zxdg_toplevel_decoration_v1*/,
|
||||
uint32_t mode)
|
||||
{
|
||||
static_cast<window_t *>(data)->decoration_mode = zxdg_toplevel_decoration_v1_mode(mode);
|
||||
}
|
||||
|
||||
static const zxdg_toplevel_decoration_v1_listener toplevel_decoration_v1_listener = {
|
||||
toplevel_decoration_configure,
|
||||
};
|
||||
|
||||
static void surface_configure(void *data, xdg_surface *xdg_surface, uint32_t serial)
|
||||
{
|
||||
window_t *win = static_cast<window_t *>(data);
|
||||
|
||||
if (win->xdg_surface != xdg_surface) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (win->pending_width != 0 && win->pending_height != 0) {
|
||||
win->width = win->scale * win->pending_width;
|
||||
win->height = win->scale * win->pending_height;
|
||||
wl_egl_window_resize(win->egl_window, win->width, win->height, 0, 0);
|
||||
int w, h;
|
||||
wl_egl_window_get_attached_size(win->egl_window, &w, &h);
|
||||
if (win->pending_width != 0 && win->pending_height != 0 && win->pending_width != w &&
|
||||
win->pending_height != h) {
|
||||
win->width = win->pending_width;
|
||||
win->height = win->pending_height;
|
||||
wl_egl_window_resize(win->egl_window, win->pending_width, win->pending_height, 0, 0);
|
||||
win->pending_width = 0;
|
||||
win->pending_height = 0;
|
||||
win->w->notify_size();
|
||||
@@ -144,52 +123,6 @@ static const xdg_surface_listener surface_listener = {
|
||||
surface_configure,
|
||||
};
|
||||
|
||||
static bool update_scale(GHOST_WindowWayland *window)
|
||||
{
|
||||
int scale = 0;
|
||||
for (const output_t *output : window->outputs_active()) {
|
||||
if (output->scale > scale)
|
||||
scale = output->scale;
|
||||
}
|
||||
|
||||
if (scale > 0 && window->scale() != scale) {
|
||||
window->scale() = scale;
|
||||
// using the real DPI will cause wrong scaling of the UI
|
||||
// use a multiplier for the default DPI as workaround
|
||||
window->dpi() = scale * base_dpi;
|
||||
wl_surface_set_buffer_scale(window->surface(), scale);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static void surface_enter(void *data, struct wl_surface * /*wl_surface*/, struct wl_output *output)
|
||||
{
|
||||
GHOST_WindowWayland *w = static_cast<GHOST_WindowWayland *>(data);
|
||||
for (const output_t *reg_output : w->outputs()) {
|
||||
if (reg_output->output == output) {
|
||||
w->outputs_active().insert(reg_output);
|
||||
}
|
||||
}
|
||||
update_scale(w);
|
||||
}
|
||||
|
||||
static void surface_leave(void *data, struct wl_surface * /*wl_surface*/, struct wl_output *output)
|
||||
{
|
||||
GHOST_WindowWayland *w = static_cast<GHOST_WindowWayland *>(data);
|
||||
for (const output_t *reg_output : w->outputs()) {
|
||||
if (reg_output->output == output) {
|
||||
w->outputs_active().erase(reg_output);
|
||||
}
|
||||
}
|
||||
update_scale(w);
|
||||
}
|
||||
|
||||
struct wl_surface_listener wl_surface_listener = {
|
||||
surface_enter,
|
||||
surface_leave,
|
||||
};
|
||||
|
||||
/** \} */
|
||||
|
||||
/* -------------------------------------------------------------------- */
|
||||
@@ -228,28 +161,17 @@ GHOST_WindowWayland::GHOST_WindowWayland(GHOST_SystemWayland *system,
|
||||
|
||||
/* Window surfaces. */
|
||||
w->surface = wl_compositor_create_surface(m_system->compositor());
|
||||
wl_surface_add_listener(w->surface, &wl_surface_listener, this);
|
||||
|
||||
w->egl_window = wl_egl_window_create(w->surface, int(width), int(height));
|
||||
|
||||
w->xdg_surface = xdg_wm_base_get_xdg_surface(m_system->shell(), w->surface);
|
||||
w->xdg_toplevel = xdg_surface_get_toplevel(w->xdg_surface);
|
||||
|
||||
if (m_system->decoration_manager()) {
|
||||
w->xdg_toplevel_decoration = zxdg_decoration_manager_v1_get_toplevel_decoration(
|
||||
m_system->decoration_manager(), w->xdg_toplevel);
|
||||
zxdg_toplevel_decoration_v1_add_listener(
|
||||
w->xdg_toplevel_decoration, &toplevel_decoration_v1_listener, w);
|
||||
zxdg_toplevel_decoration_v1_set_mode(w->xdg_toplevel_decoration,
|
||||
ZXDG_TOPLEVEL_DECORATION_V1_MODE_SERVER_SIDE);
|
||||
}
|
||||
|
||||
wl_surface_set_user_data(w->surface, this);
|
||||
|
||||
xdg_surface_add_listener(w->xdg_surface, &surface_listener, w);
|
||||
xdg_toplevel_add_listener(w->xdg_toplevel, &toplevel_listener, w);
|
||||
|
||||
if (parentWindow && is_dialog) {
|
||||
if (parentWindow) {
|
||||
xdg_toplevel_set_parent(
|
||||
w->xdg_toplevel, dynamic_cast<const GHOST_WindowWayland *>(parentWindow)->w->xdg_toplevel);
|
||||
}
|
||||
@@ -270,9 +192,6 @@ GHOST_WindowWayland::GHOST_WindowWayland(GHOST_SystemWayland *system,
|
||||
if (setDrawingContextType(type) == GHOST_kFailure) {
|
||||
GHOST_PRINT("Failed to create EGL context" << std::endl);
|
||||
}
|
||||
|
||||
/* set swap interval to 0 to prevent blocking */
|
||||
setSwapInterval(0);
|
||||
}
|
||||
|
||||
GHOST_TSuccess GHOST_WindowWayland::close()
|
||||
@@ -307,31 +226,6 @@ GHOST_TSuccess GHOST_WindowWayland::notify_size()
|
||||
new GHOST_Event(m_system->getMilliSeconds(), GHOST_kEventWindowSize, this));
|
||||
}
|
||||
|
||||
wl_surface *GHOST_WindowWayland::surface() const
|
||||
{
|
||||
return w->surface;
|
||||
}
|
||||
|
||||
const std::vector<output_t *> &GHOST_WindowWayland::outputs() const
|
||||
{
|
||||
return m_system->outputs();
|
||||
}
|
||||
|
||||
std::unordered_set<const output_t *> &GHOST_WindowWayland::outputs_active()
|
||||
{
|
||||
return w->outputs;
|
||||
}
|
||||
|
||||
uint16_t &GHOST_WindowWayland::dpi()
|
||||
{
|
||||
return w->dpi;
|
||||
}
|
||||
|
||||
int &GHOST_WindowWayland::scale()
|
||||
{
|
||||
return w->scale;
|
||||
}
|
||||
|
||||
GHOST_TSuccess GHOST_WindowWayland::setWindowCursorGrab(GHOST_TGrabCursorMode mode)
|
||||
{
|
||||
return m_system->setCursorGrab(mode, w->surface);
|
||||
@@ -416,9 +310,6 @@ GHOST_WindowWayland::~GHOST_WindowWayland()
|
||||
releaseNativeHandles();
|
||||
|
||||
wl_egl_window_destroy(w->egl_window);
|
||||
if (w->xdg_toplevel_decoration) {
|
||||
zxdg_toplevel_decoration_v1_destroy(w->xdg_toplevel_decoration);
|
||||
}
|
||||
xdg_toplevel_destroy(w->xdg_toplevel);
|
||||
xdg_surface_destroy(w->xdg_surface);
|
||||
wl_surface_destroy(w->surface);
|
||||
@@ -426,11 +317,6 @@ GHOST_WindowWayland::~GHOST_WindowWayland()
|
||||
delete w;
|
||||
}
|
||||
|
||||
GHOST_TUns16 GHOST_WindowWayland::getDPIHint()
|
||||
{
|
||||
return w->dpi;
|
||||
}
|
||||
|
||||
GHOST_TSuccess GHOST_WindowWayland::setWindowCursorVisibility(bool visible)
|
||||
{
|
||||
return m_system->setCursorVisibility(visible);
|
||||
|
@@ -24,14 +24,9 @@
|
||||
|
||||
#include "GHOST_Window.h"
|
||||
|
||||
#include <unordered_set>
|
||||
#include <vector>
|
||||
|
||||
class GHOST_SystemWayland;
|
||||
|
||||
struct window_t;
|
||||
struct wl_surface;
|
||||
struct output_t;
|
||||
|
||||
class GHOST_WindowWayland : public GHOST_Window {
|
||||
public:
|
||||
@@ -52,8 +47,6 @@ class GHOST_WindowWayland : public GHOST_Window {
|
||||
|
||||
~GHOST_WindowWayland() override;
|
||||
|
||||
GHOST_TUns16 getDPIHint() override;
|
||||
|
||||
GHOST_TSuccess close();
|
||||
|
||||
GHOST_TSuccess activate();
|
||||
@@ -62,16 +55,6 @@ class GHOST_WindowWayland : public GHOST_Window {
|
||||
|
||||
GHOST_TSuccess notify_size();
|
||||
|
||||
wl_surface *surface() const;
|
||||
|
||||
const std::vector<output_t *> &outputs() const;
|
||||
|
||||
std::unordered_set<const output_t *> &outputs_active();
|
||||
|
||||
uint16_t &dpi();
|
||||
|
||||
int &scale();
|
||||
|
||||
protected:
|
||||
GHOST_TSuccess setWindowCursorGrab(GHOST_TGrabCursorMode mode) override;
|
||||
|
||||
|
@@ -84,6 +84,9 @@ GHOST_WindowWin32::GHOST_WindowWin32(GHOST_SystemWin32 *system,
|
||||
m_wantAlphaBackground(alphaBackground),
|
||||
m_normal_state(GHOST_kWindowStateNormal),
|
||||
m_user32(NULL),
|
||||
m_fpGetPointerInfoHistory(NULL),
|
||||
m_fpGetPointerPenInfoHistory(NULL),
|
||||
m_fpGetPointerTouchInfoHistory(NULL),
|
||||
m_parentWindowHwnd(parentwindow ? parentwindow->m_hWnd : HWND_DESKTOP),
|
||||
m_debug_context(is_debug)
|
||||
{
|
||||
@@ -150,7 +153,19 @@ GHOST_WindowWin32::GHOST_WindowWin32(GHOST_SystemWin32 *system,
|
||||
m_user32 = ::LoadLibrary("user32.dll");
|
||||
|
||||
if (m_hWnd) {
|
||||
RegisterTouchWindow(m_hWnd, 0);
|
||||
if (m_user32) {
|
||||
// Touch enabled screens with pen support by default have gestures
|
||||
// enabled, which results in a delay between the pointer down event
|
||||
// and the first move when using the stylus. RegisterTouchWindow
|
||||
// disables the new gesture architecture enabling the events to be
|
||||
// sent immediately to the application rather than being absorbed by
|
||||
// the gesture API.
|
||||
GHOST_WIN32_RegisterTouchWindow pRegisterTouchWindow = (GHOST_WIN32_RegisterTouchWindow)
|
||||
GetProcAddress(m_user32, "RegisterTouchWindow");
|
||||
if (pRegisterTouchWindow) {
|
||||
pRegisterTouchWindow(m_hWnd, 0);
|
||||
}
|
||||
}
|
||||
|
||||
// Register this window as a droptarget. Requires m_hWnd to be valid.
|
||||
// Note that OleInitialize(0) has to be called prior to this. Done in GHOST_SystemWin32.
|
||||
@@ -217,6 +232,16 @@ GHOST_WindowWin32::GHOST_WindowWin32(GHOST_SystemWin32 *system,
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize Windows Ink
|
||||
if (m_user32) {
|
||||
m_fpGetPointerInfoHistory = (GHOST_WIN32_GetPointerInfoHistory)::GetProcAddress(
|
||||
m_user32, "GetPointerInfoHistory");
|
||||
m_fpGetPointerPenInfoHistory = (GHOST_WIN32_GetPointerPenInfoHistory)::GetProcAddress(
|
||||
m_user32, "GetPointerPenInfoHistory");
|
||||
m_fpGetPointerTouchInfoHistory = (GHOST_WIN32_GetPointerTouchInfoHistory)::GetProcAddress(
|
||||
m_user32, "GetPointerTouchInfoHistory");
|
||||
}
|
||||
|
||||
// Initialize Wintab
|
||||
m_wintab.handle = ::LoadLibrary("Wintab32.dll");
|
||||
if (m_wintab.handle && m_system->getTabletAPI() != GHOST_kTabletNative) {
|
||||
@@ -301,6 +326,9 @@ GHOST_WindowWin32::~GHOST_WindowWin32()
|
||||
if (m_user32) {
|
||||
FreeLibrary(m_user32);
|
||||
m_user32 = NULL;
|
||||
m_fpGetPointerInfoHistory = NULL;
|
||||
m_fpGetPointerPenInfoHistory = NULL;
|
||||
m_fpGetPointerTouchInfoHistory = NULL;
|
||||
}
|
||||
|
||||
if (m_customCursor) {
|
||||
@@ -922,14 +950,15 @@ GHOST_TSuccess GHOST_WindowWin32::getPointerInfo(
|
||||
GHOST_SystemWin32 *system = (GHOST_SystemWin32 *)GHOST_System::getSystem();
|
||||
GHOST_TUns32 outCount;
|
||||
|
||||
if (!(GetPointerInfoHistory(pointerId, &outCount, NULL))) {
|
||||
if (!(m_fpGetPointerInfoHistory && m_fpGetPointerInfoHistory(pointerId, &outCount, NULL))) {
|
||||
return GHOST_kFailure;
|
||||
}
|
||||
|
||||
auto pointerPenInfo = std::vector<POINTER_PEN_INFO>(outCount);
|
||||
outPointerInfo.resize(outCount);
|
||||
|
||||
if (!(GetPointerPenInfoHistory(pointerId, &outCount, pointerPenInfo.data()))) {
|
||||
if (!(m_fpGetPointerPenInfoHistory &&
|
||||
m_fpGetPointerPenInfoHistory(pointerId, &outCount, pointerPenInfo.data()))) {
|
||||
return GHOST_kFailure;
|
||||
}
|
||||
|
||||
|
@@ -53,12 +53,177 @@ typedef BOOL(API *GHOST_WIN32_WTPacket)(HCTX, UINT, LPVOID);
|
||||
typedef BOOL(API *GHOST_WIN32_WTEnable)(HCTX, BOOL);
|
||||
typedef BOOL(API *GHOST_WIN32_WTOverlap)(HCTX, BOOL);
|
||||
|
||||
// typedef to user32 functions to disable gestures on windows
|
||||
typedef BOOL(API *GHOST_WIN32_RegisterTouchWindow)(HWND hwnd, ULONG ulFlags);
|
||||
|
||||
// typedefs for user32 functions to allow dynamic loading of Windows 10 DPI scaling functions
|
||||
typedef UINT(API *GHOST_WIN32_GetDpiForWindow)(HWND);
|
||||
#ifndef USER_DEFAULT_SCREEN_DPI
|
||||
# define USER_DEFAULT_SCREEN_DPI 96
|
||||
#endif // USER_DEFAULT_SCREEN_DPI
|
||||
|
||||
// typedefs for user32 functions to allow pointer functions
|
||||
enum tagPOINTER_INPUT_TYPE {
|
||||
PT_POINTER = 1, // Generic pointer
|
||||
PT_TOUCH = 2, // Touch
|
||||
PT_PEN = 3, // Pen
|
||||
PT_MOUSE = 4, // Mouse
|
||||
#if (WINVER >= 0x0603)
|
||||
PT_TOUCHPAD = 5, // Touchpad
|
||||
#endif /* WINVER >= 0x0603 */
|
||||
};
|
||||
|
||||
typedef enum tagPOINTER_BUTTON_CHANGE_TYPE {
|
||||
POINTER_CHANGE_NONE,
|
||||
POINTER_CHANGE_FIRSTBUTTON_DOWN,
|
||||
POINTER_CHANGE_FIRSTBUTTON_UP,
|
||||
POINTER_CHANGE_SECONDBUTTON_DOWN,
|
||||
POINTER_CHANGE_SECONDBUTTON_UP,
|
||||
POINTER_CHANGE_THIRDBUTTON_DOWN,
|
||||
POINTER_CHANGE_THIRDBUTTON_UP,
|
||||
POINTER_CHANGE_FOURTHBUTTON_DOWN,
|
||||
POINTER_CHANGE_FOURTHBUTTON_UP,
|
||||
POINTER_CHANGE_FIFTHBUTTON_DOWN,
|
||||
POINTER_CHANGE_FIFTHBUTTON_UP,
|
||||
} POINTER_BUTTON_CHANGE_TYPE;
|
||||
|
||||
typedef DWORD POINTER_INPUT_TYPE;
|
||||
typedef UINT32 POINTER_FLAGS;
|
||||
|
||||
#define POINTER_FLAG_NONE 0x00000000
|
||||
#define POINTER_FLAG_NEW 0x00000001
|
||||
#define POINTER_FLAG_INRANGE 0x00000002
|
||||
#define POINTER_FLAG_INCONTACT 0x00000004
|
||||
#define POINTER_FLAG_FIRSTBUTTON 0x00000010
|
||||
#define POINTER_FLAG_SECONDBUTTON 0x00000020
|
||||
#define POINTER_FLAG_THIRDBUTTON 0x00000040
|
||||
#define POINTER_FLAG_FOURTHBUTTON 0x00000080
|
||||
#define POINTER_FLAG_FIFTHBUTTON 0x00000100
|
||||
#define POINTER_FLAG_PRIMARY 0x00002000
|
||||
#define POINTER_FLAG_CONFIDENCE 0x000004000
|
||||
#define POINTER_FLAG_CANCELED 0x000008000
|
||||
#define POINTER_FLAG_DOWN 0x00010000
|
||||
#define POINTER_FLAG_UPDATE 0x00020000
|
||||
#define POINTER_FLAG_UP 0x00040000
|
||||
#define POINTER_FLAG_WHEEL 0x00080000
|
||||
#define POINTER_FLAG_HWHEEL 0x00100000
|
||||
#define POINTER_FLAG_CAPTURECHANGED 0x00200000
|
||||
#define POINTER_FLAG_HASTRANSFORM 0x00400000
|
||||
|
||||
typedef struct tagPOINTER_INFO {
|
||||
POINTER_INPUT_TYPE pointerType;
|
||||
UINT32 pointerId;
|
||||
UINT32 frameId;
|
||||
POINTER_FLAGS pointerFlags;
|
||||
HANDLE sourceDevice;
|
||||
HWND hwndTarget;
|
||||
POINT ptPixelLocation;
|
||||
POINT ptHimetricLocation;
|
||||
POINT ptPixelLocationRaw;
|
||||
POINT ptHimetricLocationRaw;
|
||||
DWORD dwTime;
|
||||
UINT32 historyCount;
|
||||
INT32 InputData;
|
||||
DWORD dwKeyStates;
|
||||
UINT64 PerformanceCount;
|
||||
POINTER_BUTTON_CHANGE_TYPE ButtonChangeType;
|
||||
} POINTER_INFO;
|
||||
|
||||
typedef UINT32 PEN_FLAGS;
|
||||
#define PEN_FLAG_NONE 0x00000000 // Default
|
||||
#define PEN_FLAG_BARREL 0x00000001 // The barrel button is pressed
|
||||
#define PEN_FLAG_INVERTED 0x00000002 // The pen is inverted
|
||||
#define PEN_FLAG_ERASER 0x00000004 // The eraser button is pressed
|
||||
|
||||
typedef UINT32 PEN_MASK;
|
||||
#define PEN_MASK_NONE 0x00000000 // Default - none of the optional fields are valid
|
||||
#define PEN_MASK_PRESSURE 0x00000001 // The pressure field is valid
|
||||
#define PEN_MASK_ROTATION 0x00000002 // The rotation field is valid
|
||||
#define PEN_MASK_TILT_X 0x00000004 // The tiltX field is valid
|
||||
#define PEN_MASK_TILT_Y 0x00000008 // The tiltY field is valid
|
||||
|
||||
typedef struct tagPOINTER_PEN_INFO {
|
||||
POINTER_INFO pointerInfo;
|
||||
PEN_FLAGS penFlags;
|
||||
PEN_MASK penMask;
|
||||
UINT32 pressure;
|
||||
UINT32 rotation;
|
||||
INT32 tiltX;
|
||||
INT32 tiltY;
|
||||
} POINTER_PEN_INFO;
|
||||
|
||||
/*
|
||||
* Flags that appear in pointer input message parameters
|
||||
*/
|
||||
#define POINTER_MESSAGE_FLAG_NEW 0x00000001 // New pointer
|
||||
#define POINTER_MESSAGE_FLAG_INRANGE 0x00000002 // Pointer has not departed
|
||||
#define POINTER_MESSAGE_FLAG_INCONTACT 0x00000004 // Pointer is in contact
|
||||
#define POINTER_MESSAGE_FLAG_FIRSTBUTTON 0x00000010 // Primary action
|
||||
#define POINTER_MESSAGE_FLAG_SECONDBUTTON 0x00000020 // Secondary action
|
||||
#define POINTER_MESSAGE_FLAG_THIRDBUTTON 0x00000040 // Third button
|
||||
#define POINTER_MESSAGE_FLAG_FOURTHBUTTON 0x00000080 // Fourth button
|
||||
#define POINTER_MESSAGE_FLAG_FIFTHBUTTON 0x00000100 // Fifth button
|
||||
#define POINTER_MESSAGE_FLAG_PRIMARY 0x00002000 // Pointer is primary
|
||||
#define POINTER_MESSAGE_FLAG_CONFIDENCE \
|
||||
0x00004000 // Pointer is considered unlikely to be accidental
|
||||
#define POINTER_MESSAGE_FLAG_CANCELED 0x00008000 // Pointer is departing in an abnormal manner
|
||||
|
||||
typedef UINT32 TOUCH_FLAGS;
|
||||
#define TOUCH_FLAG_NONE 0x00000000 // Default
|
||||
|
||||
typedef UINT32 TOUCH_MASK;
|
||||
#define TOUCH_MASK_NONE 0x00000000 // Default - none of the optional fields are valid
|
||||
#define TOUCH_MASK_CONTACTAREA 0x00000001 // The rcContact field is valid
|
||||
#define TOUCH_MASK_ORIENTATION 0x00000002 // The orientation field is valid
|
||||
#define TOUCH_MASK_PRESSURE 0x00000004 // The pressure field is valid
|
||||
|
||||
typedef struct tagPOINTER_TOUCH_INFO {
|
||||
POINTER_INFO pointerInfo;
|
||||
TOUCH_FLAGS touchFlags;
|
||||
TOUCH_MASK touchMask;
|
||||
RECT rcContact;
|
||||
RECT rcContactRaw;
|
||||
UINT32 orientation;
|
||||
UINT32 pressure;
|
||||
} POINTER_TOUCH_INFO;
|
||||
|
||||
/*
|
||||
* Macros to retrieve information from pointer input message parameters
|
||||
*/
|
||||
#define GET_POINTERID_WPARAM(wParam) (LOWORD(wParam))
|
||||
#define IS_POINTER_FLAG_SET_WPARAM(wParam, flag) (((DWORD)HIWORD(wParam) & (flag)) == (flag))
|
||||
#define IS_POINTER_NEW_WPARAM(wParam) IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_NEW)
|
||||
#define IS_POINTER_INRANGE_WPARAM(wParam) \
|
||||
IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_INRANGE)
|
||||
#define IS_POINTER_INCONTACT_WPARAM(wParam) \
|
||||
IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_INCONTACT)
|
||||
#define IS_POINTER_FIRSTBUTTON_WPARAM(wParam) \
|
||||
IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_FIRSTBUTTON)
|
||||
#define IS_POINTER_SECONDBUTTON_WPARAM(wParam) \
|
||||
IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_SECONDBUTTON)
|
||||
#define IS_POINTER_THIRDBUTTON_WPARAM(wParam) \
|
||||
IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_THIRDBUTTON)
|
||||
#define IS_POINTER_FOURTHBUTTON_WPARAM(wParam) \
|
||||
IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_FOURTHBUTTON)
|
||||
#define IS_POINTER_FIFTHBUTTON_WPARAM(wParam) \
|
||||
IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_FIFTHBUTTON)
|
||||
#define IS_POINTER_PRIMARY_WPARAM(wParam) \
|
||||
IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_PRIMARY)
|
||||
#define HAS_POINTER_CONFIDENCE_WPARAM(wParam) \
|
||||
IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_CONFIDENCE)
|
||||
#define IS_POINTER_CANCELED_WPARAM(wParam) \
|
||||
IS_POINTER_FLAG_SET_WPARAM(wParam, POINTER_MESSAGE_FLAG_CANCELED)
|
||||
|
||||
typedef BOOL(WINAPI *GHOST_WIN32_GetPointerInfoHistory)(UINT32 pointerId,
|
||||
UINT32 *entriesCount,
|
||||
POINTER_INFO *pointerInfo);
|
||||
typedef BOOL(WINAPI *GHOST_WIN32_GetPointerPenInfoHistory)(UINT32 pointerId,
|
||||
UINT32 *entriesCount,
|
||||
POINTER_PEN_INFO *penInfo);
|
||||
typedef BOOL(WINAPI *GHOST_WIN32_GetPointerTouchInfoHistory)(UINT32 pointerId,
|
||||
UINT32 *entriesCount,
|
||||
POINTER_TOUCH_INFO *touchInfo);
|
||||
|
||||
struct GHOST_PointerInfoWin32 {
|
||||
GHOST_TInt32 pointerId;
|
||||
GHOST_TInt32 isPrimary;
|
||||
@@ -411,6 +576,9 @@ class GHOST_WindowWin32 : public GHOST_Window {
|
||||
|
||||
/** `user32.dll` handle */
|
||||
HMODULE m_user32;
|
||||
GHOST_WIN32_GetPointerInfoHistory m_fpGetPointerInfoHistory;
|
||||
GHOST_WIN32_GetPointerPenInfoHistory m_fpGetPointerPenInfoHistory;
|
||||
GHOST_WIN32_GetPointerTouchInfoHistory m_fpGetPointerTouchInfoHistory;
|
||||
|
||||
HWND m_parentWindowHwnd;
|
||||
|
||||
|
@@ -77,7 +77,7 @@ class scoped_array {
|
||||
|
||||
void reset(T* new_array) {
|
||||
if (sizeof(T)) {
|
||||
delete[] array_;
|
||||
delete array_;
|
||||
}
|
||||
array_ = new_array;
|
||||
}
|
||||
|
@@ -112,7 +112,7 @@ void MeshTopology::getEdgeVertexIndices(int edge_index, int *v1, int *v2) const
|
||||
|
||||
if (edge_index >= edges_.size()) {
|
||||
*v1 = -1;
|
||||
*v2 = -1;
|
||||
*v1 = -1;
|
||||
return;
|
||||
}
|
||||
|
||||
|
@@ -31,13 +31,9 @@ typedef struct plConvexHull__ {
|
||||
plConvexHull plConvexHullCompute(float (*coords)[3], int count);
|
||||
void plConvexHullDelete(plConvexHull hull);
|
||||
int plConvexHullNumVertices(plConvexHull hull);
|
||||
int plConvexHullNumLoops(plConvexHull hull);
|
||||
int plConvexHullNumFaces(plConvexHull hull);
|
||||
void plConvexHullGetVertex(plConvexHull hull, int n, float coords[3], int *original_index);
|
||||
void plConvexHullGetLoop(plConvexHull hull, int n, int *v_from, int *v_to);
|
||||
int plConvexHullGetReversedLoopIndex(plConvexHull hull, int n);
|
||||
int plConvexHullGetFaceSize(plConvexHull hull, int n);
|
||||
void plConvexHullGetFaceLoops(plConvexHull hull, int n, int *loops);
|
||||
void plConvexHullGetFaceVertices(plConvexHull hull, int n, int *vertices);
|
||||
|
||||
#ifdef __cplusplus
|
||||
|
@@ -39,12 +39,6 @@ int plConvexHullNumVertices(plConvexHull hull)
|
||||
return computer->vertices.size();
|
||||
}
|
||||
|
||||
int plConvexHullNumLoops(plConvexHull hull)
|
||||
{
|
||||
btConvexHullComputer *computer(reinterpret_cast<btConvexHullComputer *>(hull));
|
||||
return computer->edges.size();
|
||||
}
|
||||
|
||||
int plConvexHullNumFaces(plConvexHull hull)
|
||||
{
|
||||
btConvexHullComputer *computer(reinterpret_cast<btConvexHullComputer *>(hull));
|
||||
@@ -61,19 +55,6 @@ void plConvexHullGetVertex(plConvexHull hull, int n, float coords[3], int *origi
|
||||
(*original_index) = computer->original_vertex_index[n];
|
||||
}
|
||||
|
||||
void plConvexHullGetLoop(plConvexHull hull, int n, int *v_from, int *v_to)
|
||||
{
|
||||
btConvexHullComputer *computer(reinterpret_cast<btConvexHullComputer *>(hull));
|
||||
(*v_from) = computer->edges[n].getSourceVertex();
|
||||
(*v_to) = computer->edges[n].getTargetVertex();
|
||||
}
|
||||
|
||||
int plConvexHullGetReversedLoopIndex(plConvexHull hull, int n)
|
||||
{
|
||||
btConvexHullComputer *computer(reinterpret_cast<btConvexHullComputer *>(hull));
|
||||
return computer->edges[n].getReverseEdge() - &computer->edges[0];
|
||||
}
|
||||
|
||||
int plConvexHullGetFaceSize(plConvexHull hull, int n)
|
||||
{
|
||||
btConvexHullComputer *computer(reinterpret_cast<btConvexHullComputer *>(hull));
|
||||
@@ -88,19 +69,6 @@ int plConvexHullGetFaceSize(plConvexHull hull, int n)
|
||||
return count;
|
||||
}
|
||||
|
||||
void plConvexHullGetFaceLoops(plConvexHull hull, int n, int *loops)
|
||||
{
|
||||
btConvexHullComputer *computer(reinterpret_cast<btConvexHullComputer *>(hull));
|
||||
const btConvexHullComputer::Edge *e_orig, *e;
|
||||
int count;
|
||||
|
||||
for (e_orig = &computer->edges[computer->faces[n]], e = e_orig, count = 0;
|
||||
count == 0 || e != e_orig;
|
||||
e = e->getNextEdgeOfFace(), count++) {
|
||||
loops[count] = e - &computer->edges[0];
|
||||
}
|
||||
}
|
||||
|
||||
void plConvexHullGetFaceVertices(plConvexHull hull, int n, int *vertices)
|
||||
{
|
||||
btConvexHullComputer *computer(reinterpret_cast<btConvexHullComputer *>(hull));
|
||||
|
@@ -1,5 +0,0 @@
|
||||
Buildbot Configuration
|
||||
======================
|
||||
|
||||
Files used by Buildbot's `package-code-binaires` step for the darwin platform.
|
||||
|
release/darwin/README.txt (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
|
||||
macOS app bundling guide
|
||||
========================
|
||||
|
||||
Install Code Signing Certificate
|
||||
--------------------------------
|
||||
|
||||
* Go to https://developer.apple.com/account/resources/certificates/list
|
||||
* Download the Developer ID Application certificate.
|
||||
* Double click the file and add to key chain (default options).
|
||||
* Delete the file from the Downloads folder.
|
||||
|
||||
* You will also need to install a .p12 public/private key file for the
|
||||
certificate. This is only available for the owner of the Blender account,
|
||||
or can be exported and copied from another system that already has code
|
||||
signing set up.
|
||||
|
||||
Find the codesigning identity by running:
|
||||
|
||||
$ security find-identity -v -p codesigning
|
||||
|
||||
"Developer ID Application: Stichting Blender Foundation" is the identity needed.
|
||||
The long code at the start of the line is used as <identity> below.
|
||||
|
||||
Setup Apple ID
|
||||
--------------
|
||||
|
||||
* The Apple ID must have two step verification enabled.
|
||||
* Create an app specific password for the code signing app (label can be anything):
|
||||
https://support.apple.com/en-us/HT204397
|
||||
* Add the app specific password to keychain:
|
||||
|
||||
$ security add-generic-password -a <apple-id> -w <app-specific-password> -s altool-password
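
To confirm the item was stored (an optional check, not part of the original guide), the stock security tool can list it back:

$ security find-generic-password -s altool-password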
|
||||
|
||||
When running the bundle script, there will be a popup. To avoid that either:
|
||||
* Click Always Allow in the popup
|
||||
* In the Keychain Access app, change the Access Control settings on altool-password
|
||||
|
||||
Bundle
|
||||
------
|
||||
|
||||
Then the bundle is created as follows:
|
||||
|
||||
$ ./bundle.sh --source <sourcedir> --dmg <dmg> --bundle-id <bundleid> --username <apple-id> --password "@keychain:altool-password" --codesign <identity>
|
||||
|
||||
<sourcedir> directory where built Blender.app is
|
||||
<dmg> location and name of the final disk image
|
||||
<bundleid> id on notarization, for example org.blenderfoundation.blender.release
|
||||
<apple-id> your appleid email
|
||||
<identity> codesigning identity
|
||||
|
||||
When specifying only --sourcedir and --dmg, the build will not be signed.
|
||||
|
||||
Example :
|
||||
$ ./bundle.sh --source /data/build/bin --dmg /data/Blender-2.8-alpha-macOS-10.11.dmg --bundle-id org.blenderfoundation.blender.release --username "foo@mac.com" --password "@keychain:altool-password" --codesign AE825E26F12D08B692F360133210AF46F4CF7B97
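
As an illustrative follow-up (not part of the guide above; the dmg path is the one from the example), the signed and notarized image can be double-checked with the stock macOS tools:

$ codesign --verify --verbose=2 /data/Blender-2.8-alpha-macOS-10.11.dmg
$ xcrun stapler validate /data/Blender-2.8-alpha-macOS-10.11.dmg
$ spctl --assess --type open --context context:primary-signature -v /data/Blender-2.8-alpha-macOS-10.11.dmg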
|
release/darwin/blender.applescript (new file, 18 lines)

@@ -0,0 +1,18 @@
|
||||
tell application "Finder"
|
||||
tell disk "Blender"
|
||||
open
|
||||
set current view of container window to icon view
|
||||
set toolbar visible of container window to false
|
||||
set statusbar visible of container window to false
|
||||
set the bounds of container window to {100, 100, 640, 472}
|
||||
set theViewOptions to icon view options of container window
|
||||
set arrangement of theViewOptions to not arranged
|
||||
set icon size of theViewOptions to 128
|
||||
set background picture of theViewOptions to file ".background:background.tif"
|
||||
set position of item " " of container window to {400, 190}
|
||||
set position of item "blender.app" of container window to {135, 190}
|
||||
update without registering applications
|
||||
delay 5
|
||||
close
|
||||
end tell
|
||||
end tell
|
release/darwin/bundle.sh (new executable file, 212 lines)
@@ -0,0 +1,212 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Script to create a macOS dmg file for Blender builds, including code
|
||||
# signing and notarization for releases.
|
||||
|
||||
# Check that we have all needed tools.
|
||||
for i in osascript git codesign hdiutil xcrun ; do
|
||||
if [ ! -x "$(which ${i})" ]; then
|
||||
echo "Unable to execute command $i, macOS broken?"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
# Defaults settings.
|
||||
_script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||
_volume_name="Blender"
|
||||
_tmp_dir="$(mktemp -d)"
|
||||
_tmp_dmg="/tmp/blender-tmp.dmg"
|
||||
_background_image="${_script_dir}/background.tif"
|
||||
_mount_dir="/Volumes/${_volume_name}"
|
||||
_entitlements="${_script_dir}/entitlements.plist"
|
||||
|
||||
# Handle arguments.
|
||||
while [[ $# -gt 0 ]]; do
|
||||
key=$1
|
||||
case $key in
|
||||
-s|--source)
|
||||
SRC_DIR="$2"
|
||||
shift
|
||||
shift
|
||||
;;
|
||||
-d|--dmg)
|
||||
DEST_DMG="$2"
|
||||
shift
|
||||
shift
|
||||
;;
|
||||
-b|--bundle-id)
|
||||
N_BUNDLE_ID="$2"
|
||||
shift
|
||||
shift
|
||||
;;
|
||||
-u|--username)
|
||||
N_USERNAME="$2"
|
||||
shift
|
||||
shift
|
||||
;;
|
||||
-p|--password)
|
||||
N_PASSWORD="$2"
|
||||
shift
|
||||
shift
|
||||
;;
|
||||
-c|--codesign)
|
||||
C_CERT="$2"
|
||||
shift
|
||||
shift
|
||||
;;
|
||||
--background-image)
|
||||
_background_image="$2"
|
||||
shift
|
||||
shift
|
||||
;;
|
||||
-h|--help)
|
||||
echo "Usage:"
|
||||
echo " $(basename "$0") --source DIR --dmg IMAGENAME "
|
||||
echo " optional arguments:"
|
||||
echo " --codesign <certname>"
|
||||
echo " --username <username>"
|
||||
echo " --password <password>"
|
||||
echo " --bundle-id <bundleid>"
|
||||
echo " Check https://developer.apple.com/documentation/security/notarizing_your_app_before_distribution/customizing_the_notarization_workflow "
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ ! -d "${SRC_DIR}/Blender.app" ]; then
|
||||
echo "use --source parameter to set source directory where Blender.app can be found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "${DEST_DMG}" ]; then
|
||||
echo "use --dmg parameter to set output dmg name"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Destroy destination dmg if there is any.
|
||||
test -f "${DEST_DMG}" && rm "${DEST_DMG}"
|
||||
if [ -d "${_mount_dir}" ]; then
|
||||
echo -n "Ejecting existing blender volume.."
|
||||
DEV_FILE=$(mount | grep "${_mount_dir}" | awk '{ print $1 }')
|
||||
diskutil eject "${DEV_FILE}" || exit 1
|
||||
echo
|
||||
fi
|
||||
|
||||
# Copy dmg contents.
|
||||
echo -n "Copying Blender.app..."
|
||||
cp -r "${SRC_DIR}/Blender.app" "${_tmp_dir}/" || exit 1
|
||||
echo
|
||||
|
||||
# Create the disk image.
|
||||
_directory_size=$(du -sh ${_tmp_dir} | awk -F'[^0-9]*' '$0=$1')
|
||||
_image_size=$(echo "${_directory_size}" + 400 | bc) # extra 400 need for codesign to work (why on earth?)
|
||||
|
||||
echo
|
||||
echo -n "Creating disk image of size ${_image_size}M.."
|
||||
test -f "${_tmp_dmg}" && rm "${_tmp_dmg}"
|
||||
hdiutil create -size "${_image_size}m" -fs HFS+ -srcfolder "${_tmp_dir}" -volname "${_volume_name}" -format UDRW "${_tmp_dmg}" -mode 755
|
||||
|
||||
echo "Mounting readwrite image..."
|
||||
hdiutil attach -readwrite -noverify -noautoopen "${_tmp_dmg}"
|
||||
|
||||
echo "Setting background picture.."
|
||||
if ! test -z "${_background_image}"; then
|
||||
echo "Copying background image ..."
|
||||
test -d "${_mount_dir}/.background" || mkdir "${_mount_dir}/.background"
|
||||
_background_image_NAME=$(basename "${_background_image}")
|
||||
cp "${_background_image}" "${_mount_dir}/.background/${_background_image_NAME}"
|
||||
fi
|
||||
|
||||
echo "Creating link to /Applications ..."
|
||||
ln -s /Applications "${_mount_dir}/Applications"
|
||||
echo "Renaming Applications to empty string."
|
||||
mv ${_mount_dir}/Applications "${_mount_dir}/ "
|
||||
|
||||
echo "Running applescript to set folder looks ..."
|
||||
cat "${_script_dir}/blender.applescript" | osascript
|
||||
|
||||
echo "Waiting after applescript ..."
|
||||
sleep 5
|
||||
|
||||
if [ ! -z "${C_CERT}" ]; then
|
||||
# Codesigning requires all libs and binaries to be signed separately.
|
||||
echo -n "Codesigning Python"
|
||||
for f in $(find "${_mount_dir}/Blender.app/Contents/Resources" -name "python*"); do
|
||||
if [ -x ${f} ] && [ ! -d ${f} ]; then
|
||||
codesign --remove-signature "${f}"
|
||||
codesign --timestamp --options runtime --entitlements="${_entitlements}" --sign "${C_CERT}" "${f}"
|
||||
fi
|
||||
done
|
||||
echo ; echo -n "Codesigning .dylib and .so libraries"
|
||||
for f in $(find "${_mount_dir}/Blender.app" -name "*.dylib" -o -name "*.so"); do
|
||||
codesign --remove-signature "${f}"
|
||||
codesign --timestamp --options runtime --entitlements="${_entitlements}" --sign "${C_CERT}" "${f}"
|
||||
done
|
||||
echo ; echo -n "Codesigning Blender.app"
|
||||
codesign --remove-signature "${_mount_dir}/Blender.app"
|
||||
codesign --timestamp --options runtime --entitlements="${_entitlements}" --sign "${C_CERT}" "${_mount_dir}/Blender.app"
|
||||
echo
|
||||
else
|
||||
echo "No codesigning cert given, skipping..."
|
||||
fi
|
||||
|
||||
# Need to eject dev files to remove /dev files and free .dmg for converting
|
||||
echo "Unmounting rw disk image ..."
|
||||
DEV_FILE=$(mount | grep "${_mount_dir}" | awk '{ print $1 }')
|
||||
diskutil eject "${DEV_FILE}"
|
||||
|
||||
sleep 3
|
||||
|
||||
echo "Compressing disk image ..."
|
||||
hdiutil convert "${_tmp_dmg}" -format UDZO -o "${DEST_DMG}"
|
||||
|
||||
# Codesign the dmg
|
||||
if [ ! -z "${C_CERT}" ]; then
|
||||
echo -n "Codesigning dmg..."
|
||||
codesign --timestamp --force --sign "${C_CERT}" "${DEST_DMG}"
|
||||
echo
|
||||
fi
|
||||
|
||||
# Cleanup
|
||||
rm -rf "${_tmp_dir}"
|
||||
rm "${_tmp_dmg}"
|
||||
|
||||
# Notarize
|
||||
if [ ! -z "${N_USERNAME}" ] && [ ! -z "${N_PASSWORD}" ] && [ ! -z "${N_BUNDLE_ID}" ]; then
|
||||
# Send to Apple
|
||||
echo "Sending ${DEST_DMG} for notarization..."
|
||||
_tmpout=$(mktemp)
|
||||
echo xcrun altool --notarize-app --verbose -f "${DEST_DMG}" --primary-bundle-id "${N_BUNDLE_ID}" --username "${N_USERNAME}" --password "${N_PASSWORD}"
|
||||
xcrun altool --notarize-app --verbose -f "${DEST_DMG}" --primary-bundle-id "${N_BUNDLE_ID}" --username "${N_USERNAME}" --password "${N_PASSWORD}" >${_tmpout} 2>&1
|
||||
|
||||
# Parse request uuid
|
||||
_requuid=$(cat "${_tmpout}" | grep "RequestUUID" | awk '{ print $3 }')
|
||||
echo "RequestUUID: ${_requuid}"
|
||||
if [ ! -z "${_requuid}" ]; then
|
||||
# Wait for Apple to confirm notarization is complete
|
||||
echo "Waiting for notarization to be complete.."
|
||||
for c in {20..0};do
|
||||
sleep 600
|
||||
xcrun altool --notarization-info "${_requuid}" --username "${N_USERNAME}" --password "${N_PASSWORD}" >${_tmpout} 2>&1
|
||||
_status=$(cat "${_tmpout}" | grep "Status:" | awk '{ print $2 }')
|
||||
if [ "${_status}" == "invalid" ]; then
|
||||
echo "Got invalid notarization!"
|
||||
break;
|
||||
fi
|
||||
|
||||
if [ "${_status}" == "success" ]; then
|
||||
echo -n "Notarization successful! Stapling..."
|
||||
xcrun stapler staple -v "${DEST_DMG}"
|
||||
break;
|
||||
fi
|
||||
echo "Notarization in progress, waiting..."
|
||||
done
|
||||
else
|
||||
cat ${_tmpout}
|
||||
echo "Error getting RequestUUID, notarization unsuccessful"
|
||||
fi
|
||||
else
|
||||
echo "No notarization credentials supplied, skipping..."
|
||||
fi
|
||||
|
||||
echo "..done. You should have ${DEST_DMG} ready to upload"
|
Submodule release/datafiles/locale updated: 4833954c0a...5ab29b1331
@@ -40,25 +40,6 @@
|
||||
</screenshot>
|
||||
</screenshots>
|
||||
<releases>
|
||||
<release version="2.93" date="2021-06-02">
|
||||
<description>
|
||||
<p>New features:</p>
|
||||
<ul>
|
||||
<li>Mesh primitive nodes</li>
|
||||
<li>Line Art</li>
|
||||
<li>EEVEE Realistic depth of field and volumetrics</li>
|
||||
<li>Spreadsheet editor</li>
|
||||
</ul>
|
||||
<p>Enhancements:</p>
|
||||
<ul>
|
||||
<li>Geometry nodes: 22 new nodes and improved attribute search</li>
|
||||
<li>Mask loops, textures and patterns for sculpting</li>
|
||||
<li>Grease pencil interpolate refactored and SVG and PDF support</li>
|
||||
<li>Persistent Data rendering settings for Cycles</li>
|
||||
<li>Video Sequencer Editor auto-proxy system</li>
|
||||
</ul>
|
||||
</description>
|
||||
</release>
|
||||
<release version="2.92" date="2021-02-25">
|
||||
<description>
|
||||
<p>New features:</p>
|
||||
|
@@ -1,17 +0,0 @@
|
||||
Snap Configuration
|
||||
===================
|
||||
|
||||
Files used by Buildbot's `package-code-store-snap` and `deliver-code-store-snap` steps.
|
||||
|
||||
Build pipeline snap tracks and channels
|
||||
|
||||
```
|
||||
<track>/stable
|
||||
- Latest stable release for the specified track
|
||||
<track>/candidate
|
||||
- Test builds for the upcoming stable release - *not used for now*
|
||||
<track>/beta
|
||||
- Nightly automated builds provided by a release branch
|
||||
<track>/edge/<branch>
|
||||
- Nightly or on demand builds - will also make use of branch
|
||||
```
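
For illustration only (not part of this file), a channel from the scheme above would be consumed on a user machine like:

```
sudo snap install blender --classic --channel=<track>/stable
```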
|
release/freedesktop/snap/README.txt (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
|
||||
Snap Package Instructions
|
||||
=========================
|
||||
|
||||
This folder contains the scripts for creating and uploading the snap on:
|
||||
https://snapcraft.io/blender
|
||||
|
||||
|
||||
Setup
|
||||
-----
|
||||
|
||||
This has only been tested to work on Ubuntu.
|
||||
|
||||
# Install required packages
|
||||
sudo apt install snapd snapcraft
|
||||
|
||||
|
||||
Steps
|
||||
-----
|
||||
|
||||
# Build the snap file
|
||||
python3 bundle.py --version 2.XX --url https://download.blender.org/release/Blender2.XX/blender-2.XX-x86_64.tar.bz2
|
||||
|
||||
# Install snap to test
|
||||
# --dangerous is needed since the snap has not been signed yet
|
||||
# --classic is required for installing Blender in general
|
||||
sudo snap install --dangerous --classic blender_2.XX_amd64.snap
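
# Optional check (not part of the original steps): confirm the locally
# installed revision and the channels published on snapcraft.io
snap list blender
snap info blender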
|
||||
|
||||
# Upload
|
||||
snapcraft push --release=stable blender_2.XX_amd64.snap
|
||||
|
||||
|
||||
Release Values
|
||||
--------------
|
||||
|
||||
stable: final release
|
||||
candidate: release candidates
|
||||
|
release/freedesktop/snap/bundle.py (new executable file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import pathlib
|
||||
import subprocess
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--version", required=True)
|
||||
parser.add_argument("--url", required=True)
|
||||
parser.add_argument("--grade", default="stable", choices=["stable", "devel"])
|
||||
args = parser.parse_args()
|
||||
|
||||
yaml_text = pathlib.Path("snapcraft.yaml.in").read_text()
|
||||
yaml_text = yaml_text.replace("@VERSION@", args.version)
|
||||
yaml_text = yaml_text.replace("@URL@", args.url)
|
||||
yaml_text = yaml_text.replace("@GRADE@", args.grade)
|
||||
pathlib.Path("snapcraft.yaml").write_text(yaml_text)
|
||||
|
||||
subprocess.call(["snapcraft", "clean"])
|
||||
subprocess.call(["snapcraft", "snap"])
|
@@ -10,7 +10,12 @@ description: |
|
||||
scientists, students, VFX experts, animators, game artists, modders, and
|
||||
the list goes on.
|
||||
|
||||
icon: @ICON_PATH@
|
||||
The standard snap channels are used in the following way:
|
||||
|
||||
stable - Latest stable release.
|
||||
candidate - Test builds for the upcoming stable release.
|
||||
|
||||
icon: ../icons/scalable/apps/blender.svg
|
||||
|
||||
passthrough:
|
||||
license: GPL-3.0
|
||||
@@ -22,14 +27,13 @@ apps:
|
||||
command: ./blender-wrapper
|
||||
desktop: ./blender.desktop
|
||||
|
||||
base: core18
|
||||
version: '@VERSION@'
|
||||
grade: @GRADE@
|
||||
|
||||
parts:
|
||||
blender:
|
||||
plugin: dump
|
||||
source: @PACKAGE_PATH@
|
||||
source: @URL@
|
||||
build-attributes: [keep-execstack, no-patchelf]
|
||||
override-build: |
|
||||
snapcraftctl build
|
||||
@@ -43,7 +47,7 @@ parts:
|
||||
- libxrender1
|
||||
- libxxf86vm1
|
||||
wrapper:
|
||||
plugin: dump
|
||||
plugin: copy
|
||||
source: .
|
||||
stage:
|
||||
- ./blender-wrapper
|
||||
files:
|
||||
blender-wrapper: blender-wrapper
|
Submodule release/scripts/addons updated: f86f25e622...4fcdbfe7c2
Submodule release/scripts/addons_contrib updated: 5a82baad9f...7d78c8a63f
@@ -49,16 +49,16 @@ def _initialize():
|
||||
|
||||
def paths():
|
||||
# RELEASE SCRIPTS: official scripts distributed in Blender releases
|
||||
addon_paths = _bpy.utils.script_paths(subdir="addons")
|
||||
addon_paths = _bpy.utils.script_paths("addons")
|
||||
|
||||
# CONTRIB SCRIPTS: good for testing but not official scripts yet
|
||||
# if folder addons_contrib/ exists, scripts in there will be loaded too
|
||||
addon_paths += _bpy.utils.script_paths(subdir="addons_contrib")
|
||||
addon_paths += _bpy.utils.script_paths("addons_contrib")
|
||||
|
||||
return addon_paths
|
||||
|
||||
|
||||
def modules_refresh(*, module_cache=addons_fake_modules):
|
||||
def modules_refresh(module_cache=addons_fake_modules):
|
||||
global error_encoding
|
||||
import os
|
||||
|
||||
@@ -203,9 +203,9 @@ def modules_refresh(*, module_cache=addons_fake_modules):
|
||||
del modules_stale
|
||||
|
||||
|
||||
def modules(*, module_cache=addons_fake_modules, refresh=True):
|
||||
def modules(module_cache=addons_fake_modules, *, refresh=True):
|
||||
if refresh or ((module_cache is addons_fake_modules) and modules._is_first):
|
||||
modules_refresh(module_cache=module_cache)
|
||||
modules_refresh(module_cache)
|
||||
modules._is_first = False
|
||||
|
||||
mod_list = list(module_cache.values())
|
||||
@@ -512,7 +512,7 @@ def _blender_manual_url_prefix():
|
||||
return "https://docs.blender.org/manual/en/" + manual_version
|
||||
|
||||
|
||||
def module_bl_info(mod, *, info_basis=None):
|
||||
def module_bl_info(mod, info_basis=None):
|
||||
if info_basis is None:
|
||||
info_basis = {
|
||||
"name": "",
|
||||
|
@@ -134,7 +134,7 @@ def _disable(template_id, *, handle_error=None):
|
||||
print("\tapp_template_utils.disable", template_id)
|
||||
|
||||
|
||||
def import_from_path(path, *, ignore_not_found=False):
|
||||
def import_from_path(path, ignore_not_found=False):
|
||||
import os
|
||||
from importlib import import_module
|
||||
base_module, template_id = path.rsplit(os.sep, 2)[-2:]
|
||||
@@ -148,9 +148,9 @@ def import_from_path(path, *, ignore_not_found=False):
|
||||
raise ex
|
||||
|
||||
|
||||
def import_from_id(template_id, *, ignore_not_found=False):
|
||||
def import_from_id(template_id, ignore_not_found=False):
|
||||
import os
|
||||
path = next(iter(_bpy.utils.app_template_paths(path=template_id)), None)
|
||||
path = next(iter(_bpy.utils.app_template_paths(template_id)), None)
|
||||
if path is None:
|
||||
if ignore_not_found:
|
||||
return None
|
||||
@@ -163,7 +163,7 @@ def import_from_id(template_id, *, ignore_not_found=False):
|
||||
return import_from_path(path, ignore_not_found=ignore_not_found)
|
||||
|
||||
|
||||
def activate(*, template_id=None):
|
||||
def activate(template_id=None):
|
||||
template_id_prev = _app_template["id"]
|
||||
|
||||
# not needed but may as well avoids redundant
|
||||
@@ -190,4 +190,4 @@ def reset(*, reload_scripts=False):
|
||||
|
||||
# TODO reload_scripts
|
||||
|
||||
activate(template_id=template_id)
|
||||
activate(template_id)
|
||||
|
@@ -26,7 +26,7 @@ __all__ = (
|
||||
)
|
||||
|
||||
|
||||
def generate(context, space_type, *, use_fallback_keys=True, use_reset=True):
|
||||
def generate(context, space_type, use_fallback_keys=True, use_reset=True):
|
||||
"""
|
||||
Keymap for popup toolbar, currently generated each time.
|
||||
"""
|
||||
|
@@ -17,7 +17,7 @@
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
|
||||
def keyconfig_data_oskey_from_ctrl(keyconfig_data_src, *, filter_fn=None):
|
||||
def keyconfig_data_oskey_from_ctrl(keyconfig_data_src, filter_fn=None):
|
||||
keyconfig_data_dst = []
|
||||
for km_name, km_parms, km_items_data_src in keyconfig_data_src:
|
||||
km_items_data_dst = km_items_data_src.copy()
|
||||
@@ -61,4 +61,4 @@ def keyconfig_data_oskey_from_ctrl_for_macos(keyconfig_data_src):
|
||||
return False
|
||||
return True
|
||||
|
||||
return keyconfig_data_oskey_from_ctrl(keyconfig_data_src, filter_fn=filter_fn)
|
||||
return keyconfig_data_oskey_from_ctrl(keyconfig_data_src, filter_fn)
|
||||
|
@@ -19,7 +19,7 @@
|
||||
# <pep8-80 compliant>
|
||||
|
||||
|
||||
def url_prefill_from_blender(*, addon_info=None):
|
||||
def url_prefill_from_blender(addon_info=None):
|
||||
import bpy
|
||||
import gpu
|
||||
import struct
|
||||
|
Some files were not shown because too many files have changed in this diff.