Compare commits: temp-icons...temp-exper (6 commits)

Commits (SHA1):
b3d34a6690
3c112c5e46
28f99dda6c
98262bb8cf
3e251da3aa
a37164e11b
.clang-tidy
@@ -39,8 +39,9 @@ Checks: >
-modernize-use-nodiscard,
-modernize-loop-convert,
-modernize-pass-by-value,
-modernize-use-default-member-init,
-modernize-raw-string-literal,
-modernize-avoid-bind,
-modernize-use-transparent-functors,

WarningsAsErrors: '*'
CheckOptions:
  - key: modernize-use-default-member-init.UseAssignment
    value: 1

.git-blame-ignore-revs
@@ -10,9 +10,8 @@
# Changes that belong here:
# - Massive comment, doxy-sections, or spelling corrections.
# - Clang-format, PEP8 or other automated changes which are *strictly* "no functional change".
# - Several commits should be added to this list at once, because adding
# one extra commit (to edit this file) after every cleanup is noisy.
# - No clang-tidy changes.
# - Several smaller commits should be added to this list at once, because adding
# one extra commit (to edit this file) after every small cleanup is noisy.
#
# Note:
# - The comment above the SHA should be the first line of the commit.
@@ -93,12 +92,78 @@ c42a6b77b52560d257279de2cb624b4ef2c0d24c

# Cleanup: use doxy sections for imbuf
c207f7c22e1439e0b285fba5d2c072bdae23f981

# Cleanup: Clang-Tidy, modernize-use-bool-literals
af35ada2f3fa8da4d46b3a71de724d353d716820

# Cleanup: Use nullptr everywhere in fluid code
311031ecd03dbfbf43e1df672a395f24b2e7d4d3

# Cleanup: Clang-Tidy, modernize-redundant-void-arg
a331d5c99299c4514ca33c843b1c79b872f2728d

# Cleanup: Clang-Tidy modernize-use-nullptr
16732def37c5a66f3ea28dbe247b09cc6bca6677

# Cleanup: Clang-tidy, modernize-concat-nested-namespaces
4525049aa0cf818f6483dce589ac9791eb562338

# Cleanup: Clang-tidy else-after-return
ae342ed4511cf2e144dcd27ce2c635d3d536f9ad

# Cleanup: Clang-Tidy, readability-redundant-member-init
190170d4cc92ff34abe1744a10474ac4f1074086

# Cleanup: use 'filepath' instead of 'name' for ImBuf utilities
99f56b4c16323f96c0cbf54e392fb509fcac5bda

# Cleanup: clang-format
c4d8f6a4a8ddc29ed27311ed7578b3c8c31399d2
b5d310b569e07a937798a2d38539cfd290149f1c
8c846cccd6bdfd3e90a695fabbf05f53e5466a57
4eac03d821fa17546f562485f7d073813a5e5943
1166110a9d66af9c5a47cee2be591f50fdc445e8

# Cleanup: clang-format.
40d4a4cb1a6b4c3c2a486e8f2868f547530e0811
4eac03d821fa17546f562485f7d073813a5e5943

# Cleanup: use preprocessor version check for PyTypeObject declaration
cd9acfed4f7674b84be965d469a367aef96f8af3

# Cycles: fix compilation of OSL shaders following API change
b980cd163a9d5d77eeffc2e353333e739fa9e719

# Cleanup: clang-tidy suppress warnings for PyTypeObject.tp_print
efd71aad4f22ec0073d80b8dd296015d3f395aa8

# Cleanup: fix wrong merge, remove extra unique_ptr.
6507449e54a167c63a72229e4d0119dd2af68ae5

# Cleanup: fix some clang tidy issues
525a042c5c7513c41240b118acca002f6c60cc12

# Fix T82520: error building freestyle with Python3.8
e118426e4695a97d67e65d69677f3c4e2db50a56

# Cleanup: Clang-tidy, readability-else-after-return
7be47dadea5066ae095c644e0b4f1f10d75f5ab3

# Cleanup: Add `r_` to return parameter
45dca05b1cd2a5ead59144c93d790fdfe7c35ee6

# Cleanup: Typo in `print_default_info` function name.
41a73909dec716642f044e60b40a28335c9fdb10

# Cleanup: Reduce indentation
1cc3a0e2cf73a5ff4f9e0a7f5338eda77266b300

# Build-system: Force C linkage for all DNA type headers
ad4b7741dba45a2be210942c18af6b6e4438f129

# Cleanup: Move function to proper section
c126e27cdc8b28365a9d5f9fafc4d521d1eb83df

# Cleanup: remove break after return statements
bbdfeb751e16d939482d2e4b95c4d470f53f18a5

# Cleanup: clang-tidy
af013ff76feef7e8b8ba642279c62a5dc275d59f

# Cleanup: Make panel type flag names more clear
9d28353b525ecfbcca1501be72e4276dfb2bbc2a

CMakeLists.txt (113 changed lines)

@@ -63,9 +63,6 @@ list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/build_files/cmake/platform")
# avoid having empty buildtype
if(NOT DEFINED CMAKE_BUILD_TYPE_INIT)
set(CMAKE_BUILD_TYPE_INIT "Release")
# Internal logic caches this variable, avoid showing it by default
# since it's easy to accidentally set instead of the build type.
mark_as_advanced(CMAKE_BUILD_TYPE_INIT)
endif()

# Omit superfluous "Up-to-date" messages.

@@ -167,6 +164,10 @@ if(APPLE)
endif()

option(WITH_BUILDINFO "Include extra build details (only disable for development & faster builds)" ON)
if(${CMAKE_VERSION} VERSION_LESS 2.8.8)
# add_library OBJECT arg unsupported
set(WITH_BUILDINFO OFF)
endif()
set(BUILDINFO_OVERRIDE_DATE "" CACHE STRING "Use instead of the current date for reproducible builds (empty string disables this option)")
set(BUILDINFO_OVERRIDE_TIME "" CACHE STRING "Use instead of the current time for reproducible builds (empty string disables this option)")
set(CPACK_OVERRIDE_PACKAGENAME "" CACHE STRING "Use instead of the standard packagename (empty string disables this option)")
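
The BUILDINFO_OVERRIDE_* cache variables above exist to make reproducible builds possible by pinning the embedded build date and time. A minimal sketch of setting them at configure time; the values and their exact expected format are placeholders, not taken from this diff:

    # Hypothetical configure-time pin for reproducible builds; the date/time
    # format the build-info machinery expects is an assumption here.
    set(BUILDINFO_OVERRIDE_DATE "2021-03-01" CACHE STRING "" FORCE)
    set(BUILDINFO_OVERRIDE_TIME "00:00:00" CACHE STRING "" FORCE)
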

@@ -204,7 +205,6 @@ option(WITH_OPENVDB_BLOSC "Enable blosc compression for OpenVDB, only enable if
option(WITH_OPENVDB_3_ABI_COMPATIBLE "Assume OpenVDB library has been compiled with version 3 ABI compatibility" OFF)
mark_as_advanced(WITH_OPENVDB_3_ABI_COMPATIBLE)
option(WITH_NANOVDB "Enable usage of NanoVDB data structure for rendering on the GPU" ON)
option(WITH_HARU "Enable features relying on Libharu (Grease pencil PDF export)" ON)

# GHOST Windowing Library Options
option(WITH_GHOST_DEBUG "Enable debugging output for the GHOST library" OFF)

@@ -370,8 +370,8 @@ if(WITH_PYTHON_INSTALL)
endif()
endif()

option(WITH_CPU_SIMD "Enable SIMD instruction if they're detected on the host machine" ON)
mark_as_advanced(WITH_CPU_SIMD)
option(WITH_CPU_SSE "Enable SIMD instruction if they're detected on the host machine" ON)
mark_as_advanced(WITH_CPU_SSE)

# Cycles
option(WITH_CYCLES "Enable Cycles Render Engine" ON)

@@ -613,7 +613,6 @@ endif()
if(UNIX)
# See WITH_WINDOWS_SCCACHE for Windows.
option(WITH_COMPILER_CCACHE "Use ccache to improve rebuild times (Works with Ninja, Makefiles and Xcode)" OFF)
mark_as_advanced(WITH_COMPILER_CCACHE)
endif()

# The following only works with the Ninja generator in CMake >= 3.0.

@@ -728,12 +727,8 @@ set_and_warn_dependency(WITH_TBB WITH_MOD_FLUID OFF)
# NanoVDB requires OpenVDB to convert the data structure
set_and_warn_dependency(WITH_OPENVDB WITH_NANOVDB OFF)

# OpenVDB and OpenColorIO uses 'half' type from OpenEXR
# OpenVDB uses 'half' type from OpenEXR & fails to link without OpenEXR enabled.
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENVDB OFF)
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENCOLORIO OFF)

# Haru needs `TIFFFaxBlackCodes` & `TIFFFaxWhiteCodes` symbols from TIFF.
set_and_warn_dependency(WITH_IMAGE_TIFF WITH_HARU OFF)
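
The `set_and_warn_dependency()` calls above come from Blender's CMake macros, which are not part of this diff. Roughly, the helper disables a dependent option (with a warning) when its prerequisite is off; a minimal sketch under that assumption, not the actual implementation:

    # Assumed shape: if `_dependency` is disabled while `_setting` is
    # enabled, warn and force `_setting` to `_val` so the configuration
    # stays consistent.
    macro(set_and_warn_dependency _dependency _setting _val)
      if(NOT ${${_dependency}} AND ${${_setting}})
        message(STATUS "${_dependency} is disabled, forcing ${_setting}=${_val}")
        set(${_setting} ${_val})
      endif()
    endmacro()
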

# auto enable openimageio for cycles
if(WITH_CYCLES)

@@ -775,6 +770,14 @@ if(WITH_GHOST_SDL OR WITH_HEADLESS)
set(WITH_XR_OPENXR OFF)
endif()

if(WITH_CPU_SSE)
TEST_SSE_SUPPORT(COMPILER_SSE_FLAG COMPILER_SSE2_FLAG)
else()
message(STATUS "SSE and SSE2 optimizations are DISABLED!")
set(COMPILER_SSE_FLAG)
set(COMPILER_SSE2_FLAG)
endif()

if(WITH_BUILDINFO)
find_package(Git)
if(NOT GIT_FOUND)

@@ -823,8 +826,8 @@ if(WITH_PYTHON)
# Do this before main 'platform_*' checks,
# because UNIX will search for the old Python paths which may not exist.
# giving errors about missing paths before this case is met.
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.9")
message(FATAL_ERROR "At least Python 3.9 is required to build")
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.7")
message(FATAL_ERROR "At least Python 3.7 is required to build")
endif()

file(GLOB RESULT "${CMAKE_SOURCE_DIR}/release/scripts/addons")

@@ -954,55 +957,22 @@ if(WITH_INTERNATIONAL)
endif()
endif()

# See TEST_SSE_SUPPORT() and TEST_NEON_SUPPORT() for how these are defined.
#
# This is done globally, so that all modules can use it if available, and
# because these are used in headers used by many modules.
if(WITH_CPU_SIMD)
set(COMPILER_SSE_FLAG)
set(COMPILER_SSE2_FLAG)
# See TEST_SSE_SUPPORT() for how this is defined.

# Test Neon first since macOS Arm can compile and run x86-64 SSE binaries.
TEST_NEON_SUPPORT()
if(SUPPORT_NEON_BUILD)
# Neon
if(SSE2NEON_FOUND)
blender_include_dirs_sys("${SSE2NEON_INCLUDE_DIRS}")
add_definitions(-DWITH_SSE2NEON)
endif()
else()
# SSE
TEST_SSE_SUPPORT(COMPILER_SSE_FLAG COMPILER_SSE2_FLAG)
if(SUPPORT_SSE_BUILD)
string(PREPEND PLATFORM_CFLAGS "${COMPILER_SSE_FLAG} ")
add_definitions(-D__SSE__ -D__MMX__)
endif()
if(SUPPORT_SSE2_BUILD)
string(APPEND PLATFORM_CFLAGS " ${COMPILER_SSE2_FLAG}")
add_definitions(-D__SSE2__)
if(NOT SUPPORT_SSE_BUILD) # don't double up
add_definitions(-D__MMX__)
endif()
endif()
endif()

# Print instructions used
if(SUPPORT_NEON_BUILD)
if(SSE2NEON_FOUND)
message(STATUS "Neon SIMD instructions enabled")
else()
message(STATUS "Neon SIMD instructions detected but unused, requires sse2neon")
endif()
elseif(SUPPORT_SSE2_BUILD)
message(STATUS "SSE2 SIMD instructions enabled")
elseif(SUPPORT_SSE_BUILD)
message(STATUS "SSE SIMD instructions enabled")
else()
message(STATUS "No SIMD instructions detected")
endif()
else()
message(STATUS "SIMD instructions disabled")
# Do it globally, SSE2 is required for quite some time now.
# Doing it now allows to use SSE/SSE2 in inline headers.
if(SUPPORT_SSE_BUILD)
string(PREPEND PLATFORM_CFLAGS "${COMPILER_SSE_FLAG} ")
add_definitions(-D__SSE__ -D__MMX__)
endif()
if(SUPPORT_SSE2_BUILD)
string(APPEND PLATFORM_CFLAGS " ${COMPILER_SSE2_FLAG}")
add_definitions(-D__SSE2__)
if(NOT SUPPORT_SSE_BUILD) # don't double up
add_definitions(-D__MMX__)
endif()
endif()
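
TEST_SSE_SUPPORT() and TEST_NEON_SUPPORT(), used throughout the block above, are defined in Blender's own CMake test macros outside this diff. A rough sketch of the kind of probe such a macro performs, assuming it builds and runs a small intrinsics program via CMake's stock check module:

    include(CheckCSourceRuns)

    # Assumed sketch of an SSE2 probe; the real macro also reports the
    # working compiler flags back through its output arguments, and the
    # -msse/-msse2 flags shown are GCC/Clang specific.
    set(CMAKE_REQUIRED_FLAGS "-msse -msse2")
    check_c_source_runs("
      #include <emmintrin.h>
      int main(void) { __m128d v = _mm_setzero_pd(); (void)v; return 0; }"
      SUPPORT_SSE2_BUILD)
    unset(CMAKE_REQUIRED_FLAGS)
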

# set the endian define
if(MSVC)

@@ -1048,9 +1018,6 @@ if(WITH_OPENVDB)
list(APPEND OPENVDB_DEFINITIONS -DOPENVDB_3_ABI_COMPATIBLE)
endif()

# OpenVDB headers use deprecated TBB headers, silence warning.
list(APPEND OPENVDB_DEFINITIONS -DTBB_SUPPRESS_DEPRECATED_MESSAGES=1)

list(APPEND OPENVDB_INCLUDE_DIRS
${BOOST_INCLUDE_DIR}
${TBB_INCLUDE_DIRS}

@@ -1466,7 +1433,6 @@ if(CMAKE_COMPILER_IS_GNUCC)
# gcc 4.2 gives annoying warnings on every file with this
if(NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "4.3")
ADD_CHECK_C_COMPILER_FLAG(C_WARNINGS C_WARN_UNINITIALIZED -Wuninitialized)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_WARNINGS CXX_WARN_UNINITIALIZED -Wuninitialized)
endif()

# versions before gcc4.6 give many BLI_math warnings

@@ -1531,13 +1497,11 @@ if(CMAKE_COMPILER_IS_GNUCC)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_FORMAT -Wno-format)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_SWITCH -Wno-switch)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_UNUSED_VARIABLE -Wno-unused-variable)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_UNUSED_VARIABLE -Wno-uninitialized)

ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_CLASS_MEMACCESS -Wno-class-memaccess)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_COMMENT -Wno-comment)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_TYPEDEFS -Wno-unused-local-typedefs)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_VARIABLE -Wno-unused-variable)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_VARIABLE -Wno-uninitialized)

if(CMAKE_COMPILER_IS_GNUCC AND (NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "7.0"))
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_IMPLICIT_FALLTHROUGH -Wno-implicit-fallthrough)

@@ -1799,20 +1763,8 @@ if(WITH_BLENDER)
# internal and external library information first, for test linking
add_subdirectory(source)
elseif(WITH_CYCLES_STANDALONE)
add_subdirectory(intern/glew-mx)
add_subdirectory(intern/guardedalloc)
add_subdirectory(intern/libc_compat)
add_subdirectory(intern/numaapi)
add_subdirectory(intern/sky)

add_subdirectory(intern/cycles)
add_subdirectory(extern/clew)
if(WITH_CYCLES_LOGGING)
if(NOT WITH_SYSTEM_GFLAGS)
add_subdirectory(extern/gflags)
endif()
add_subdirectory(extern/glog)
endif()
if(WITH_CUDA_DYNLOAD)
add_subdirectory(extern/cuew)
endif()

@@ -1885,7 +1837,6 @@ if(FIRST_RUN)
info_cfg_option(WITH_FFTW3)
info_cfg_option(WITH_FREESTYLE)
info_cfg_option(WITH_GMP)
info_cfg_option(WITH_HARU)
info_cfg_option(WITH_IK_ITASC)
info_cfg_option(WITH_IK_SOLVER)
info_cfg_option(WITH_INPUT_NDOF)

@@ -1894,8 +1845,6 @@ if(FIRST_RUN)
info_cfg_option(WITH_OPENCOLORIO)
info_cfg_option(WITH_OPENIMAGEDENOISE)
info_cfg_option(WITH_OPENVDB)
info_cfg_option(WITH_POTRACE)
info_cfg_option(WITH_PUGIXML)
info_cfg_option(WITH_QUADRIFLOW)
info_cfg_option(WITH_TBB)
info_cfg_option(WITH_USD)

GNUmakefile (35 changed lines)
@@ -26,31 +26,26 @@

define HELP_TEXT

Blender Convenience Targets
Convenience Targets
Provided for building Blender (multiple at once can be used).

* debug: Build a debug binary.
* full: Enable all supported dependencies & options.
* lite: Disable non essential features for a smaller binary and faster build.
* release: Complete build with all options enabled including CUDA and Optix, matching the releases on blender.org
* release Complete build with all options enabled including CUDA and Optix, matching the releases on blender.org
* headless: Build without an interface (renderfarm or server automation).
* cycles: Build Cycles standalone only, without Blender.
* bpy: Build as a python module which can be loaded from python directly.
* deps: Build library dependencies (intended only for platform maintainers).

* developer: Enable faster builds, error checking and tests, recommended for developers.
* config: Run cmake configuration tool to set build options.
* ninja: Use ninja build tool for faster builds.
* ccache: Use ccache for faster rebuilds.

Note: passing the argument 'BUILD_DIR=path' when calling make will override the default build dir.
Note: passing the argument 'BUILD_CMAKE_ARGS=args' lets you add cmake arguments.

Other Convenience Targets
Provided for other building operations.

* config: Run cmake configuration tool to set build options.
* deps: Build library dependencies (intended only for platform maintainers).

The existence of locally built dependencies overrides the pre-built dependencies from subversion.
These must be manually removed from '../lib/' to go back to using the pre-compiled libraries.

Project Files
Generate project files for development environments.

@@ -90,15 +85,12 @@ Static Source Code Checking

* check_descriptions: Check for duplicate/invalid descriptions.

Spell Checkers
This runs the spell checker from the developer tools repository.

* check_spelling_c: Check for spelling errors (C/C++ only),
* check_spelling_osl: Check for spelling errors (OSL only).
* check_spelling_py: Check for spelling errors (Python only).

Note: an additional word-list is maintained at: 'source/tools/check_source/check_spelling_c_config.py'

Note: that spell checkers can take a 'CHECK_SPELLING_CACHE' filepath argument,
Note that spell checkers can take a 'CHECK_SPELLING_CACHE' filepath argument,
so re-running does not need to re-check unchanged files.

Example:

@@ -131,7 +123,7 @@ Utilities

* update:
updates git and all submodules

* format:
* format
Format source code using clang (uses PATHS if passed in). For example::

make format PATHS="source/blender/blenlib source/blender/blenkernel"

@@ -191,13 +183,8 @@ endif

ifndef DEPS_INSTALL_DIR
DEPS_INSTALL_DIR:=$(shell dirname "$(BLENDER_DIR)")/lib/$(OS_NCASE)

# Add processor type to directory name, except for darwin x86_64
# which by convention does not have it.
ifeq ($(OS_NCASE),darwin)
ifneq ($(CPU),x86_64)
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
endif
else
ifneq ($(OS_NCASE),darwin)
# Add processor type to directory name
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
endif
endif

@@ -211,7 +198,7 @@ endif

# in libraries, or python 2 for running make update to get it.
ifeq ($(OS_NCASE),darwin)
ifeq (, $(shell command -v $(PYTHON)))
PYTHON:=$(DEPS_INSTALL_DIR)/python/bin/python3.7m
PYTHON:=../lib/darwin/python/bin/python3.7m
ifeq (, $(shell command -v $(PYTHON)))
PYTHON:=python
endif

@@ -533,7 +520,7 @@ format: .FORCE

# Simple version of ./doc/python_api/sphinx_doc_gen.sh with no PDF generation.
doc_py: .FORCE
ASAN_OPTIONS=halt_on_error=0:${ASAN_OPTIONS} \
ASAN_OPTIONS=halt_on_error=0 \
$(BLENDER_BIN) --background -noaudio --factory-startup \
--python doc/python_api/sphinx_doc_gen.py
sphinx-build -b html -j $(NPROCS) doc/python_api/sphinx-in doc/python_api/sphinx-out

@@ -70,6 +70,7 @@ include(cmake/cuew.cmake)
include(cmake/opensubdiv.cmake)
include(cmake/sdl.cmake)
include(cmake/opencollada.cmake)
include(cmake/opencolorio.cmake)
include(cmake/llvm.cmake)
include(cmake/clang.cmake)
if(APPLE)

@@ -86,40 +87,35 @@ include(cmake/tbb.cmake)
include(cmake/openvdb.cmake)
include(cmake/nanovdb.cmake)
include(cmake/python.cmake)
option(USE_PIP_NUMPY "Install NumPy using pip wheel instead of building from source" OFF)
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
set(USE_PIP_NUMPY ON)
else()
include(cmake/numpy.cmake)
endif()
include(cmake/python_site_packages.cmake)
include(cmake/package_python.cmake)
include(cmake/numpy.cmake)
include(cmake/usd.cmake)
include(cmake/potrace.cmake)
include(cmake/haru.cmake)
# Boost needs to be included after python.cmake due to the PYTHON_BINARY variable being needed.
include(cmake/boost.cmake)
include(cmake/pugixml.cmake)
include(cmake/ispc.cmake)
include(cmake/openimagedenoise.cmake)
include(cmake/embree.cmake)
if((NOT APPLE) OR ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
include(cmake/ispc.cmake)
include(cmake/openimagedenoise.cmake)
include(cmake/embree.cmake)
endif()
if(NOT APPLE)
include(cmake/xr_openxr.cmake)
endif()

# OpenColorIO and dependencies.
include(cmake/expat.cmake)
include(cmake/yamlcpp.cmake)
include(cmake/opencolorio.cmake)

if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
include(cmake/sse2neon.cmake)
endif()

if(WITH_WEBP)
include(cmake/webp.cmake)
endif()

if(WIN32)
# OCIO deps
include(cmake/tinyxml.cmake)
include(cmake/yamlcpp.cmake)
# LCMS is an OCIO dep, but only if you build the apps, leaving it here for convenience
# include(cmake/lcms.cmake)
endif()

if(NOT WIN32 OR ENABLE_MINGW64)
include(cmake/gmp.cmake)
include(cmake/openjpeg.cmake)

@@ -19,6 +19,16 @@
set(ALEMBIC_EXTRA_ARGS
-DBUILDSTATIC=ON
-DLINKSTATIC=ON
-DALEMBIC_LIB_USES_BOOST=ON
-DBoost_COMPILER:STRING=${BOOST_COMPILER_STRING}
-DBoost_USE_MULTITHREADED=ON
-DUSE_STATIC_BOOST=On
-DBoost_USE_STATIC_LIBS=ON
-DBoost_USE_STATIC_RUNTIME=OFF
-DBoost_DEBUG=ON
-DBOOST_ROOT=${LIBDIR}/boost
-DBoost_NO_SYSTEM_PATHS=ON
-DBoost_NO_BOOST_CMAKE=ON
-DILMBASE_ROOT=${LIBDIR}/openexr
-DALEMBIC_ILMBASE_INCLUDE_DIRECTORY=${LIBDIR}/openexr/include/OpenEXR
-DALEMBIC_ILMBASE_HALF_LIB=${LIBDIR}/openexr/lib/${LIBPREFIX}Half${OPENEXR_VERSION_POSTFIX}${LIBEXT}

@@ -71,6 +81,7 @@ endif()

add_dependencies(
external_alembic
external_boost
external_zlib
external_openexr
)

@@ -23,7 +23,7 @@ if(WIN32)
set(BOOST_COMPILER_STRING -vc141)

set(BOOST_CONFIGURE_COMMAND bootstrap.bat)
set(BOOST_BUILD_COMMAND b2)
set(BOOST_BUILD_COMMAND bjam)
set(BOOST_BUILD_OPTIONS runtime-link=shared )
set(BOOST_HARVEST_CMD ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/boost/lib/ ${HARVEST_TARGET}/boost/lib/ )
if(BUILD_MODE STREQUAL Release)

@@ -33,16 +33,6 @@ if(UNIX)
yasm
)

if(NOT APPLE)
set(_required_software
${_required_software}

# Needed for Mesa.
meson
ninja
)
endif()

foreach(_software ${_required_software})
find_program(_software_find NAMES ${_software})
if(NOT _software_find)

@@ -70,7 +60,7 @@ if(UNIX)
" ${_software_missing}\n"
"\n"
"On Debian and Ubuntu:\n"
" apt install autoconf automake libtool yasm tcl ninja-build meson python3-mako\n"
" apt install autoconf automake libtool yasm tcl\n"
"\n"
"On macOS (with homebrew):\n"
" brew install autoconf automake bison libtool pkg-config yasm\n"

@@ -29,7 +29,6 @@ set(EMBREE_EXTRA_ARGS
-DEMBREE_MAX_ISA=AVX2
-DEMBREE_TASKING_SYSTEM=TBB
-DEMBREE_TBB_ROOT=${LIBDIR}/tbb
-DTBB_ROOT=${LIBDIR}/tbb
-DTBB_STATIC_LIB=${TBB_STATIC_LIBRARY}
)

@@ -47,26 +46,15 @@ else()
set(EMBREE_BUILD_DIR)
endif()

if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
ExternalProject_Add(external_embree
GIT_REPOSITORY ${EMBREE_ARM_GIT}
GIT_TAG "blender-arm"
DOWNLOAD_DIR ${DOWNLOAD_DIR}
PREFIX ${BUILD_DIR}/embree
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/embree
)
else()
ExternalProject_Add(external_embree
URL ${EMBREE_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${EMBREE_HASH}
PREFIX ${BUILD_DIR}/embree
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/embree/src/external_embree < ${PATCH_DIR}/embree.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/embree
)
endif()
ExternalProject_Add(external_embree
URL ${EMBREE_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${EMBREE_HASH}
PREFIX ${BUILD_DIR}/embree
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/embree/src/external_embree < ${PATCH_DIR}/embree.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/embree
)

add_dependencies(
external_embree

@@ -1,46 +0,0 @@
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENSE BLOCK *****

set(HARU_EXTRA_ARGS
-DLIBHPDF_SHARED=OFF
-DLIBHPDF_STATIC=ON
-DLIBHPDF_EXAMPLES=OFF
-DLIBHPDF_ENABLE_EXCEPTIONS=ON
)

ExternalProject_Add(external_haru
URL ${HARU_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${HARU_HASH}
PREFIX ${BUILD_DIR}/haru
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/haru/src/external_haru < ${PATCH_DIR}/haru.diff
CMAKE_ARGS
-DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_INSTALL_PREFIX=${LIBDIR}/haru
${DEFAULT_CMAKE_FLAGS} ${HARU_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/haru
)

if(WIN32)
if(BUILD_MODE STREQUAL Release)
ExternalProject_Add_Step(external_haru after_install
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/haru/include ${HARVEST_TARGET}/haru/include
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/haru/lib/libhpdfs.lib ${HARVEST_TARGET}/haru/lib/libhpdfs.lib
DEPENDEES install
)
endif()
endif()

@@ -109,9 +109,6 @@ harvest(llvm/lib llvm/lib "libLLVM*.a")
if(APPLE)
harvest(openmp/lib openmp/lib "*")
harvest(openmp/include openmp/include "*.h")
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
harvest(sse2neon sse2neon "*.h")
endif()
endif()
harvest(ogg/lib ffmpeg/lib "*.a")
harvest(openal/include openal/include "*.h")

@@ -142,10 +139,12 @@ harvest(openimageio/bin openimageio/bin "maketx")
harvest(openimageio/bin openimageio/bin "oiiotool")
harvest(openimageio/include openimageio/include "*")
harvest(openimageio/lib openimageio/lib "*.a")
harvest(openimagedenoise/include openimagedenoise/include "*")
harvest(openimagedenoise/lib openimagedenoise/lib "*.a")
harvest(embree/include embree/include "*.h")
harvest(embree/lib embree/lib "*.a")
if((NOT APPLE) OR ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
harvest(openimagedenoise/include openimagedenoise/include "*")
harvest(openimagedenoise/lib openimagedenoise/lib "*.a")
harvest(embree/include embree/include "*.h")
harvest(embree/lib embree/lib "*.a")
endif()
harvest(openjpeg/include/openjpeg-2.3 openjpeg/include "*.h")
harvest(openjpeg/lib openjpeg/lib "*.a")
harvest(opensubdiv/include opensubdiv/include "*.h")

@@ -163,7 +162,7 @@ harvest(png/include png/include "*.h")
harvest(png/lib png/lib "*.a")
harvest(pugixml/include pugixml/include "*.hpp")
harvest(pugixml/lib pugixml/lib "*.a")
harvest(python/bin python/bin "python${PYTHON_SHORT_VERSION}")
harvest(python/bin python/bin "python${PYTHON_SHORT_VERSION}m")
harvest(python/include python/include "*h")
harvest(python/lib python/lib "*")
harvest(sdl/include/SDL2 sdl/include "*.h")

@@ -188,12 +187,10 @@ harvest(usd/lib/usd usd/lib/usd "*")
harvest(usd/plugin usd/plugin "*")
harvest(potrace/include potrace/include "*.h")
harvest(potrace/lib potrace/lib "*.a")
harvest(haru/include haru/include "*.h")
harvest(haru/lib haru/lib "*.a")

if(UNIX AND NOT APPLE)
harvest(libglu/lib mesa/lib "*.so*")
harvest(mesa/lib64 mesa/lib "*.so*")
harvest(mesa/lib mesa/lib "*.so*")
endif()

endif()
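
The `harvest()` calls above copy build results from the dependency install prefix into the harvest target used to assemble the precompiled libraries; the function itself is defined elsewhere in the build-environment CMake files. A simplified sketch of that behavior, assuming this shape rather than quoting the real definition:

    # Assumed sketch: copy files matching `glob` from ${LIBDIR}/<from>
    # into ${HARVEST_TARGET}/<to>, preserving relative directories.
    function(harvest from to glob)
      file(GLOB_RECURSE _files RELATIVE "${LIBDIR}/${from}" "${LIBDIR}/${from}/${glob}")
      foreach(_f ${_files})
        get_filename_component(_dir "${_f}" DIRECTORY)
        file(COPY "${LIBDIR}/${from}/${_f}" DESTINATION "${HARVEST_TARGET}/${to}/${_dir}")
      endforeach()
    endfunction()
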

@@ -21,31 +21,27 @@ if(WIN32)
-DFLEX_EXECUTABLE=${LIBDIR}/flexbison/win_flex.exe
-DBISON_EXECUTABLE=${LIBDIR}/flexbison/win_bison.exe
-DM4_EXECUTABLE=${DOWNLOAD_DIR}/mingw/mingw64/msys/1.0/bin/m4.exe
-DARM_ENABLED=Off
)
elseif(APPLE)
# Use bison installed via Homebrew.
# The one which comes with the Xcode toolset is too old.
if("${CMAKE_HOST_SYSTEM_PROCESSOR}" STREQUAL "arm64")
set(ISPC_EXTRA_ARGS_APPLE
-DBISON_EXECUTABLE=/opt/homebrew/opt/bison/bin/bison
-DARM_ENABLED=On
)
set(HOMEBREW_LOCATION "/opt/homebrew")
else()
set(ISPC_EXTRA_ARGS_APPLE
-DBISON_EXECUTABLE=/usr/local/opt/bison/bin/bison
-DARM_ENABLED=Off
)
set(HOMEBREW_LOCATION "/usr/local")
endif()
set(ISPC_EXTRA_ARGS_APPLE
-DBISON_EXECUTABLE=${HOMEBREW_LOCATION}/opt/bison/bin/bison
)
elseif(UNIX)
set(ISPC_EXTRA_ARGS_UNIX
-DCMAKE_C_COMPILER=${LIBDIR}/clang/bin/clang
-DCMAKE_CXX_COMPILER=${LIBDIR}/clang/bin/clang++
-DARM_ENABLED=Off
)
endif()

set(ISPC_EXTRA_ARGS
-DARM_ENABLED=Off
-DISPC_NO_DUMPS=On
-DISPC_INCLUDE_EXAMPLES=Off
-DISPC_INCLUDE_TESTS=Off

@@ -16,20 +16,16 @@
#
# ***** END GPL LICENSE BLOCK *****

set(EXPAT_EXTRA_ARGS
-DEXPAT_BUILD_DOCS=OFF
-DEXPAT_BUILD_EXAMPLES=OFF
-DEXPAT_BUILD_TESTS=OFF
-DEXPAT_BUILD_TOOLS=OFF
-DEXPAT_SHARED_LIBS=OFF
set(LCMS_EXTRA_ARGS
)

ExternalProject_Add(external_expat
URL ${EXPAT_URI}
ExternalProject_Add(external_lcms
URL ${LCMS_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${EXPAT_HASH}
PREFIX ${BUILD_DIR}/expat
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/expat ${DEFAULT_CMAKE_FLAGS} ${EXPAT_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/expat
SOURCE_SUBDIR expat
URL_HASH MD5=${LCMS_HASH}
PREFIX ${BUILD_DIR}/lcms
# Patch taken from ocio.
PATCH_COMMAND ${CMAKE_COMMAND} -E copy ${PATCH_DIR}/cmakelists_lcms.txt ${BUILD_DIR}/lcms/src/external_lcms/CMakeLists.txt
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/lcms ${DEFAULT_CMAKE_FLAGS} ${LCMS_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/lcms
)

@@ -17,7 +17,7 @@
# ***** END GPL LICENSE BLOCK *****

if(APPLE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
set(LLVM_TARGETS AArch64$<SEMICOLON>ARM)
set(LLVM_TARGETS AArch64)
else()
set(LLVM_TARGETS X86)
endif()

@@ -20,36 +20,19 @@ set(MESA_CFLAGS "-static-libgcc")
set(MESA_CXXFLAGS "-static-libgcc -static-libstdc++ -Bstatic -lstdc++ -Bdynamic -l:libstdc++.a")
set(MESA_LDFLAGS "-L${LIBDIR}/zlib/lib -pthread -static-libgcc -static-libstdc++ -Bstatic -lstdc++ -Bdynamic -l:libstdc++.a -l:libz_pic.a")

# The 'native-file', used for overrides with the meson build system.
# meson does not provide a way to do this using command line arguments.
#
# Note that we can't output to "${BUILD_DIR}/mesa/src/external_mesa" as
# it doesn't exist when CMake first executes.
file(WRITE ${BUILD_DIR}/mesa/tmp/native-file.ini "\
[binaries]
llvm-config = '${LIBDIR}/llvm/bin/llvm-config'"
)

set(MESA_EXTRA_FLAGS
-Dbuildtype=release
-Dc_args=${MESA_CFLAGS}
-Dcpp_args=${MESA_CXXFLAGS}
-Dc_link_args=${MESA_LDFLAGS}
-Dcpp_link_args=${MESA_LDFLAGS}
-Dglx=gallium-xlib
-Dgallium-drivers=swrast
-Ddri-drivers=
-Dvulkan-drivers=
-Dgbm=disabled
-Degl=disabled
-Dgles1=disabled
-Dgles2=disabled
-Dshared-llvm=disabled
# Without this, the build fails when: `wayland-scanner` is not found.
# At some point we will likely want to support Wayland.
# Disable for now since it's not officially supported.
-Dplatforms=x11
--native-file ${BUILD_DIR}/mesa/tmp/native-file.ini
CFLAGS=${MESA_CFLAGS}
CXXFLAGS=${MESA_CXXFLAGS}
LDFLAGS=${MESA_LDFLAGS}
--enable-glx=gallium-xlib
--with-gallium-drivers=swrast
--disable-dri
--disable-gbm
--disable-egl
--disable-gles1
--disable-gles2
--disable-llvm-shared-libs
--with-llvm-prefix=${LIBDIR}/llvm
)

ExternalProject_Add(external_mesa

@@ -59,9 +42,9 @@ ExternalProject_Add(external_mesa
PREFIX ${BUILD_DIR}/mesa
CONFIGURE_COMMAND ${CONFIGURE_ENV} &&
cd ${BUILD_DIR}/mesa/src/external_mesa/ &&
meson ${BUILD_DIR}/mesa/src/external_mesa-build --prefix=${LIBDIR}/mesa ${MESA_EXTRA_FLAGS}
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa-build && ninja -j${MAKE_THREADS}
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa-build && ninja install
${CONFIGURE_COMMAND_NO_TARGET} --prefix=${LIBDIR}/mesa ${MESA_EXTRA_FLAGS}
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa/ && make -j${MAKE_THREADS}
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa/ && make install
INSTALL_DIR ${LIBDIR}/mesa
)

@@ -22,17 +22,8 @@ ExternalProject_Add(external_nasm
URL_HASH SHA256=${NASM_HASH}
PREFIX ${BUILD_DIR}/nasm
PATCH_COMMAND ${PATCH_CMD} --verbose -p 1 -N -d ${BUILD_DIR}/nasm/src/external_nasm < ${PATCH_DIR}/nasm.diff
CONFIGURE_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && ./autogen.sh && ${CONFIGURE_COMMAND} --prefix=${LIBDIR}/nasm
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make -j${MAKE_THREADS} && make manpages
CONFIGURE_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && ${CONFIGURE_COMMAND} --prefix=${LIBDIR}/nasm
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make -j${MAKE_THREADS}
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make install
INSTALL_DIR ${LIBDIR}/nasm
)

if(UNIX)
# `touch nasm.1 ndisasm.1` helps to create the manual pages files, even when
# local `asciidoc` and `xmlto` packages are not installed.
ExternalProject_Add_Step(external_nasm after_configure
COMMAND ${CMAKE_COMMAND} -E touch ${BUILD_DIR}/nasm/src/external_nasm/nasm.1 ${BUILD_DIR}/nasm/src/external_nasm/ndisasm.1
DEPENDEES configure
)
endif()

@@ -47,5 +47,4 @@ ExternalProject_Add(external_numpy
add_dependencies(
external_numpy
external_python
external_python_site_packages
)

@@ -18,37 +18,50 @@

set(OPENCOLORIO_EXTRA_ARGS
-DOCIO_BUILD_APPS=OFF
-DOCIO_BUILD_PYTHON=OFF
-DOCIO_BUILD_PYGLUE=OFF
-DOCIO_BUILD_NUKE=OFF
-DOCIO_BUILD_JAVA=OFF
-DBUILD_SHARED_LIBS=OFF
-DOCIO_USE_BOOST_PTR=OFF
-DOCIO_BUILD_STATIC=ON
-DOCIO_BUILD_SHARED=OFF
-DOCIO_BUILD_TRUELIGHT=OFF
-DOCIO_BUILD_DOCS=OFF
-DOCIO_BUILD_TESTS=OFF
-DOCIO_BUILD_GPU_TESTS=OFF
-DOCIO_USE_SSE=ON

# Manually build ext packages except for pystring, which does not have
# a CMake or autotools build system that we can easily use.
-DOCIO_INSTALL_EXT_PACKAGES=MISSING
-DHalf_ROOT=${LIBDIR}/openexr
-DHalf_STATIC_LIBRARY=ON
-Dexpat_ROOT=${LIBDIR}/expat
-Dyaml-cpp_ROOT=${LIBDIR}/yamlcpp
-DOCIO_BUILD_PYGLUE=OFF
-DOCIO_BUILD_JNIGLUE=OFF
-DOCIO_STATIC_JNIGLUE=OFF
)

if(APPLE AND NOT("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
set(OPENCOLORIO_EXTRA_ARGS
${OPENCOLORIO_EXTRA_ARGS}
-DOCIO_USE_SSE=OFF
${OPENCOLORIO_EXTRA_ARGS}
-DOCIO_USE_SSE=OFF
)
endif()

if(WIN32)
set(OCIO_PATCH opencolorio_win.diff)
set(OPENCOLORIO_EXTRA_ARGS
${OPENCOLORIO_EXTRA_ARGS}
-DOCIO_BUILD_TESTS=OFF
-DOCIO_USE_SSE=ON
-DOCIO_INLINES_HIDDEN=OFF
-DOCIO_PYGLUE_LINK=OFF
-DOCIO_PYGLUE_RESPECT_ABI=OFF
-DOCIO_PYGLUE_SONAME=OFF
-DOCIO_PYGLUE_LIB_PREFIX=OFF
-DUSE_EXTERNAL_TINYXML=ON
-DTINYXML_INCLUDE_DIR=${LIBDIR}/tinyxml/include
-DTINYXML_LIBRARY=${LIBDIR}/tinyxml/lib/tinyxml${libext}
-DUSE_EXTERNAL_YAML=ON
-DYAML_CPP_FOUND=ON
-DYAML_CPP_VERSION=${YAMLCPP_VERSION}
-DUSE_EXTERNAL_LCMS=ON
-DINC_1=${LIBDIR}/tinyxml/include
-DINC_2=${LIBDIR}/yamlcpp/include
# Lie because ocio cmake is demanding boost even though it is not needed.
-DYAML_CPP_VERSION=0.5.0
)
else()
set(OCIO_PATCH opencolorio.diff)
set(OPENCOLORIO_EXTRA_ARGS
${OPENCOLORIO_EXTRA_ARGS}
)

@@ -59,43 +72,48 @@ ExternalProject_Add(external_opencolorio
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${OPENCOLORIO_HASH}
PREFIX ${BUILD_DIR}/opencolorio
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/opencolorio/src/external_opencolorio < ${PATCH_DIR}/opencolorio.diff
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/opencolorio/src/external_opencolorio < ${PATCH_DIR}/${OCIO_PATCH}
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/opencolorio ${DEFAULT_CMAKE_FLAGS} ${OPENCOLORIO_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/opencolorio
)

if(NOT WIN32)
add_custom_command(
OUTPUT ${LIBDIR}/opencolorio/lib/libtinyxml.a
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libtinyxml.a ${LIBDIR}/opencolorio/lib/libtinyxml.a
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libyaml-cpp.a ${LIBDIR}/opencolorio/lib/libyaml-cpp.a
)
add_custom_target(external_opencolorio_extra ALL DEPENDS external_opencolorio ${LIBDIR}/opencolorio/lib/libtinyxml.a)
endif()

add_dependencies(
external_opencolorio
external_yamlcpp
external_expat
external_openexr
external_boost
)

if(WIN32)
add_dependencies(
external_opencolorio
external_tinyxml
external_yamlcpp

)
if(BUILD_MODE STREQUAL Release)
ExternalProject_Add_Step(external_opencolorio after_install
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/include ${HARVEST_TARGET}/opencolorio/include
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/lib ${HARVEST_TARGET}/opencolorio/lib
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/lib/static ${HARVEST_TARGET}/opencolorio/lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/yamlcpp/lib/libyaml-cppmd.lib ${HARVEST_TARGET}/opencolorio/lib/libyaml-cpp.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/expat/lib/libexpatMD.lib ${HARVEST_TARGET}/opencolorio/lib/libexpatMD.lib
COMMAND ${CMAKE_COMMAND} -E copy ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/pystring.lib ${HARVEST_TARGET}/opencolorio/lib/pystring.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tinyxml/lib/tinyxml.lib ${HARVEST_TARGET}/opencolorio/lib/tinyxml.lib
DEPENDEES install
)
endif()
if(BUILD_MODE STREQUAL Debug)
ExternalProject_Add_Step(external_opencolorio after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/opencolorio/lib/Opencolorio.lib ${HARVEST_TARGET}/opencolorio/lib/OpencolorIO_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/opencolorio/lib/static/Opencolorio.lib ${HARVEST_TARGET}/opencolorio/lib/OpencolorIO_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/yamlcpp/lib/libyaml-cppmdd.lib ${HARVEST_TARGET}/opencolorio/lib/libyaml-cpp_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/expat/lib/libexpatdMD.lib ${HARVEST_TARGET}/opencolorio/lib/libexpatdMD.lib
COMMAND ${CMAKE_COMMAND} -E copy ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/pystring.lib ${HARVEST_TARGET}/opencolorio/lib/pystring_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tinyxml/lib/tinyxml.lib ${HARVEST_TARGET}/opencolorio/lib/tinyxml_d.lib
DEPENDEES install
)
endif()
else()
ExternalProject_Add_Step(external_opencolorio after_install
COMMAND cp ${LIBDIR}/yamlcpp/lib/libyaml-cpp.a ${LIBDIR}/opencolorio/lib/
COMMAND cp ${LIBDIR}/expat/lib/libexpat.a ${LIBDIR}/opencolorio/lib/
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libpystring.a ${LIBDIR}/opencolorio/lib/
DEPENDEES install
)

endif()

@@ -45,7 +45,6 @@ ExternalProject_Add(external_openimagedenoise
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${OIDN_HASH}
PREFIX ${BUILD_DIR}/openimagedenoise
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/openimagedenoise/src/external_openimagedenoise < ${PATCH_DIR}/oidn.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/openimagedenoise ${DEFAULT_CMAKE_FLAGS} ${OIDN_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/openimagedenoise
)

@@ -54,8 +54,6 @@ set(OPENVDB_EXTRA_ARGS
-DOPENVDB_CORE_STATIC=${OPENVDB_STATIC}
-DOPENVDB_BUILD_BINARIES=Off
-DCMAKE_DEBUG_POSTFIX=_d
-DILMBASE_USE_STATIC_LIBS=On
-DOPENEXR_USE_STATIC_LIBS=On
)

if(WIN32)

@@ -43,7 +43,7 @@ if(WIN32)
PREFIX ${BUILD_DIR}/python
CONFIGURE_COMMAND ""
BUILD_COMMAND cd ${BUILD_DIR}/python/src/external_python/pcbuild/ && set IncludeTkinter=false && call build.bat -e -p x64 -c ${BUILD_MODE}
INSTALL_COMMAND ${PYTHON_BINARY_INTERNAL} ${PYTHON_SRC}/PC/layout/main.py -b ${PYTHON_SRC}/PCbuild/amd64 -s ${PYTHON_SRC} -t ${PYTHON_SRC}/tmp/ --include-stable --include-pip --include-dev --include-launchers --include-venv --include-symbols ${PYTHON_EXTRA_INSTLAL_FLAGS} --copy ${LIBDIR}/python
INSTALL_COMMAND ${PYTHON_BINARY_INTERNAL} ${PYTHON_SRC}/PC/layout/main.py -b ${PYTHON_SRC}/PCbuild/amd64 -s ${PYTHON_SRC} -t ${PYTHON_SRC}/tmp/ --include-underpth --include-stable --include-pip --include-dev --include-launchers --include-venv --include-symbols ${PYTHON_EXTRA_INSTLAL_FLAGS} --copy ${LIBDIR}/python
)

else()

@@ -74,12 +74,13 @@ else()
endif()
set(PYTHON_CONFIGURE_ENV ${CONFIGURE_ENV} && ${PYTHON_FUNC_CONFIGS})
set(PYTHON_BINARY ${BUILD_DIR}/python/src/external_python/python.exe)
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_macos.diff)
else()
set(PYTHON_CONFIGURE_ENV ${CONFIGURE_ENV})
set(PYTHON_BINARY ${BUILD_DIR}/python/src/external_python/python)
endif()
# Link against zlib statically (Unix). Avoid rpath issues (macOS).
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_unix.diff)
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_linux.diff)
endif()

set(PYTHON_CONFIGURE_EXTRA_ARGS "--with-openssl=${LIBDIR}/ssl")
set(PYTHON_CFLAGS "-I${LIBDIR}/sqlite/include -I${LIBDIR}/bzip2/include -I${LIBDIR}/lzma/include -I${LIBDIR}/zlib/include")
set(PYTHON_LDFLAGS "-L${LIBDIR}/ffi/lib -L${LIBDIR}/sqlite/lib -L${LIBDIR}/bzip2/lib -L${LIBDIR}/lzma/lib -L${LIBDIR}/zlib/lib")

@@ -16,27 +16,14 @@
#
# ***** END GPL LICENSE BLOCK *****

if(WIN32 AND BUILD_MODE STREQUAL Debug)
set(SITE_PACKAGES_EXTRA --global-option build --global-option --debug)
endif()

ExternalProject_Add(external_python_site_packages
DOWNLOAD_COMMAND ""
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
PREFIX ${BUILD_DIR}/site_packages
INSTALL_COMMAND ${PYTHON_BINARY} -m pip install ${SITE_PACKAGES_EXTRA} cython==${CYTHON_VERSION} idna==${IDNA_VERSION} chardet==${CHARDET_VERSION} urllib3==${URLLIB3_VERSION} certifi==${CERTIFI_VERSION} requests==${REQUESTS_VERSION} --no-binary :all:
INSTALL_COMMAND ${PYTHON_BINARY} -m pip install idna==${IDNA_VERSION} chardet==${CHARDET_VERSION} urllib3==${URLLIB3_VERSION} certifi==${CERTIFI_VERSION} requests==${REQUESTS_VERSION} --no-binary :all:
)

if(USE_PIP_NUMPY)
# Use only wheel (and not build from source) to stop NumPy from linking against buggy
# Accelerate framework backend on macOS. Official wheels are built with OpenBLAS.
ExternalProject_Add_Step(external_python_site_packages after_install
COMMAND ${PYTHON_BINARY} -m pip install --no-cache-dir numpy==${NUMPY_VERSION} --only-binary :all:
DEPENDEES install
)
endif()

add_dependencies(
external_python_site_packages
external_python

@@ -42,21 +42,8 @@ if(UNIX)
-DSQLITE_MAX_VARIABLE_NUMBER=250000 \
-fPIC")
set(SQLITE_CONFIGURE_ENV ${SQLITE_CONFIGURE_ENV} && export LDFLAGS=${SQLITE_LDFLAGS} && export CFLAGS=${SQLITE_CFLAGS})
set(SQLITE_CONFIGURATION_ARGS
${SQLITE_CONFIGURATION_ARGS}
--enable-threadsafe
--enable-load-extension
--enable-json1
--enable-fts4
--enable-fts5
# While building `tcl` is harmless, it causes problems when the install step
# tries to copy the files into the system path.
# Since this isn't required by Python or Blender this can be disabled.
# Note that Debian (for example), splits this off into a separate package,
# so it's safe to turn off.
--disable-tcl
--enable-shared=no
)
set(SQLITE_CONFIGURATION_ARGS ${SQLITE_CONFIGURATION_ARGS} --enable-threadsafe --enable-load-extension --enable-json1 --enable-fts4 --enable-fts5
--enable-shared=no)
endif()

ExternalProject_Add(external_sqlite

@@ -20,7 +20,7 @@ set(SSL_CONFIGURE_COMMAND ./Configure)
set(SSL_PATCH_CMD echo .)

if(APPLE)
set(SSL_OS_COMPILER "blender-darwin-${CMAKE_OSX_ARCHITECTURES}")
set(SSL_OS_COMPILER "blender-darwin-${CMAKE_OSX_ARCHITECTURES}")
else()
if("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(SSL_EXTRA_ARGS enable-ec_nistp_64_gcc_128)

@@ -42,8 +42,7 @@ ExternalProject_Add(external_tbb
URL_HASH MD5=${TBB_HASH}
PREFIX ${BUILD_DIR}/tbb
PATCH_COMMAND COMMAND ${CMAKE_COMMAND} -E copy ${PATCH_DIR}/cmakelists_tbb.txt ${BUILD_DIR}/tbb/src/external_tbb/CMakeLists.txt &&
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/tbb/src/external_tbb/build/vs2013/version_string.ver ${BUILD_DIR}/tbb/src/external_tbb/src/tbb/version_string.ver &&
${PATCH_CMD} -p 1 -d ${BUILD_DIR}/tbb/src/external_tbb < ${PATCH_DIR}/tbb.diff
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/tbb/src/external_tbb/build/vs2013/version_string.ver ${BUILD_DIR}/tbb/src/external_tbb/src/tbb/version_string.ver
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/tbb ${DEFAULT_CMAKE_FLAGS} ${TBB_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/tbb
)

@@ -16,15 +16,16 @@
#
# ***** END GPL LICENSE BLOCK *****

if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
ExternalProject_Add(external_sse2neon
GIT_REPOSITORY ${SSE2NEON_GIT}
GIT_TAG ${SSE2NEON_GIT_HASH}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
PREFIX ${BUILD_DIR}/sse2neon
CONFIGURE_COMMAND echo sse2neon - Nothing to configure
BUILD_COMMAND echo sse2neon - nothing to build
INSTALL_COMMAND mkdir -p ${LIBDIR}/sse2neon && cp ${BUILD_DIR}/sse2neon/src/external_sse2neon/sse2neon.h ${LIBDIR}/sse2neon
INSTALL_DIR ${LIBDIR}/sse2neon
)
endif()
set(TINYXML_EXTRA_ARGS
)

ExternalProject_Add(external_tinyxml
URL ${TINYXML_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${TINYXML_HASH}
PREFIX ${BUILD_DIR}/tinyxml
# patch taken from ocio
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/tinyxml/src/external_tinyxml < ${PATCH_DIR}/tinyxml.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/tinyxml ${DEFAULT_CMAKE_FLAGS} ${TINYXML_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/tinyxml
)

@@ -32,11 +32,11 @@ set(JPEG_VERSION 2.0.4)
|
||||
set(JPEG_URI https://github.com/libjpeg-turbo/libjpeg-turbo/archive/${JPEG_VERSION}.tar.gz)
|
||||
set(JPEG_HASH 44c43e4a9fb352f47090804529317c88)
|
||||
|
||||
set(BOOST_VERSION 1.73.0)
|
||||
set(BOOST_VERSION_NODOTS 1_73_0)
|
||||
set(BOOST_VERSION_NODOTS_SHORT 1_73)
|
||||
set(BOOST_VERSION 1.70.0)
|
||||
set(BOOST_VERSION_NODOTS 1_70_0)
|
||||
set(BOOST_VERSION_NODOTS_SHORT 1_70)
|
||||
set(BOOST_URI https://dl.bintray.com/boostorg/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz)
|
||||
set(BOOST_HASH 4036cd27ef7548b8d29c30ea10956196)
|
||||
set(BOOST_HASH fea771fe8176828fabf9c09242ee8c26)
|
||||
|
||||
# Using old version as recommended by OpenVDB build documentation.
|
||||
set(BLOSC_VERSION 1.5.0)
|
||||
@@ -47,9 +47,9 @@ set(PTHREADS_VERSION 3.0.0)
|
||||
set(PTHREADS_URI http://sourceforge.mirrorservice.org/p/pt/pthreads4w/pthreads4w-code-v${PTHREADS_VERSION}.zip)
|
||||
set(PTHREADS_HASH f3bf81bb395840b3446197bcf4ecd653)
|
||||
|
||||
set(OPENEXR_VERSION 2.5.5)
|
||||
set(OPENEXR_VERSION 2.4.0)
|
||||
set(OPENEXR_URI https://github.com/AcademySoftwareFoundation/openexr/archive/v${OPENEXR_VERSION}.tar.gz)
|
||||
set(OPENEXR_HASH 85e8a979092c9055d10ed103062d31a0)
|
||||
set(OPENEXR_HASH 9e4d69cf2a12c6fb19b98af7c5e0eaee)
|
||||
if(WIN32)
|
||||
# Openexr started appending _d on its own so now
|
||||
# we need to tell the build the postfix is _s while
|
||||
@@ -78,9 +78,9 @@ set(FREEGLUT_VERSION 3.0.0)
|
||||
set(FREEGLUT_URI http://pilotfiber.dl.sourceforge.net/project/freeglut/freeglut/${FREEGLUT_VERSION}/freeglut-${FREEGLUT_VERSION}.tar.gz)
|
||||
set(FREEGLUT_HASH 90c3ca4dd9d51cf32276bc5344ec9754)
|
||||
|
||||
set(ALEMBIC_VERSION 1.7.16)
|
||||
set(ALEMBIC_VERSION 1.7.12)
|
||||
set(ALEMBIC_URI https://github.com/alembic/alembic/archive/${ALEMBIC_VERSION}.tar.gz)
|
||||
set(ALEMBIC_MD5 effcc86e42fe6605588e3de57bde6677)
|
||||
set(ALEMBIC_MD5 e2b3777f23c5c09481a008cc6f0f8a40)
|
||||
|
||||
# hash is for 3.1.2
|
||||
set(GLFW_GIT_UID 30306e54705c3adae9fe082c816a3be71963485c)
|
||||
@@ -109,39 +109,22 @@ set(OPENCOLLADA_VERSION v1.6.68)
|
||||
set(OPENCOLLADA_URI https://github.com/KhronosGroup/OpenCOLLADA/archive/${OPENCOLLADA_VERSION}.tar.gz)
|
||||
set(OPENCOLLADA_HASH ee7dae874019fea7be11613d07567493)
|
||||
|
||||
set(OPENCOLORIO_VERSION 2.0.0)
|
||||
set(OPENCOLORIO_VERSION 1.1.1)
|
||||
set(OPENCOLORIO_URI https://github.com/AcademySoftwareFoundation/OpenColorIO/archive/v${OPENCOLORIO_VERSION}.tar.gz)
|
||||
set(OPENCOLORIO_HASH 1a2e3478b6cd9a1549f24e1b2205e3f0)
|
||||
set(OPENCOLORIO_HASH 23d8b9ac81599305539a5a8674b94a3d)
|
||||
|
||||
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
|
||||
# Newer version required by ISPC with arm support.
|
||||
set(LLVM_VERSION 11.0.1)
|
||||
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-${LLVM_VERSION}.src.tar.xz)
|
||||
set(LLVM_HASH 6ec7ae9fd43da9b87cda15b3ab9cc7af)
|
||||
set(LLVM_VERSION 9.0.1)
|
||||
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-${LLVM_VERSION}.src.tar.xz)
|
||||
set(LLVM_HASH 31eb9ce73dd2a0f8dcab8319fb03f8fc)
|
||||
|
||||
set(CLANG_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-${LLVM_VERSION}.src.tar.xz)
|
||||
set(CLANG_HASH b4cb0b74b1f3292a89c9720f3e1e2934)
|
||||
set(CLANG_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-${LLVM_VERSION}.src.tar.xz)
|
||||
set(CLANG_HASH 13468e4a44940efef1b75e8641752f90)
|
||||
|
||||
set(CLANG_TOOLS_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-tools-extra-${LLVM_VERSION}.src.tar.xz)
|
||||
set(CLANG_TOOLS_HASH 1e577a85948a0f07483b7c405e59a0ca)
|
||||
set(CLANG_TOOLS_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-tools-extra-${LLVM_VERSION}.src.tar.xz)
|
||||
set(CLANG_TOOLS_HASH c76293870b564c6a7968622b475b7646)
|
||||
|
||||
set(OPENMP_VERSION 9.0.1)
|
||||
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${OPENMP_VERSION}/openmp-${OPENMP_VERSION}.src.tar.xz)
|
||||
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
|
||||
else()
|
||||
set(LLVM_VERSION 9.0.1)
|
||||
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-${LLVM_VERSION}.src.tar.xz)
|
||||
set(LLVM_HASH 31eb9ce73dd2a0f8dcab8319fb03f8fc)
|
||||
|
||||
set(CLANG_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-${LLVM_VERSION}.src.tar.xz)
|
||||
set(CLANG_HASH 13468e4a44940efef1b75e8641752f90)
|
||||
|
||||
set(CLANG_TOOLS_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-tools-extra-${LLVM_VERSION}.src.tar.xz)
|
||||
set(CLANG_TOOLS_HASH c76293870b564c6a7968622b475b7646)
|
||||
|
||||
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/openmp-${LLVM_VERSION}.src.tar.xz)
|
||||
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
|
||||
endif()
|
||||
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/openmp-${LLVM_VERSION}.src.tar.xz)
|
||||
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
|
||||
|
||||
set(OPENIMAGEIO_VERSION 2.1.15.0)
|
||||
set(OPENIMAGEIO_URI https://github.com/OpenImageIO/oiio/archive/Release-${OPENIMAGEIO_VERSION}.tar.gz)
|
||||
@@ -155,35 +138,34 @@ set(OSL_VERSION 1.10.10)
set(OSL_URI https://github.com/imageworks/OpenShadingLanguage/archive/Release-${OSL_VERSION}.tar.gz)
set(OSL_HASH 00dec08a93c8084e53848b9ad047889f)

set(PYTHON_VERSION 3.9.1)
set(PYTHON_SHORT_VERSION 3.9)
set(PYTHON_SHORT_VERSION_NO_DOTS 39)
set(PYTHON_VERSION 3.7.7)
set(PYTHON_SHORT_VERSION 3.7)
set(PYTHON_SHORT_VERSION_NO_DOTS 37)
set(PYTHON_URI https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tar.xz)
set(PYTHON_HASH 61981498e75ac8f00adcb908281fadb6)
set(PYTHON_HASH 172c650156f7bea68ce31b2fd01fa766)

set(TBB_VERSION 2020_U2)
set(TBB_VERSION 2019_U9)
set(TBB_URI https://github.com/oneapi-src/oneTBB/archive/${TBB_VERSION}.tar.gz)
set(TBB_HASH 1b711ae956524855088df3bbf5ec65dc)
set(TBB_HASH 26263622e9187212ec240dcf01b66207)

set(OPENVDB_VERSION 8.0.1)
set(OPENVDB_VERSION 7.0.0)
set(OPENVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/v${OPENVDB_VERSION}.tar.gz)
set(OPENVDB_HASH 01b490be16cc0e15c690f9a153c21461)
set(OPENVDB_HASH fd6c4f168282f7e0e494d290cd531fa8)

set(NANOVDB_GIT_UID e62f7a0bf1e27397223c61ddeaaf57edf111b77f)
set(NANOVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/${NANOVDB_GIT_UID}.tar.gz)
set(NANOVDB_HASH 90919510bc6ccd630fedc56f748cb199)

set(IDNA_VERSION 2.10)
set(CHARDET_VERSION 4.0.0)
set(URLLIB3_VERSION 1.26.3)
set(CERTIFI_VERSION 2020.12.5)
set(REQUESTS_VERSION 2.25.1)
set(CYTHON_VERSION 0.29.21)
set(IDNA_VERSION 2.9)
set(CHARDET_VERSION 3.0.4)
set(URLLIB3_VERSION 1.25.9)
set(CERTIFI_VERSION 2020.4.5.2)
set(REQUESTS_VERSION 2.23.0)

set(NUMPY_VERSION 1.19.5)
set(NUMPY_SHORT_VERSION 1.19)
set(NUMPY_VERSION 1.17.5)
set(NUMPY_SHORT_VERSION 1.17)
set(NUMPY_URI https://github.com/numpy/numpy/releases/download/v${NUMPY_VERSION}/numpy-${NUMPY_VERSION}.zip)
set(NUMPY_HASH f6a1b48717c552bbc18f1adc3cc1fe0e)
set(NUMPY_HASH 763a5646fa6eef7a22f4895bca0524f2)

set(LAME_VERSION 3.100)
set(LAME_URI http://downloads.sourceforge.net/project/lame/lame/3.100/lame-${LAME_VERSION}.tar.gz)
@@ -274,16 +256,16 @@ set(YAMLCPP_VERSION 0.6.3)
set(YAMLCPP_URI https://codeload.github.com/jbeder/yaml-cpp/tar.gz/yaml-cpp-${YAMLCPP_VERSION})
set(YAMLCPP_HASH b45bf1089a382e81f6b661062c10d0c2)

set(EXPAT_VERSION 2_2_10)
set(EXPAT_URI https://github.com/libexpat/libexpat/archive/R_${EXPAT_VERSION}.tar.gz)
set(EXPAT_HASH 7ca5f09959fcb9a57618368deb627b9f)
set(LCMS_VERSION 2.9)
set(LCMS_URI https://nchc.dl.sourceforge.net/project/lcms/lcms/${LCMS_VERSION}/lcms2-${LCMS_VERSION}.tar.gz)
set(LCMS_HASH 8de1b7724f578d2995c8fdfa35c3ad0e)

set(PUGIXML_VERSION 1.10)
set(PUGIXML_URI https://github.com/zeux/pugixml/archive/v${PUGIXML_VERSION}.tar.gz)
set(PUGIXML_HASH 0c208b0664c7fb822bf1b49ad035e8fd)

set(FLEXBISON_VERSION 2.5.5)
set(FLEXBISON_URI http://prdownloads.sourceforge.net/winflexbison/win_flex_bison-2.5.5.zip)
set(FLEXBISON_URI http://prdownloads.sourceforge.net/winflexbison//win_flex_bison-2.5.5.zip)
set(FLEXBISON_HASH d87a3938194520d904013abef3df10ce)

# Libraries to keep Python modules static on Linux.
@@ -313,41 +295,34 @@ set(SQLITE_HASH fb558c49ee21a837713c4f1e7e413309aabdd9c7)
set(EMBREE_VERSION 3.10.0)
set(EMBREE_URI https://github.com/embree/embree/archive/v${EMBREE_VERSION}.zip)
set(EMBREE_HASH 4bbe29e7eaa46417efc75fc5f1e8eb87)
set(EMBREE_ARM_GIT https://github.com/brechtvl/embree.git)

set(USD_VERSION 21.02)
set(USD_VERSION 20.05)
set(USD_URI https://github.com/PixarAnimationStudios/USD/archive/v${USD_VERSION}.tar.gz)
set(USD_HASH 1dd1e2092d085ed393c1f7c450a4155a)
set(USD_HASH 6d679e739e7f65725d9c029e37dda9fc)

set(OIDN_VERSION 1.3.0)
set(OIDN_VERSION 1.2.3)
set(OIDN_URI https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz)
set(OIDN_HASH 301a5a0958d375a942014df0679b9270)
set(OIDN_HASH 1f11466c2c3efc27faba5ec7078d12b2)

set(LIBGLU_VERSION 9.0.1)
set(LIBGLU_URI ftp://ftp.freedesktop.org/pub/mesa/glu/glu-${LIBGLU_VERSION}.tar.xz)
set(LIBGLU_HASH 151aef599b8259efe9acd599c96ea2a3)

set(MESA_VERSION 20.3.4)
set(MESA_URI ftp://ftp.freedesktop.org/pub/mesa/mesa-${MESA_VERSION}.tar.xz)
set(MESA_HASH 556338446aef8ae947a789b3e0b5e056)
set(MESA_VERSION 18.3.1)
set(MESA_URI ftp://ftp.freedesktop.org/pub/mesa//mesa-${MESA_VERSION}.tar.xz)
set(MESA_HASH d60828056d77bfdbae0970f9b15fb1be)

set(NASM_VERSION 2.15.02)
set(NASM_URI https://github.com/netwide-assembler/nasm/archive/nasm-${NASM_VERSION}.tar.gz)
set(NASM_HASH aded8b796c996a486a56e0515c83e414116decc3b184d88043480b32eb0a8589)
set(NASM_URI https://www.nasm.us/pub/nasm/releasebuilds/${NASM_VERSION}/nasm-${NASM_VERSION}.tar.xz)
set(NASM_HASH f4fd1329b1713e1ccd34b2fc121c4bcd278c9f91cc4cb205ae8fcd2e4728dd14)

set(XR_OPENXR_SDK_VERSION 1.0.14)
set(XR_OPENXR_SDK_VERSION 1.0.8)
set(XR_OPENXR_SDK_URI https://github.com/KhronosGroup/OpenXR-SDK/archive/release-${XR_OPENXR_SDK_VERSION}.tar.gz)
set(XR_OPENXR_SDK_HASH 0df6b2fd6045423451a77ff6bc3e1a75)
set(XR_OPENXR_SDK_HASH c6de63d2e0f9029aa58dfa97cad8ce07)

if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
# Unreleased version with macOS arm support.
set(ISPC_URI https://github.com/ispc/ispc/archive/f5949c055eb9eeb93696978a3da4bfb3a6a30b35.zip)
set(ISPC_HASH d382fea18d01dbd0cd05d9e1ede36d7d)
else()
set(ISPC_VERSION v1.14.1)
set(ISPC_URI https://github.com/ispc/ispc/archive/${ISPC_VERSION}.tar.gz)
set(ISPC_HASH 968fbc8dfd16a60ba4e32d2e0e03ea7a)
endif()
set(ISPC_VERSION v1.14.1)
set(ISPC_URI https://github.com/ispc/ispc/archive/${ISPC_VERSION}.tar.gz)
set(ISPC_HASH 968fbc8dfd16a60ba4e32d2e0e03ea7a)

set(GMP_VERSION 6.2.0)
set(GMP_URI https://gmplib.org/download/gmp/gmp-${GMP_VERSION}.tar.xz)
@@ -356,10 +331,3 @@ set(GMP_HASH a325e3f09e6d91e62101e59f9bda3ec1)
set(POTRACE_VERSION 1.16)
set(POTRACE_URI http://potrace.sourceforge.net/download/${POTRACE_VERSION}/potrace-${POTRACE_VERSION}.tar.gz)
set(POTRACE_HASH 5f0bd87ddd9a620b0c4e65652ef93d69)

set(HARU_VERSION 2_3_0)
set(HARU_URI https://github.com/libharu/libharu/archive/RELEASE_${HARU_VERSION}.tar.gz)
set(HARU_HASH 4f916aa49c3069b3a10850013c507460)

set(SSE2NEON_GIT https://github.com/DLTcollab/sse2neon.git)
set(SSE2NEON_GIT_HASH fe5ff00bb8d19b327714a3c290f3e2ce81ba3525)
@@ -53,7 +53,7 @@ if(WIN32)
endif()
if(BUILD_MODE STREQUAL Debug)
ExternalProject_Add_Step(external_xr_openxr_sdk after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/xr_openxr_sdk/lib/openxr_loaderd.lib ${HARVEST_TARGET}/xr_openxr_sdk/lib/openxr_loaderd.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/xr_openxr_sdk/lib/openxr_loader.lib ${HARVEST_TARGET}/xr_openxr_sdk/lib/openxr_loader_d.lib
DEPENDEES install
)
endif()
@@ -17,18 +17,13 @@
# ***** END GPL LICENSE BLOCK *****

set(YAMLCPP_EXTRA_ARGS
-DBUILD_GMOCK=OFF
-DYAML_CPP_BUILD_TESTS=OFF
-DYAML_CPP_BUILD_TOOLS=OFF
-DYAML_CPP_BUILD_CONTRIB=OFF
-DYAML_MSVC_SHARED_RT=ON
)

if(WIN32)
set(YAMLCPP_EXTRA_ARGS
${YAMLCPP_EXTRA_ARGS}
-DBUILD_GMOCK=OFF
-DYAML_MSVC_SHARED_RT=ON)
endif()

ExternalProject_Add(external_yamlcpp
URL ${YAMLCPP_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
File diff suppressed because it is too large.
@@ -20,7 +20,6 @@ if(APPLE)
endif()

file(GLOB tbb_src "${CMAKE_CURRENT_SOURCE_DIR}/src/tbb/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/src/old/*.cpp")
list(REMOVE_ITEM tbb_src ${CMAKE_CURRENT_SOURCE_DIR}/src/tbb/tbb_bind.cpp)
list(APPEND tbb_src ${CMAKE_CURRENT_SOURCE_DIR}/src/rml/client/rml_tbb.cpp)
file(GLOB to_remove "${CMAKE_CURRENT_SOURCE_DIR}/src/old/test*.cpp")
list(REMOVE_ITEM tbb_src ${to_remove})
@@ -1,12 +0,0 @@
diff --git a/src/hpdf_image_ccitt.c b/src/hpdf_image_ccitt.c
index 8672763..9be531a 100644
--- a/src/hpdf_image_ccitt.c
+++ b/src/hpdf_image_ccitt.c
@@ -21,7 +21,6 @@
#include <memory.h>
#include <assert.h>

-#define G3CODES
#include "t4.h"

typedef unsigned int uint32;
@@ -1,40 +0,0 @@
diff -Naur oidn-1.3.0/cmake/FindTBB.cmake external_openimagedenoise/cmake/FindTBB.cmake
--- oidn-1.3.0/cmake/FindTBB.cmake 2021-02-04 16:20:26 -0700
+++ external_openimagedenoise/cmake/FindTBB.cmake 2021-02-12 09:35:53 -0700
@@ -332,20 +332,22 @@
${TBB_ROOT}/lib/${TBB_ARCH}/${TBB_VCVER}
${TBB_ROOT}/lib
)
-
# On Windows, also search the DLL so that the client may install it.
file(GLOB DLL_NAMES
${TBB_ROOT}/bin/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/bin/${LIB_NAME}.dll
+ ${TBB_ROOT}/lib/${LIB_NAME}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB1}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB2}.dll
${TBB_ROOT}/../redist/${TBB_ARCH}/tbb/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/../redist/${TBB_ARCH}_win/tbb/${TBB_VCVER}/${LIB_NAME}.dll
)
- list(GET DLL_NAMES 0 DLL_NAME)
- get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
- set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ if (DLL_NAMES)
+ list(GET DLL_NAMES 0 DLL_NAME)
+ get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
+ set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ endif()
elseif(APPLE)
set(LIB_PATHS ${TBB_ROOT}/lib)
else()
--- external_openimagedenoise/cmake/oidn_ispc.cmake 2021-02-15 17:29:34.000000000 +0100
+++ external_openimagedenoise/cmake/oidn_ispc.cmake2 2021-02-15 17:29:28.000000000 +0100
@@ -98,7 +98,7 @@
elseif(OIDN_ARCH STREQUAL "ARM64")
set(ISPC_ARCHITECTURE "aarch64")
if(APPLE)
- set(ISPC_TARGET_OS "--target-os=ios")
+ set(ISPC_TARGET_OS "--target-os=macos")
endif()
endif()
@@ -1,16 +1,33 @@
diff --git a/share/cmake/modules/Findpystring.cmake b/share/cmake/modules/Findpystring.cmake
index 7b894a45..92618215 100644
--- a/share/cmake/modules/Findpystring.cmake
+++ b/share/cmake/modules/Findpystring.cmake
@@ -113,6 +113,11 @@ if(NOT pystring_FOUND)
-DCMAKE_INSTALL_MESSAGE=${CMAKE_INSTALL_MESSAGE}
-DCMAKE_INSTALL_PREFIX=${_EXT_DIST_ROOT}
-DCMAKE_OBJECT_PATH_MAX=${CMAKE_OBJECT_PATH_MAX}
+ -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES}
+ -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}
+ -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT}
+ -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG}
+ -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 1eb691b..cff9bd8 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -23,8 +23,6 @@ if(WIN32)
if("${CMAKE_BUILD_TYPE}" STREQUAL "Release")
set(EXTERNAL_COMPILE_FLAGS "${EXTERNAL_COMPILE_FLAGS} /WX")
endif()
-else()
- set(EXTERNAL_COMPILE_FLAGS "${EXTERNAL_COMPILE_FLAGS} -Werror")
endif()

# SHARED
--- a/CMakeLists.txt 2018-09-10 22:15:29.000000000 +0200
+++ b/CMakeLists.txt 2018-09-10 22:17:40.000000000 +0200
@@ -229,7 +229,7 @@
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-whitespace ${TINYXML_PATCHFILE}
BINARY_DIR ext/build/tinyxml
INSTALL_DIR ext/dist
- CMAKE_ARGS ${TINYXML_CMAKE_ARGS}
+ CMAKE_ARGS ${TINYXML_CMAKE_ARGS} -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET} -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} -DCMAKE_C_FLAGS_DEBUG=${CMAKE_C_FLAGS_DEBUG} -DCMAKE_C_FLAGS_RELEASE=${CMAKE_C_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG} -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
)
if(CMAKE_TOOLCHAIN_FILE)
set(pystring_CMAKE_ARGS
if(WIN32)
set(TINYXML_STATIC_LIBRARIES ${PROJECT_BINARY_DIR}/ext/dist/lib/tinyxml.lib)
@@ -343,7 +343,7 @@
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-whitespace ${YAML_CPP_PATCHFILE}
BINARY_DIR ext/build/yaml-cpp
INSTALL_DIR ext/dist
- CMAKE_ARGS ${YAML_CPP_CMAKE_ARGS}
+ CMAKE_ARGS ${YAML_CPP_CMAKE_ARGS} -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET} -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} -DCMAKE_C_FLAGS_DEBUG=${CMAKE_C_FLAGS_DEBUG} -DCMAKE_C_FLAGS_RELEASE=${CMAKE_C_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG} -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
)
set(YAML_CPP_INCLUDE_DIRS ${PROJECT_BINARY_DIR}/ext/dist/include)
set(YAML_CPP_LIBRARY_DIRS ${PROJECT_BINARY_DIR}/ext/dist/lib)
build_files/build_environment/patches/opencolorio_win.diff (new file, 51 lines)
@@ -0,0 +1,51 @@
diff -Naur external_opencolorio/CMakeLists.txt external_opencolorio.patched/CMakeLists.txt
--- external_opencolorio/CMakeLists.txt 2018-01-04 18:38:27 -0700
+++ external_opencolorio.patched/CMakeLists.txt 2018-08-15 11:46:53 -0600
@@ -251,25 +251,30 @@
if(USE_EXTERNAL_YAML)
# Set minimum yaml version for non-patched sources.
set(YAML_VERSION_MIN "0.3.0")
- include(FindPkgConfig)
- pkg_check_modules(PC_YAML_CPP REQUIRED QUIET yaml-cpp)
- find_path(YAML_CPP_INCLUDE_DIR yaml-cpp/yaml.h
- HINTS ${PC_YAML_CPP_INCLUDEDIR} ${PC_YAML_CPP_INCLUDE_DIRS} )
- find_library(YAML_CPP_LIBRARY LIBRARY_NAMES yaml-cpp libyaml-cpp
- HINTS ${PC_YAML_CPP_LIBRARY_DIRS} )
- set(YAML_CPP_LIBRARIES ${YAML_CPP_LIBRARY})
- set(YAML_CPP_INCLUDE_DIRS ${YAML_CPP_INCLUDE_DIR})
- set(YAML_CPP_VERSION ${PC_YAML_CPP_VERSION})
+ if(NOT WIN32)
+ include(FindPkgConfig)
+ pkg_check_modules(PC_YAML_CPP REQUIRED QUIET yaml-cpp)
+ find_path(YAML_CPP_INCLUDE_DIR yaml-cpp/yaml.h
+ HINTS ${PC_YAML_CPP_INCLUDEDIR} ${PC_YAML_CPP_INCLUDE_DIRS} )
+ find_library(YAML_CPP_LIBRARY LIBRARY_NAMES yaml-cpp libyaml-cpp
+ HINTS ${PC_YAML_CPP_LIBRARY_DIRS} )
+ set(YAML_CPP_LIBRARIES ${YAML_CPP_LIBRARY})
+ set(YAML_CPP_INCLUDE_DIRS ${YAML_CPP_INCLUDE_DIR})
+ set(YAML_CPP_VERSION ${PC_YAML_CPP_VERSION})

- if(YAML_CPP_VERSION VERSION_LESS ${YAML_VERSION_MIN})
- message(FATAL_ERROR "ERROR: yaml-cpp ${YAML_VERSION_MIN} or greater is required.")
- endif()
-
- find_package_handle_standard_args(yaml-cpp
- REQUIRED_VARS YAML_CPP_LIBRARIES YAML_CPP_INCLUDE_DIRS )
- set(YAML_CPP_FOUND ${YAML-CPP_FOUND})
- mark_as_advanced(YAML_CPP_INCLUDE_DIR YAML_CPP_LIBRARY YAML-CPP_FOUND)
+ if(YAML_CPP_VERSION VERSION_LESS ${YAML_VERSION_MIN})
+ message(FATAL_ERROR "ERROR: yaml-cpp ${YAML_VERSION_MIN} or greater is required.")
+ endif()

+ find_package_handle_standard_args(yaml-cpp
+ REQUIRED_VARS YAML_CPP_LIBRARIES YAML_CPP_INCLUDE_DIRS )
+ set(YAML_CPP_FOUND ${YAML-CPP_FOUND})
+ mark_as_advanced(YAML_CPP_INCLUDE_DIR YAML_CPP_LIBRARY YAML-CPP_FOUND)
+ else()
+ set(EXTERNAL_INCLUDE_DIRS ${EXTERNAL_INCLUDE_DIRS} ${INC_1})
+ set(EXTERNAL_INCLUDE_DIRS ${EXTERNAL_INCLUDE_DIRS} ${INC_2})
+ message("INCLUDE DIRS = i:${EXTERNAL_INCLUDE_DIRS} |1:${INC_1} |2:${INC_2}")
+ endif()
if(YAML_CPP_FOUND)
if(YAML_CPP_VERSION VERSION_GREATER "0.5.0")
# Need to also get the boost headers here, as yaml-cpp 0.5.0+ requires them.
@@ -1,10 +1,10 @@
diff -Naur openvdb-8.0.0/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
--- openvdb-8.0.0/cmake/FindIlmBase.cmake 2020-12-24 10:13:14 -0700
+++ openvdb/cmake/FindIlmBase.cmake 2021-02-05 12:07:49 -0700
@@ -217,6 +217,12 @@
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib")
endif()
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${_IlmBase_Version_Suffix}.lib")
diff -Naur orig/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
--- orig/cmake/FindIlmBase.cmake 2019-12-06 12:11:33 -0700
+++ openvdb/cmake/FindIlmBase.cmake 2020-08-12 12:48:44 -0600
@@ -225,6 +225,12 @@
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
"-${IlmBase_VERSION_MAJOR}_${IlmBase_VERSION_MINOR}.lib"
)
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
+ "_s.lib"
+ )
@@ -13,14 +13,14 @@ diff -Naur openvdb-8.0.0/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
+ )
else()
if(ILMBASE_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
diff -Naur openvdb-8.0.0/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
--- openvdb-8.0.0/cmake/FindOpenEXR.cmake 2020-12-24 10:13:14 -0700
+++ openvdb/cmake/FindOpenEXR.cmake 2021-02-05 12:23:39 -0700
@@ -210,6 +210,12 @@
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib")
endif()
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${_OpenEXR_Version_Suffix}.lib")
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
diff -Naur orig/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
--- orig/cmake/FindOpenEXR.cmake 2019-12-06 12:11:33 -0700
+++ openvdb/cmake/FindOpenEXR.cmake 2020-08-12 12:48:44 -0600
@@ -218,6 +218,12 @@
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
"-${OpenEXR_VERSION_MAJOR}_${OpenEXR_VERSION_MINOR}.lib"
)
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
+ "_s.lib"
+ )
@@ -29,11 +29,11 @@ diff -Naur openvdb-8.0.0/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
+ )
else()
if(OPENEXR_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/CMakeLists.txt
--- openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt 2020-12-24 10:13:14 -0700
+++ openvdb/openvdb/openvdb/CMakeLists.txt 2021-02-05 11:18:33 -0700
@@ -107,7 +107,9 @@
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
diff -Naur orig/openvdb/CMakeLists.txt openvdb/openvdb/CMakeLists.txt
--- orig/openvdb/CMakeLists.txt 2019-12-06 12:11:33 -0700
+++ openvdb/openvdb/CMakeLists.txt 2020-08-12 14:12:26 -0600
@@ -105,7 +105,9 @@
# http://boost.2283326.n4.nabble.com/CMake-config-scripts-broken-in-1-70-td4708957.html
# https://github.com/boostorg/boost_install/commit/160c7cb2b2c720e74463865ef0454d4c4cd9ae7c
set(BUILD_SHARED_LIBS ON)
@@ -44,15 +44,15 @@ diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/
endif()

find_package(Boost ${MINIMUM_BOOST_VERSION} REQUIRED COMPONENTS iostreams system)
@@ -146,6 +148,7 @@
Boost::disable_autolinking # add -DBOOST_ALL_NO_LIB
)
@@ -193,6 +195,7 @@
if(OPENVDB_DISABLE_BOOST_IMPLICIT_LINKING)
add_definitions(-DBOOST_ALL_NO_LIB)
endif()
+ add_definitions(-D__TBB_NO_IMPLICIT_LINKAGE -DOPENVDB_OPENEXR_STATICLIB)
endif()

if(USE_EXR)
@@ -379,7 +382,12 @@
# @todo Should be target definitions
@@ -383,7 +386,12 @@
# imported targets.

if(OPENVDB_CORE_SHARED)
@@ -66,9 +66,9 @@ diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/
endif()

if(OPENVDB_CORE_STATIC)
diff -Naur openvdb-8.0.0/openvdb/openvdb/version.rc.in openvdb/openvdb/openvdb/version.rc.in
--- openvdb-8.0.0/openvdb/openvdb/version.rc.in 1969-12-31 17:00:00 -0700
+++ openvdb/openvdb/openvdb/version.rc.in 2021-02-05 11:18:33 -0700
diff -Naur orig/openvdb/version.rc.in openvdb/openvdb/version.rc.in
--- orig/openvdb/version.rc.in 1969-12-31 17:00:00 -0700
+++ openvdb/openvdb/version.rc.in 2020-08-12 14:15:01 -0600
@@ -0,0 +1,48 @@
+#include <winver.h>
+
@@ -2,23 +2,23 @@ diff --git a/setup.py.orig b/setup.py
index a97a755..07ce853 100644
--- a/setup.py.orig
+++ b/setup.py
@@ -1603,13 +1603,13 @@
@@ -1422,13 +1422,13 @@ class PyBuildExt(build_ext):
version = line.split()[2]
break
if version >= version_req:
- if (self.compiler.find_library_file(self.lib_dirs, 'z')):
+ if (self.compiler.find_library_file(self.lib_dirs, 'z_pic')):
if MACOS:
- if (self.compiler.find_library_file(lib_dirs, 'z')):
+ if (self.compiler.find_library_file(lib_dirs, 'z_pic')):
if host_platform == "darwin":
zlib_extra_link_args = ('-Wl,-search_paths_first',)
else:
zlib_extra_link_args = ()
self.add(Extension('zlib', ['zlibmodule.c'],
- libraries=['z'],
+ libraries=['z_pic'],
extra_link_args=zlib_extra_link_args))
exts.append( Extension('zlib', ['zlibmodule.c'],
- libraries = ['z'],
+ libraries = ['z_pic'],
extra_link_args = zlib_extra_link_args))
have_zlib = True
else:
@@ -1623,7 +1623,7 @@
@@ -1442,7 +1442,7 @@ class PyBuildExt(build_ext):
# crc32 if we have it. Otherwise binascii uses its own.
if have_zlib:
extra_compile_args = ['-DUSE_ZLIB_CRC32']
@@ -27,12 +27,12 @@ index a97a755..07ce853 100644
extra_link_args = zlib_extra_link_args
else:
extra_compile_args = []
@@ -2168,7 +2168,7 @@
ffi_inc = None
@@ -1991,7 +1991,7 @@ class PyBuildExt(build_ext):
print('Header file {} does not exist'.format(ffi_h))
if ffi_lib is None and ffi_inc:
ffi_lib = None
if ffi_inc is not None:
- for lib_name in ('ffi', 'ffi_pic'):
+ for lib_name in ('ffi_pic', ):
if (self.compiler.find_library_file(self.lib_dirs, lib_name)):
if (self.compiler.find_library_file(lib_dirs, lib_name)):
ffi_lib = lib_name
break
build_files/build_environment/patches/python_macos.diff (new file, 289 lines)
@@ -0,0 +1,289 @@
diff -ru a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst
--- a/Doc/library/ctypes.rst 2020-03-10 07:11:12.000000000 +0100
+++ b/Doc/library/ctypes.rst 2020-07-14 08:10:10.000000000 +0200
@@ -1551,6 +1551,13 @@
value usable as argument (integer, string, ctypes instance). This allows
defining adapters that can adapt custom objects as function parameters.

+ .. attribute:: variadic
+
+ Assign a boolean to specify that the function takes a variable number of
+ arguments. This does not matter on most platforms, but for Apple arm64
+ platforms variadic functions have a different calling convention than
+ normal functions.
+
.. attribute:: errcheck

Assign a Python function or another callable to this attribute. The
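A minimal usage sketch of the `variadic` attribute this hunk documents (the sketch is not part of the patch; it assumes a Python interpreter built with this patch, and uses `printf` only as a convenient variadic C function):

import ctypes

libc = ctypes.CDLL(None)             # the process's own C library
printf = libc.printf
printf.restype = ctypes.c_int
printf.argtypes = [ctypes.c_char_p]  # only the fixed parameters of the prototype
printf.variadic = True               # route the call through ffi_prep_cif_var()
printf(b"%s %d\n", b"value:", 42)    # arguments beyond argtypes are the variadic ones

On an unpatched interpreter the `printf.variadic = True` assignment would simply raise AttributeError, which is why the patch below also auto-detects variadic calls when more arguments are passed than the prototype declares.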
diff -ru a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
--- a/Modules/_ctypes/_ctypes.c 2020-03-10 07:11:12.000000000 +0100
+++ b/Modules/_ctypes/_ctypes.c 2020-07-14 08:14:41.000000000 +0200
@@ -3175,6 +3175,35 @@
}

static int
+PyCFuncPtr_set_variadic(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ignored))
+{
+ StgDictObject *dict = PyObject_stgdict((PyObject *)self);
+ assert(dict);
+ int r = PyObject_IsTrue(ob);
+ if (r == 1) {
+ dict->flags |= FUNCFLAG_VARIADIC;
+ return 0;
+ } else if (r == 0) {
+ dict->flags &= ~FUNCFLAG_VARIADIC;
+ return 0;
+ } else {
+ return -1;
+ }
+}
+
+static PyObject *
+PyCFuncPtr_get_variadic(PyCFuncPtrObject *self, void *Py_UNUSED(ignored))
+{
+ StgDictObject *dict = PyObject_stgdict((PyObject *)self);
+ assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */
+ if (dict->flags & FUNCFLAG_VARIADIC)
+ Py_RETURN_TRUE;
+ else
+ Py_RETURN_FALSE;
+}
+
+
+static int
PyCFuncPtr_set_argtypes(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ignored))
{
PyObject *converters;
@@ -5632,6 +5661,7 @@
PyModule_AddObject(m, "FUNCFLAG_USE_ERRNO", PyLong_FromLong(FUNCFLAG_USE_ERRNO));
PyModule_AddObject(m, "FUNCFLAG_USE_LASTERROR", PyLong_FromLong(FUNCFLAG_USE_LASTERROR));
PyModule_AddObject(m, "FUNCFLAG_PYTHONAPI", PyLong_FromLong(FUNCFLAG_PYTHONAPI));
+ PyModule_AddObject(m, "FUNCFLAG_VARIADIC", PyLong_FromLong(FUNCFLAG_VARIADIC));
PyModule_AddStringConstant(m, "__version__", "1.1.0");

PyModule_AddObject(m, "_memmove_addr", PyLong_FromVoidPtr(memmove));
diff -ru a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
--- a/Modules/_ctypes/callproc.c 2020-03-10 07:11:12.000000000 +0100
+++ b/Modules/_ctypes/callproc.c 2020-07-14 08:18:33.000000000 +0200
@@ -767,7 +767,8 @@
ffi_type **atypes,
ffi_type *restype,
void *resmem,
- int argcount)
+ int argcount,
+ int argtypecount)
{
PyThreadState *_save = NULL; /* For Py_BLOCK_THREADS and Py_UNBLOCK_THREADS */
PyObject *error_object = NULL;
@@ -793,15 +794,38 @@
if ((flags & FUNCFLAG_CDECL) == 0)
cc = FFI_STDCALL;
#endif
- if (FFI_OK != ffi_prep_cif(&cif,
- cc,
- argcount,
- restype,
- atypes)) {
- PyErr_SetString(PyExc_RuntimeError,
- "ffi_prep_cif failed");
- return -1;
- }
+#if HAVE_FFI_PREP_CIF_VAR
+ /* Everyone SHOULD set f.variadic=True on variadic function pointers, but
+ * lots of existing code will not. If there's at least one arg and more
+ * args are passed than are defined in the prototype, then it must be a
+ * variadic function. */
+ if ((flags & FUNCFLAG_VARIADIC) ||
+ (argtypecount != 0 && argcount > argtypecount))
+ {
+ if (FFI_OK != ffi_prep_cif_var(&cif,
+ cc,
+ argtypecount,
+ argcount,
+ restype,
+ atypes)) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "ffi_prep_cif_var failed");
+ return -1;
+ }
+ } else {
+#endif
+ if (FFI_OK != ffi_prep_cif(&cif,
+ cc,
+ argcount,
+ restype,
+ atypes)) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "ffi_prep_cif failed");
+ return -1;
+ }
+#if HAVE_FFI_PREP_CIF_VAR
+ }
+#endif

if (flags & (FUNCFLAG_USE_ERRNO | FUNCFLAG_USE_LASTERROR)) {
error_object = _ctypes_get_errobj(&space);
@@ -1185,9 +1209,8 @@

if (-1 == _call_function_pointer(flags, pProc, avalues, atypes,
rtype, resbuf,
- Py_SAFE_DOWNCAST(argcount,
- Py_ssize_t,
- int)))
+ Py_SAFE_DOWNCAST(argcount, Py_ssize_t, int),
+ Py_SAFE_DOWNCAST(argtype_count, Py_ssize_t, int)))
goto cleanup;

#ifdef WORDS_BIGENDIAN
diff -ru a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h
--- a/Modules/_ctypes/ctypes.h 2020-03-10 07:11:12.000000000 +0100
+++ b/Modules/_ctypes/ctypes.h 2020-07-14 08:30:53.000000000 +0200
@@ -285,6 +285,7 @@
#define FUNCFLAG_PYTHONAPI 0x4
#define FUNCFLAG_USE_ERRNO 0x8
#define FUNCFLAG_USE_LASTERROR 0x10
+#define FUNCFLAG_VARIADIC 0x20

#define TYPEFLAG_ISPOINTER 0x100
#define TYPEFLAG_HASPOINTER 0x200
diff -ru a/configure b/configure
--- a/configure 2020-03-10 07:11:12.000000000 +0100
+++ b/configure 2020-07-14 08:03:27.000000000 +0200
@@ -3374,7 +3374,7 @@
# has no effect, don't bother defining them
Darwin/[6789].*)
define_xopen_source=no;;
- Darwin/1[0-9].*)
+ Darwin/[12][0-9].*)
define_xopen_source=no;;
# On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but
# used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined
@@ -9251,6 +9251,9 @@
ppc)
MACOSX_DEFAULT_ARCH="ppc64"
;;
+ arm64)
+ MACOSX_DEFAULT_ARCH="arm64"
+ ;;
*)
as_fn_error $? "Unexpected output of 'arch' on OSX" "$LINENO" 5
;;
diff -ru a/configure.ac b/configure.ac
--- a/configure.ac 2020-03-10 07:11:12.000000000 +0100
+++ b/configure.ac 2020-07-14 08:03:27.000000000 +0200
@@ -2456,6 +2456,9 @@
ppc)
MACOSX_DEFAULT_ARCH="ppc64"
;;
+ arm64)
+ MACOSX_DEFAULT_ARCH="arm64"
+ ;;
*)
AC_MSG_ERROR([Unexpected output of 'arch' on OSX])
;;
|
||||
--- a/setup.py 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/setup.py 2020-07-14 08:28:12.000000000 +0200
|
||||
@@ -141,6 +141,13 @@
|
||||
os.unlink(tmpfile)
|
||||
|
||||
return MACOS_SDK_ROOT
|
||||
+
|
||||
+def is_macosx_at_least(vers):
|
||||
+ if host_platform == 'darwin':
|
||||
+ dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
|
||||
+ if dep_target:
|
||||
+ return tuple(map(int, dep_target.split('.'))) >= vers
|
||||
+ return False
|
||||
|
||||
def is_macosx_sdk_path(path):
|
||||
"""
|
||||
@@ -150,6 +157,13 @@
|
||||
or path.startswith('/System/')
|
||||
or path.startswith('/Library/') )
|
||||
|
||||
+def grep_headers_for(function, headers):
|
||||
+ for header in headers:
|
||||
+ with open(header, 'r') as f:
|
||||
+ if function in f.read():
|
||||
+ return True
|
||||
+ return False
|
||||
+
|
||||
def find_file(filename, std_dirs, paths):
|
||||
"""Searches for the directory where a given file is located,
|
||||
and returns a possibly-empty list of additional directories, or None
|
||||
@@ -1972,7 +1986,11 @@
|
||||
return True
|
||||
|
||||
def detect_ctypes(self, inc_dirs, lib_dirs):
|
||||
- self.use_system_libffi = False
|
||||
+ if not sysconfig.get_config_var("LIBFFI_INCLUDEDIR") and is_macosx_at_least((10,15)):
|
||||
+ self.use_system_libffi = True
|
||||
+ else:
|
||||
+ self.use_system_libffi = '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS")
|
||||
+
|
||||
include_dirs = []
|
||||
extra_compile_args = []
|
||||
extra_link_args = []
|
||||
@@ -2016,32 +2034,48 @@
|
||||
ext_test = Extension('_ctypes_test',
|
||||
sources=['_ctypes/_ctypes_test.c'],
|
||||
libraries=['m'])
|
||||
+ ffi_inc = sysconfig.get_config_var("LIBFFI_INCLUDEDIR")
|
||||
+ ffi_lib = None
|
||||
+
|
||||
self.extensions.extend([ext, ext_test])
|
||||
|
||||
if host_platform == 'darwin':
|
||||
- if '--with-system-ffi' not in sysconfig.get_config_var("CONFIG_ARGS"):
|
||||
+ if not self.use_system_libffi:
|
||||
return
|
||||
- # OS X 10.5 comes with libffi.dylib; the include files are
|
||||
- # in /usr/include/ffi
|
||||
- inc_dirs.append('/usr/include/ffi')
|
||||
-
|
||||
- ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
|
||||
- if not ffi_inc or ffi_inc[0] == '':
|
||||
- ffi_inc = find_file('ffi.h', [], inc_dirs)
|
||||
- if ffi_inc is not None:
|
||||
- ffi_h = ffi_inc[0] + '/ffi.h'
|
||||
+ ffi_in_sdk = os.path.join(macosx_sdk_root(), "usr/include/ffi")
|
||||
+ if os.path.exists(ffi_in_sdk):
|
||||
+ ffi_inc = ffi_in_sdk
|
||||
+ ffi_lib = 'ffi'
|
||||
+ else:
|
||||
+ # OS X 10.5 comes with libffi.dylib; the include files are
|
||||
+ # in /usr/include/ffi
|
||||
+ ffi_inc_dirs.append('/usr/include/ffi')
|
||||
+
|
||||
+ if not ffi_inc:
|
||||
+ found = find_file('ffi.h', [], ffi_inc_dirs)
|
||||
+ if found:
|
||||
+ ffi_inc = found[0]
|
||||
+ if ffi_inc:
|
||||
+ ffi_h = ffi_inc + '/ffi.h'
|
||||
if not os.path.exists(ffi_h):
|
||||
ffi_inc = None
|
||||
print('Header file {} does not exist'.format(ffi_h))
|
||||
- ffi_lib = None
|
||||
- if ffi_inc is not None:
|
||||
+ if ffi_lib is None and ffi_inc:
|
||||
for lib_name in ('ffi', 'ffi_pic'):
|
||||
if (self.compiler.find_library_file(lib_dirs, lib_name)):
|
||||
ffi_lib = lib_name
|
||||
break
|
||||
|
||||
if ffi_inc and ffi_lib:
|
||||
- ext.include_dirs.extend(ffi_inc)
|
||||
+ ffi_headers = glob(os.path.join(ffi_inc, '*.h'))
|
||||
+ if grep_headers_for('ffi_closure_alloc', ffi_headers):
|
||||
+ try:
|
||||
+ sources.remove('_ctypes/malloc_closure.c')
|
||||
+ except ValueError:
|
||||
+ pass
|
||||
+ if grep_headers_for('ffi_prep_cif_var', ffi_headers):
|
||||
+ ext.extra_compile_args.append("-DHAVE_FFI_PREP_CIF_VAR=1")
|
||||
+ ext.include_dirs.append(ffi_inc)
|
||||
ext.libraries.append(ffi_lib)
|
||||
self.use_system_libffi = True
|
||||
|
@@ -1,13 +0,0 @@
diff --git a/include/tbb/tbb_config.h b/include/tbb/tbb_config.h
index 7a8d06a0..886699d8 100644
--- a/include/tbb/tbb_config.h
+++ b/include/tbb/tbb_config.h
@@ -620,7 +620,7 @@ There are four cases that are supported:
// instantiation site, which is too late for suppression of the corresponding messages for internal
// stuff.
#if !defined(__INTEL_COMPILER) && (!defined(TBB_SUPPRESS_DEPRECATED_MESSAGES) || (TBB_SUPPRESS_DEPRECATED_MESSAGES == 0))
- #if (__cplusplus >= 201402L)
+ #if (__cplusplus >= 201402L && (!defined(_MSC_VER) || _MSC_VER >= 1920))
#define __TBB_DEPRECATED [[deprecated]]
#define __TBB_DEPRECATED_MSG(msg) [[deprecated(msg)]]
#elif _MSC_VER
@@ -26,30 +26,36 @@ diff -Naur external_usd_base/cmake/macros/Public.cmake external_usd/cmake/macros
endforeach()
foreach(lib ${PXR_OBJECT_LIBS})
set(objects "${objects};\$<TARGET_OBJECTS:${lib}>")
diff -ru USD-20.11/pxr/base/tf/pxrLZ4/lz4.cpp external_usd/pxr/base/tf/pxrLZ4/lz4.cpp
--- USD-20.11/pxr/base/tf/pxrLZ4/lz4.cpp 2020-10-14 19:25:19.000000000 +0100
+++ external_usd/pxr/base/tf/pxrLZ4/lz4.cpp 2021-02-09 09:28:51.496190085 +0000
@@ -614,6 +614,15 @@
/*-************************************
* Internal Definitions used in Tests
**************************************/
+
+/*******************************************************************
+ * Disabled in Blender. The BLOSC library also exposes these
+ * functions, and this causes 'duplicate symbol' linker errors.
+ *
+ * This problem has been reported upstream at
+ * https://github.com/PixarAnimationStudios/USD/issues/1447
+ *
+ *******************************************************************
#if defined (__cplusplus)
extern "C" {
#endif
@@ -627,6 +636,7 @@
#if defined (__cplusplus)
}
#endif
+********************************************************************/

diff --git a/pxr/base/arch/align.h b/pxr/base/arch/align.h
index f3cabf4..ebc8a69 100644
--- a/pxr/base/arch/align.h
+++ b/pxr/base/arch/align.h
@@ -77,7 +77,11 @@ ArchAlignMemory(void *base)
/// The size of a CPU cache line on the current processor architecture in bytes.
///
/// \hideinitializer
+#if defined(ARCH_OS_DARWIN) && defined(ARCH_CPU_ARM)
+#define ARCH_CACHE_LINE_SIZE 128
+#else
#define ARCH_CACHE_LINE_SIZE 64
+#endif

/*-******************************
* Compression functions
///@}

diff --git a/pxr/base/arch/math.h b/pxr/base/arch/math.h
index 3e66c37..64a052c 100644
--- a/pxr/base/arch/math.h
+++ b/pxr/base/arch/math.h
@@ -42,7 +42,7 @@ PXR_NAMESPACE_OPEN_SCOPE
/// \addtogroup group_arch_Math
///@{

-#if defined (ARCH_CPU_INTEL) || defined(doxygen)
+#if defined (ARCH_CPU_INTEL) || defined(ARCH_CPU_ARM) || defined(doxygen)

/// This is the smallest value e such that 1+e^2 == 1, using floats.
/// True for all IEEE754 chipsets.

@@ -88,6 +88,7 @@ class VersionInfo:
self.short_version = "%d.%02d" % (version_numbers[0], version_numbers[1])
self.version = "%d.%02d.%d" % version_numbers
self.version_cycle = self._parse_header_file(blender_h, 'BLENDER_VERSION_CYCLE')
self.version_cycle_number = self._parse_header_file(blender_h, 'BLENDER_VERSION_CYCLE_NUMBER')
self.hash = self._parse_header_file(buildinfo_h, 'BUILD_HASH')[1:-1]

if self.version_cycle == "release":
@@ -96,7 +97,8 @@ class VersionInfo:
self.is_development_build = False
elif self.version_cycle == "rc":
# Release candidate
self.full_version = self.version + self.version_cycle
version_cycle = self.version_cycle + self.version_cycle_number
self.full_version = self.version + version_cycle
self.is_development_build = False
else:
# Development build
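The `rc` branch above now appends the cycle number as well as the cycle name; a quick sketch of the difference, using made-up version numbers for illustration:

version = "2.92.0"
version_cycle = "rc" + "1"             # self.version_cycle + self.version_cycle_number
full_version = version + version_cycle
assert full_version == "2.92.0rc1"     # the old code produced "2.92.0rc", dropping the number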
@@ -42,7 +42,7 @@ def get_cmake_options(builder):
elif builder.platform == 'linux':
config_file = "build_files/buildbot/config/blender_linux.cmake"

optix_sdk_dir = os.path.join(builder.blender_dir, '..', '..', 'NVIDIA-Optix-SDK-7.1')
optix_sdk_dir = os.path.join(builder.blender_dir, '..', '..', 'NVIDIA-Optix-SDK')
options.append('-DOPTIX_ROOT_DIR:PATH=' + optix_sdk_dir)

# Workaround to build sm_30 kernels with CUDA 10, since CUDA 11 no longer supports that architecture
@@ -19,7 +19,7 @@
|
||||
#=============================================================================
|
||||
|
||||
IF(NOT AUDASPACE_ROOT_DIR AND NOT $ENV{AUDASPACE_ROOT_DIR} STREQUAL "")
|
||||
SET(AUDASPACE_ROOT_DIR $ENV{AUDASPACE_ROOT_DIR})
|
||||
SET(AUDASPACE_ROOT_DIR $ENV{AUDASPACE_ROOT_DIR})
|
||||
ENDIF()
|
||||
|
||||
SET(_audaspace_SEARCH_DIRS
|
||||
|
@@ -34,8 +34,6 @@ set(_clang_tidy_SEARCH_DIRS
# TODO(sergey): Find more reliable way of finding the latest clang-tidy.
find_program(CLANG_TIDY_EXECUTABLE
NAMES
clang-tidy-12
clang-tidy-11
clang-tidy-10
clang-tidy-9
clang-tidy-8
@@ -34,17 +34,12 @@ FIND_PATH(EMBREE_INCLUDE_DIR
include
)

IF(NOT (APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")))
SET(_embree_SIMD_COMPONENTS
embree_sse42
embree_avx
embree_avx2
)
ENDIF()

SET(_embree_FIND_COMPONENTS
embree3
${_embree_SIMD_COMPONENTS}
embree_sse42
embree_avx
embree_avx2
lexers
math
simd
@@ -64,14 +59,14 @@ FOREACH(COMPONENT ${_embree_FIND_COMPONENTS})
PATH_SUFFIXES
lib64 lib
)
IF(NOT EMBREE_${UPPERCOMPONENT}_LIBRARY)
IF(EMBREE_EMBREE3_LIBRARY)
IF (NOT EMBREE_${UPPERCOMPONENT}_LIBRARY)
IF (EMBREE_EMBREE3_LIBRARY)
# If we can't find all the static libraries, try to fall back to the shared library if found.
# This allows building with a shared embree library
SET(_embree_LIBRARIES ${EMBREE_EMBREE3_LIBRARY})
BREAK()
ENDIF()
ENDIF()
ENDIF ()
ENDIF ()
LIST(APPEND _embree_LIBRARIES "${EMBREE_${UPPERCOMPONENT}_LIBRARY}")
ENDFOREACH()

@@ -1,66 +0,0 @@
# - Find HARU library
# Find the native Haru includes and library
# This module defines
# HARU_INCLUDE_DIRS, where to find hpdf.h, set when
# HARU_INCLUDE_DIR is found.
# HARU_LIBRARIES, libraries to link against to use Haru.
# HARU_ROOT_DIR, The base directory to search for Haru.
# This can also be an environment variable.
# HARU_FOUND, If false, do not try to use Haru.
#
# also defined, but not for general use are
# HARU_LIBRARY, where to find the Haru library.

#=============================================================================
# Copyright 2021 Blender Foundation.
#
# Distributed under the OSI-approved BSD 3-Clause License,
# see accompanying file BSD-3-Clause-license.txt for details.
#=============================================================================

# If HARU_ROOT_DIR was defined in the environment, use it.
if(NOT HARU_ROOT_DIR AND NOT $ENV{HARU_ROOT_DIR} STREQUAL "")
set(HARU_ROOT_DIR $ENV{HARU_ROOT_DIR})
endif()

set(_haru_SEARCH_DIRS
${HARU_ROOT_DIR}
/opt/lib/haru
)

find_path(HARU_INCLUDE_DIR
NAMES
hpdf.h
HINTS
${_haru_SEARCH_DIRS}
PATH_SUFFIXES
include/haru
include
)

find_library(HARU_LIBRARY
NAMES
hpdfs
hpdf
HINTS
${_haru_SEARCH_DIRS}
PATH_SUFFIXES
lib64 lib
)

# Handle the QUIETLY and REQUIRED arguments and set HARU_FOUND to TRUE if
# all listed variables are TRUE.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Haru DEFAULT_MSG HARU_LIBRARY HARU_INCLUDE_DIR)

if(HARU_FOUND)
set(HARU_LIBRARIES ${HARU_LIBRARY})
set(HARU_INCLUDE_DIRS ${HARU_INCLUDE_DIR})
endif()

mark_as_advanced(
HARU_INCLUDE_DIR
HARU_LIBRARY
)

unset(_haru_SEARCH_DIRS)
@@ -26,8 +26,7 @@ ENDIF()
SET(_opencolorio_FIND_COMPONENTS
OpenColorIO
yaml-cpp
expat
pystring
tinyxml
)

SET(_opencolorio_SEARCH_DIRS
@@ -61,23 +60,12 @@ FOREACH(COMPONENT ${_opencolorio_FIND_COMPONENTS})
ENDIF()
ENDFOREACH()

IF(EXISTS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h")
# Search twice, because this symbol changed between OCIO 1.x and 2.x
FILE(STRINGS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h" _opencolorio_version
REGEX "^#define OCIO_VERSION_STR[ \t].*$")
IF(NOT _opencolorio_version)
file(STRINGS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h" _opencolorio_version
REGEX "^#define OCIO_VERSION[ \t].*$")
ENDIF()
STRING(REGEX MATCHALL "[0-9]+[.0-9]+" OPENCOLORIO_VERSION ${_opencolorio_version})
ENDIF()

# handle the QUIETLY and REQUIRED arguments and set OPENCOLORIO_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenColorIO
REQUIRED_VARS _opencolorio_LIBRARIES OPENCOLORIO_INCLUDE_DIR
VERSION_VAR OPENCOLORIO_VERSION)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenColorIO DEFAULT_MSG
_opencolorio_LIBRARIES OPENCOLORIO_INCLUDE_DIR)

IF(OPENCOLORIO_FOUND)
SET(OPENCOLORIO_LIBRARIES ${_opencolorio_LIBRARIES})
@@ -90,7 +78,6 @@ MARK_AS_ADVANCED(
OPENCOLORIO_OPENCOLORIO_LIBRARY
OPENCOLORIO_TINYXML_LIBRARY
OPENCOLORIO_YAML-CPP_LIBRARY
OPENCOLORIO_VERSION
)

UNSET(COMPONENT)
@@ -34,7 +34,7 @@ IF(NOT PYTHON_ROOT_DIR AND NOT $ENV{PYTHON_ROOT_DIR} STREQUAL "")
SET(PYTHON_ROOT_DIR $ENV{PYTHON_ROOT_DIR})
ENDIF()

SET(PYTHON_VERSION 3.9 CACHE STRING "Python Version (major and minor only)")
SET(PYTHON_VERSION 3.7 CACHE STRING "Python Version (major and minor only)")
MARK_AS_ADVANCED(PYTHON_VERSION)

@@ -73,8 +73,8 @@ SET(_python_SEARCH_DIRS
# only search for the dirs if we haven't already
IF((NOT _IS_INC_DEF) OR (NOT _IS_INC_CONF_DEF) OR (NOT _IS_LIB_DEF) OR (NOT _IS_LIB_PATH_DEF))
SET(_PYTHON_ABI_FLAGS_TEST
"u; " # release
"du;d" # debug
"m;mu;u; " # release
"dm;dmu;du;d" # debug
)

FOREACH(_CURRENT_ABI_FLAGS ${_PYTHON_ABI_FLAGS_TEST})
@@ -1,49 +0,0 @@
# - Find sse2neon library
# Find the native sse2neon includes and library
# This module defines
# SSE2NEON_INCLUDE_DIRS, where to find sse2neon.h, Set when
# SSE2NEON_INCLUDE_DIR is found.
# SSE2NEON_ROOT_DIR, The base directory to search for sse2neon.
# This can also be an environment variable.
# SSE2NEON_FOUND, If false, do not try to use sse2neon.

#=============================================================================
# Copyright 2020 Blender Foundation.
#
# Distributed under the OSI-approved BSD 3-Clause License,
# see accompanying file BSD-3-Clause-license.txt for details.
#=============================================================================

# If SSE2NEON_ROOT_DIR was defined in the environment, use it.
IF(NOT SSE2NEON_ROOT_DIR AND NOT $ENV{SSE2NEON_ROOT_DIR} STREQUAL "")
SET(SSE2NEON_ROOT_DIR $ENV{SSE2NEON_ROOT_DIR})
ENDIF()

SET(_sse2neon_SEARCH_DIRS
${SSE2NEON_ROOT_DIR}
)

FIND_PATH(SSE2NEON_INCLUDE_DIR
NAMES
sse2neon.h
HINTS
${_sse2neon_SEARCH_DIRS}
PATH_SUFFIXES
include
)

# handle the QUIETLY and REQUIRED arguments and set SSE2NEON_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(sse2neon DEFAULT_MSG
SSE2NEON_INCLUDE_DIR)

IF(SSE2NEON_FOUND)
SET(SSE2NEON_INCLUDE_DIRS ${SSE2NEON_INCLUDE_DIR})
ENDIF(SSE2NEON_FOUND)

MARK_AS_ADVANCED(
SSE2NEON_INCLUDE_DIR
)

UNSET(_sse2neon_SEARCH_DIRS)
@@ -272,7 +272,7 @@ cmake_policy(SET CMP0057 NEW) # if IN_LIST
#------------------------------------------------------------------------------
function(gtest_add_tests)

if(ARGC LESS 1)
if (ARGC LESS 1)
message(FATAL_ERROR "No arguments supplied to gtest_add_tests()")
endif()

@@ -298,7 +298,7 @@ function(gtest_add_tests)
set(autoAddSources YES)
else()
# Non-keyword syntax, convert to keyword form
if(ARGC LESS 3)
if (ARGC LESS 3)
message(FATAL_ERROR "gtest_add_tests() without keyword options requires at least 3 arguments")
endif()
set(ARGS_TARGET "${ARGV0}")
@@ -99,9 +99,7 @@ macro(BLENDER_SRC_GTEST_EX)

# Don't fail tests on leaks since these often happen in external libraries
# that we can't fix.
set_tests_properties(${TARGET_NAME} PROPERTIES
ENVIRONMENT LSAN_OPTIONS=exitcode=0:$ENV{LSAN_OPTIONS}
)
set_tests_properties(${TARGET_NAME} PROPERTIES ENVIRONMENT LSAN_OPTIONS=exitcode=0)
endif()
if(WIN32)
set_target_properties(${TARGET_NAME} PROPERTIES VS_GLOBAL_VcpkgEnabled "false")
@@ -20,8 +20,6 @@

# <pep8 compliant>

# Note: this code should be cleaned up / refactored.

import sys
if sys.version_info.major < 3:
print("\nPython3.x needed, found %s.\nAborting!\n" %
@@ -39,23 +37,12 @@ from cmake_consistency_check_config import (

import os
from os.path import (
dirname,
join,
normpath,
splitext,
)
from os.path import join, dirname, normpath, splitext

global_h = set()
global_c = set()
global_refs = {}

# Flatten `IGNORE_SOURCE_MISSING` to avoid nested looping.
IGNORE_SOURCE_MISSING = [
(k, ignore_path) for k, ig_list in IGNORE_SOURCE_MISSING
for ignore_path in ig_list
]

# Ignore cmake file, path pairs.
global_ignore_source_missing = {}
for k, v in IGNORE_SOURCE_MISSING:
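The flattening above turns each (cmake file, path list) pair into one (cmake file, path) tuple per path, so later lookups avoid a nested loop; a small sketch using the numaapi entry that appears in the config file further below:

IGNORE_SOURCE_MISSING = (
    ("intern/cycles/util/CMakeLists.txt", ("../../third_party/numaapi/include",)),
)
flattened = [
    (k, ignore_path) for k, ig_list in IGNORE_SOURCE_MISSING
    for ignore_path in ig_list
]
assert flattened == [("intern/cycles/util/CMakeLists.txt",
                      "../../third_party/numaapi/include")]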
@@ -191,8 +178,6 @@ def cmake_get_src(f):

if not l:
pass
elif l in local_ignore_source_missing:
local_ignore_source_missing.remove(l)
elif l.startswith("$"):
if context_name == "SRC":
# assume if it ends with context_name we know about it
@@ -242,7 +227,10 @@ def cmake_get_src(f):
# replace_line(f, i - 1, new_path_rel)

else:
raise Exception("non existent include %s:%d -> %s" % (f, i, new_file))
if l in local_ignore_source_missing:
local_ignore_source_missing.remove(l)
else:
raise Exception("non existent include %s:%d -> %s" % (f, i, new_file))

# print(new_file)

@@ -270,16 +258,16 @@ def cmake_get_src(f):

def is_ignore_source(f, ignore_used):
for index, ignore_path in enumerate(IGNORE_SOURCE):
if ignore_path in f:
for index, ig in enumerate(IGNORE_SOURCE):
if ig in f:
ignore_used[index] = True
return True
return False

def is_ignore_cmake(f, ignore_used):
for index, ignore_path in enumerate(IGNORE_CMAKE):
if ignore_path in f:
for index, ig in enumerate(IGNORE_CMAKE):
if ig in f:
ignore_used[index] = True
return True
return False
@@ -310,7 +298,7 @@ def main():
for cf, i in refs:
errs.append((cf, i))
else:
raise Exception("CMake references missing, internal error, aborting!")
raise Exception("CMake referenecs missing, internal error, aborting!")
is_err = True

errs.sort()
@@ -321,7 +309,7 @@ def main():
# print("sed '%dd' '%s' > '%s.tmp' ; mv '%s.tmp' '%s'" % (i, cf, cf, cf, cf))

if is_err:
raise Exception("CMake references missing files, aborting!")
raise Exception("CMake referenecs missing files, aborting!")
del is_err
del errs

@@ -332,7 +320,7 @@ def main():
if cf not in global_c:
print("missing_c: ", cf)

# Check if automake builds a corresponding .o file.
# check if automake builds a corrasponding .o file.
'''
if cf in global_c:
out1 = os.path.splitext(cf)[0] + ".o"
@@ -368,21 +356,21 @@ def main():

# Check ignores aren't stale
print("\nCheck for unused 'IGNORE_SOURCE' paths...")
for index, ignore_path in enumerate(IGNORE_SOURCE):
for index, ig in enumerate(IGNORE_SOURCE):
if not ignore_used_source[index]:
print("unused ignore: %r" % ignore_path)
print("unused ignore: %r" % ig)

# Check ignores aren't stale
print("\nCheck for unused 'IGNORE_SOURCE_MISSING' paths...")
for k, v in sorted(global_ignore_source_missing.items()):
for ignore_path in v:
print("unused ignore: %r -> %r" % (ignore_path, k))
for ig in v:
print("unused ignore: %r -> %r" % (ig, k))

# Check ignores aren't stale
print("\nCheck for unused 'IGNORE_CMAKE' paths...")
for index, ignore_path in enumerate(IGNORE_CMAKE):
for index, ig in enumerate(IGNORE_CMAKE):
if not ignore_used_cmake[index]:
print("unused ignore: %r" % ignore_path)
print("unused ignore: %r" % ig)

if __name__ == "__main__":
@@ -34,18 +34,8 @@ IGNORE_SOURCE = (

# Ignore cmake file, path pairs.
IGNORE_SOURCE_MISSING = (
( # Use for cycles stand-alone.
"intern/cycles/util/CMakeLists.txt", (
"../../third_party/numaapi/include",
)),
( # Use for `WITH_NANOVDB`.
"intern/cycles/kernel/CMakeLists.txt", (
"nanovdb/util/CSampleFromVoxels.h",
"nanovdb/util/SampleFromVoxels.h",
"nanovdb/NanoVDB.h",
"nanovdb/CNanoVDB.h",
),
),
# Use for cycles stand-alone.
("intern/cycles/util/CMakeLists.txt", "../../third_party/numaapi/include"),
)

IGNORE_CMAKE = (
@@ -19,7 +19,6 @@ set(WITH_DRACO ON CACHE BOOL "" FORCE)
set(WITH_FFTW3 ON CACHE BOOL "" FORCE)
set(WITH_FREESTYLE ON CACHE BOOL "" FORCE)
set(WITH_GMP ON CACHE BOOL "" FORCE)
set(WITH_HARU ON CACHE BOOL "" FORCE)
set(WITH_IK_ITASC ON CACHE BOOL "" FORCE)
set(WITH_IK_SOLVER ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_CINEON ON CACHE BOOL "" FORCE)
@@ -45,8 +44,6 @@ set(WITH_OPENMP ON CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB_BLOSC ON CACHE BOOL "" FORCE)
set(WITH_POTRACE ON CACHE BOOL "" FORCE)
set(WITH_PUGIXML ON CACHE BOOL "" FORCE)
set(WITH_NANOVDB ON CACHE BOOL "" FORCE)
set(WITH_POTRACE ON CACHE BOOL "" FORCE)
set(WITH_PYTHON_INSTALL ON CACHE BOOL "" FORCE)
@@ -24,7 +24,6 @@ set(WITH_DRACO OFF CACHE BOOL "" FORCE)
set(WITH_FFTW3 OFF CACHE BOOL "" FORCE)
set(WITH_FREESTYLE OFF CACHE BOOL "" FORCE)
set(WITH_GMP OFF CACHE BOOL "" FORCE)
set(WITH_HARU OFF CACHE BOOL "" FORCE)
set(WITH_IK_ITASC OFF CACHE BOOL "" FORCE)
set(WITH_IK_SOLVER OFF CACHE BOOL "" FORCE)
set(WITH_IMAGE_CINEON OFF CACHE BOOL "" FORCE)
@@ -52,8 +51,6 @@ set(WITH_OPENIMAGEIO OFF CACHE BOOL "" FORCE)
set(WITH_OPENMP OFF CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV OFF CACHE BOOL "" FORCE)
set(WITH_OPENVDB OFF CACHE BOOL "" FORCE)
set(WITH_POTRACE OFF CACHE BOOL "" FORCE)
set(WITH_PUGIXML OFF CACHE BOOL "" FORCE)
set(WITH_NANOVDB OFF CACHE BOOL "" FORCE)
set(WITH_QUADRIFLOW OFF CACHE BOOL "" FORCE)
set(WITH_SDL OFF CACHE BOOL "" FORCE)
@@ -20,7 +20,6 @@ set(WITH_DRACO ON CACHE BOOL "" FORCE)
set(WITH_FFTW3 ON CACHE BOOL "" FORCE)
set(WITH_FREESTYLE ON CACHE BOOL "" FORCE)
set(WITH_GMP ON CACHE BOOL "" FORCE)
set(WITH_HARU ON CACHE BOOL "" FORCE)
set(WITH_IK_SOLVER ON CACHE BOOL "" FORCE)
set(WITH_IK_ITASC ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_CINEON ON CACHE BOOL "" FORCE)
@@ -46,8 +45,6 @@ set(WITH_OPENMP ON CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB_BLOSC ON CACHE BOOL "" FORCE)
set(WITH_POTRACE ON CACHE BOOL "" FORCE)
set(WITH_PUGIXML ON CACHE BOOL "" FORCE)
set(WITH_NANOVDB ON CACHE BOOL "" FORCE)
set(WITH_POTRACE ON CACHE BOOL "" FORCE)
set(WITH_PYTHON_INSTALL ON CACHE BOOL "" FORCE)
@@ -389,7 +389,7 @@ function(blender_add_lib
endfunction()

function(blender_add_test_suite)
if(ARGC LESS 1)
if (ARGC LESS 1)
message(FATAL_ERROR "No arguments supplied to blender_add_test_suite()")
endif()

@@ -668,6 +668,12 @@ macro(TEST_SSE_SUPPORT
#include <xmmintrin.h>
int main(void) { __m128 v = _mm_setzero_ps(); return 0; }"
SUPPORT_SSE_BUILD)

if(SUPPORT_SSE_BUILD)
message(STATUS "SSE Support: detected.")
else()
message(STATUS "SSE Support: missing.")
endif()
endif()

if(NOT DEFINED SUPPORT_SSE2_BUILD)
@@ -676,19 +682,17 @@ macro(TEST_SSE_SUPPORT
#include <emmintrin.h>
int main(void) { __m128d v = _mm_setzero_pd(); return 0; }"
SUPPORT_SSE2_BUILD)

if(SUPPORT_SSE2_BUILD)
message(STATUS "SSE2 Support: detected.")
else()
message(STATUS "SSE2 Support: missing.")
endif()
endif()

unset(CMAKE_REQUIRED_FLAGS)
endmacro()

macro(TEST_NEON_SUPPORT)
include(CheckCXXSourceCompiles)
check_cxx_source_compiles(
"#include <arm_neon.h>
int main() {return vaddvq_s32(vdupq_n_s32(1));}"
SUPPORT_NEON_BUILD)
endmacro()

# Only print message if running CMake first time
macro(message_first_run)
if(FIRST_RUN)
@@ -1205,9 +1209,9 @@ function(find_python_package
site-packages
dist-packages
vendor-packages
NO_DEFAULT_PATH
DOC
"Path to python site-packages or dist-packages containing '${package}' module"
NO_DEFAULT_PATH
DOC
"Path to python site-packages or dist-packages containing '${package}' module"
)
mark_as_advanced(PYTHON_${_upper_package}_PATH)

@@ -1227,7 +1231,7 @@ function(find_python_package
set(WITH_PYTHON_INSTALL_${_upper_package} OFF PARENT_SCOPE)
else()
message(STATUS "${package} found at '${PYTHON_${_upper_package}_PATH}'")

if(NOT "${relative_include_dir}" STREQUAL "")
set(_relative_include_dir "${package}/${relative_include_dir}")
unset(PYTHON_${_upper_package}_INCLUDE_DIRS CACHE)
@@ -72,11 +72,7 @@ if(WITH_JACK)
endif()

if(NOT DEFINED LIBDIR)
  if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64")
    set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin)
  else()
    set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin_${CMAKE_OSX_ARCHITECTURES})
  endif()
  set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin)
else()
  message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
endif()
@@ -131,22 +127,22 @@ if(WITH_CODEC_SNDFILE)
endif()

if(WITH_PYTHON)
  # we use precompiled libraries for py 3.9 and up by default
  set(PYTHON_VERSION 3.9)
  # we use precompiled libraries for py 3.7 and up by default
  set(PYTHON_VERSION 3.7)
  if(NOT WITH_PYTHON_MODULE AND NOT WITH_PYTHON_FRAMEWORK)
    # normally cached but not since we include them with blender
    set(PYTHON_INCLUDE_DIR "${LIBDIR}/python/include/python${PYTHON_VERSION}")
    set(PYTHON_EXECUTABLE "${LIBDIR}/python/bin/python${PYTHON_VERSION}")
    set(PYTHON_LIBRARY ${LIBDIR}/python/lib/libpython${PYTHON_VERSION}.a)
    set(PYTHON_INCLUDE_DIR "${LIBDIR}/python/include/python${PYTHON_VERSION}m")
    set(PYTHON_EXECUTABLE "${LIBDIR}/python/bin/python${PYTHON_VERSION}m")
    set(PYTHON_LIBRARY ${LIBDIR}/python/lib/libpython${PYTHON_VERSION}m.a)
    set(PYTHON_LIBPATH "${LIBDIR}/python/lib/python${PYTHON_VERSION}")
    # set(PYTHON_LINKFLAGS "-u _PyMac_Error")  # won't build with this enabled
  else()
    # module must be compiled against Python framework
    set(_py_framework "/Library/Frameworks/Python.framework/Versions/${PYTHON_VERSION}")

    set(PYTHON_INCLUDE_DIR "${_py_framework}/include/python${PYTHON_VERSION}")
    set(PYTHON_EXECUTABLE "${_py_framework}/bin/python${PYTHON_VERSION}")
    set(PYTHON_LIBPATH "${_py_framework}/lib/python${PYTHON_VERSION}/config-${PYTHON_VERSION}")
    set(PYTHON_INCLUDE_DIR "${_py_framework}/include/python${PYTHON_VERSION}m")
    set(PYTHON_EXECUTABLE "${_py_framework}/bin/python${PYTHON_VERSION}m")
    set(PYTHON_LIBPATH "${_py_framework}/lib/python${PYTHON_VERSION}/config-${PYTHON_VERSION}m")
    # set(PYTHON_LIBRARY python${PYTHON_VERSION})
    # set(PYTHON_LINKFLAGS "-u _PyMac_Error -framework Python")  # won't build with this enabled

@@ -206,13 +202,6 @@ set(PLATFORM_LINKFLAGS

list(APPEND PLATFORM_LINKLIBS c++)

if(WITH_OPENIMAGEDENOISE)
  if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
    # OpenImageDenoise uses BNNS from the Accelerate framework.
    string(APPEND PLATFORM_LINKFLAGS " -framework Accelerate")
  endif()
endif()

if(WITH_JACK)
  string(APPEND PLATFORM_LINKFLAGS " -F/Library/Frameworks -weak_framework jackmp")
endif()
@@ -304,12 +293,7 @@ if(WITH_OPENIMAGEIO)
endif()

if(WITH_OPENCOLORIO)
  find_package(OpenColorIO 2.0.0)

  if(NOT OPENCOLORIO_FOUND)
    set(WITH_OPENCOLORIO OFF)
    message(STATUS "OpenColorIO not found")
  endif()
  find_package(OpenColorIO)
endif()

if(WITH_OPENVDB)
@@ -321,11 +305,8 @@ if(WITH_OPENVDB)
endif()

if(WITH_NANOVDB)
  find_package(NanoVDB)
endif()

if(WITH_CPU_SIMD)
  find_package(sse2neon)
  set(NANOVDB ${LIBDIR}/nanovdb)
  set(NANOVDB_INCLUDE_DIR ${NANOVDB}/include)
endif()

if(WITH_LLVM)
@@ -355,6 +336,12 @@ if(WITH_CYCLES_OSL)
  endif()
endif()

if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
  set(WITH_CYCLES_EMBREE OFF)
  set(WITH_OPENIMAGEDENOISE OFF)
  set(WITH_CPU_SSE OFF)
endif()

if(WITH_CYCLES_EMBREE)
  find_package(Embree 3.8.0 REQUIRED)
  # Increase stack size for Embree, only works for executables.
@@ -441,14 +428,6 @@ if(WITH_GMP)
  endif()
endif()

if(WITH_HARU)
  find_package(Haru)
  if(NOT HARU_FOUND)
    message(WARNING "Haru not found, disabling WITH_HARU")
    set(WITH_HARU OFF)
  endif()
endif()

if(EXISTS ${LIBDIR})
  without_system_libs_end()
endif()

@@ -20,7 +20,6 @@

# Xcode and system configuration for Apple.

# Detect processor architecture.
if(NOT CMAKE_OSX_ARCHITECTURES)
  execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCHITECTURE OUTPUT_STRIP_TRAILING_WHITESPACE)
  message(STATUS "Detected native architecture ${ARCHITECTURE}.")
@@ -29,93 +28,56 @@ if(NOT CMAKE_OSX_ARCHITECTURES)
      FORCE)
endif()

# Detect developer directory. Depending on configuration this may be either
# an Xcode or Command Line Tools installation.
if(NOT DEFINED OSX_SYSTEM)
  execute_process(
      COMMAND xcodebuild -version -sdk macosx SDKVersion
      OUTPUT_VARIABLE OSX_SYSTEM
      OUTPUT_STRIP_TRAILING_WHITESPACE)
endif()

# workaround for incorrect cmake xcode lookup for developer previews - XCODE_VERSION does not
# take xcode-select path into account but would always look into /Applications/Xcode.app
# while dev versions are named Xcode<version>-DP<preview_number>
execute_process(
    COMMAND xcode-select --print-path
    OUTPUT_VARIABLE XCODE_DEVELOPER_DIR OUTPUT_STRIP_TRAILING_WHITESPACE)
    OUTPUT_VARIABLE XCODE_CHECK OUTPUT_STRIP_TRAILING_WHITESPACE)
string(REPLACE "/Contents/Developer" "" XCODE_BUNDLE ${XCODE_CHECK})  # truncate to bundlepath in any case

# Detect Xcode version. It is provided by the Xcode generator but not
# Unix Makefiles or Ninja.
if(NOT ${CMAKE_GENERATOR} MATCHES "Xcode")
  # Unix makefile generator does not fill XCODE_VERSION var, so we get it with a command.
  # Note that `xcodebuild -version` gives output in two lines: first line will include
  # Xcode version, second one will include build number. We are only interested in the
  # first line. Here is an example of the output:
  # former one. Here is an example of the output:
  #   Xcode 11.4
  #   Build version 11E146
  # The expected XCODE_VERSION in this case is 11.4.
  execute_process(
      COMMAND xcodebuild -version
      OUTPUT_VARIABLE _xcode_vers_build_nr
      RESULT_VARIABLE _xcode_vers_result
      ERROR_QUIET)

  if(_xcode_vers_result EQUAL 0)
    # Convert output to a single line by replacing newlines with spaces.
    # This is needed because regex replace can not operate through the newline character
    # and applies substitutions for each individual lines.
    string(REPLACE "\n" " " _xcode_vers_build_nr_single_line "${_xcode_vers_build_nr}")
    string(REGEX REPLACE "(.*)Xcode ([0-9\\.]+).*" "\\2" XCODE_VERSION "${_xcode_vers_build_nr_single_line}")
    unset(_xcode_vers_build_nr_single_line)
  endif()
  execute_process(COMMAND xcodebuild -version OUTPUT_VARIABLE XCODE_VERS_BUILD_NR)

  unset(_xcode_vers_build_nr)
  unset(_xcode_vers_result)
  # Convert output to a single line by replacling newlines with spaces.
  # This is needed because regex replace can not operate through the newline character
  # and applies substitutions for each individual lines.
  string(REPLACE "\n" " " XCODE_VERS_BUILD_NR_SINGLE_LINE "${XCODE_VERS_BUILD_NR}")

  string(REGEX REPLACE "(.*)Xcode ([0-9\\.]+).*" "\\2" XCODE_VERSION "${XCODE_VERS_BUILD_NR_SINGLE_LINE}")

  unset(XCODE_VERS_BUILD_NR)
  unset(XCODE_VERS_BUILD_NR_SINGLE_LINE)
endif()

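The comments above describe the two-line `xcodebuild -version` output the regex has to cope with. A quick way to sanity-check the flatten-then-extract approach outside CMake is a small Python sketch (assumes a macOS host with Xcode installed):

import re
import subprocess

# `xcodebuild -version` prints e.g. "Xcode 11.4\nBuild version 11E146".
out = subprocess.run(["xcodebuild", "-version"],
                     capture_output=True, text=True).stdout
# Same approach as the CMake code: flatten to one line, then extract the version.
single_line = out.replace("\n", " ")
match = re.search(r"Xcode ([0-9.]+)", single_line)
print(match.group(1) if match else "no Xcode version found")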
if(XCODE_VERSION)
  # Construct SDKs path ourselves, because xcode-select path could be ambiguous.
  # Both /Applications/Xcode.app/Contents/Developer or /Applications/Xcode.app would be allowed.
  set(XCODE_SDK_DIR ${XCODE_DEVELOPER_DIR}/Platforms/MacOSX.platform/Developer/SDKs)

  # Detect SDK version to use
  if(NOT DEFINED OSX_SYSTEM)
    execute_process(
        COMMAND xcodebuild -version -sdk macosx SDKVersion
        OUTPUT_VARIABLE OSX_SYSTEM
        OUTPUT_STRIP_TRAILING_WHITESPACE)
  endif()

  message(STATUS "Detected OS X ${OSX_SYSTEM} and Xcode ${XCODE_VERSION} at ${XCODE_DEVELOPER_DIR}")
  message(STATUS "SDKs Directory: " ${XCODE_SDK_DIR})
else()
  # If no Xcode version found, try detecting command line tools.
  execute_process(
      COMMAND pkgutil --pkg-info=com.apple.pkg.CLTools_Executables
      OUTPUT_VARIABLE _cltools_pkg_info
      OUTPUT_STRIP_TRAILING_WHITESPACE
      RESULT_VARIABLE _cltools_pkg_info_result
      ERROR_QUIET)

  if(_cltools_pkg_info_result EQUAL 0)
    # Extract version.
    string(REGEX REPLACE ".*version: ([0-9]+)\\.([0-9]+).*" "\\1.\\2" XCODE_VERSION "${_cltools_pkg_info}")
    # SDK directory.
    set(XCODE_SDK_DIR "${XCODE_DEVELOPER_DIR}/SDKs")

    # Detect SDK version to use.
    if(NOT DEFINED OSX_SYSTEM)
      execute_process(
          COMMAND xcrun --show-sdk-version
          OUTPUT_VARIABLE OSX_SYSTEM
          OUTPUT_STRIP_TRAILING_WHITESPACE)
    endif()

    message(STATUS "Detected OS X ${OSX_SYSTEM} and Command Line Tools ${XCODE_VERSION} at ${XCODE_DEVELOPER_DIR}")
    message(STATUS "SDKs Directory: " ${XCODE_SDK_DIR})
  else()
    message(FATAL_ERROR "No Xcode or Command Line Tools detected")
  endif()

  unset(_cltools_pkg_info)
  unset(_cltools_pkg_info_result)
endif()
message(STATUS "Detected OS X ${OSX_SYSTEM} and Xcode ${XCODE_VERSION} at ${XCODE_BUNDLE}")

# Require a relatively recent Xcode version.
if(${XCODE_VERSION} VERSION_LESS 10.0)
  message(FATAL_ERROR "Only Xcode version 10.0 and newer is supported")
endif()

# note: xcode-select path could be ambiguous,
# cause /Applications/Xcode.app/Contents/Developer or /Applications/Xcode.app would be allowed
# so i use a selfcomposed bundlepath here
set(OSX_SYSROOT_PREFIX ${XCODE_BUNDLE}/Contents/Developer/Platforms/MacOSX.platform)
message(STATUS "OSX_SYSROOT_PREFIX: " ${OSX_SYSROOT_PREFIX})

# Collect list of OSX system versions which will be used to detect path to corresponding SDK.
# Start with macOS SDK version reported by xcodebuild and include possible extra ones.
#
@@ -139,9 +101,10 @@ endif()
# Loop through all possible versions and pick the first one which resolves to a valid SDK path.
set(OSX_SDK_PATH)
set(OSX_SDK_FOUND FALSE)
set(OSX_SDK_PREFIX ${OSX_SYSROOT_PREFIX}/Developer/SDKs)
set(OSX_SDKROOT)
foreach(OSX_SDK_VERSION ${OSX_SDK_TEST_VERSIONS})
  set(CURRENT_OSX_SDK_PATH "${XCODE_SDK_DIR}/MacOSX${OSX_SDK_VERSION}.sdk")
  set(CURRENT_OSX_SDK_PATH "${OSX_SDK_PREFIX}/MacOSX${OSX_SDK_VERSION}.sdk")
  if(EXISTS ${CURRENT_OSX_SDK_PATH})
    set(OSX_SDK_PATH "${CURRENT_OSX_SDK_PATH}")
    set(OSX_SDKROOT macosx${OSX_SDK_VERSION})
@@ -149,6 +112,7 @@ foreach(OSX_SDK_VERSION ${OSX_SDK_TEST_VERSIONS})
    break()
  endif()
endforeach()
unset(OSX_SDK_PREFIX)
unset(OSX_SDK_TEST_VERSIONS)

if(NOT OSX_SDK_FOUND)

@@ -284,10 +284,6 @@ if(WITH_NANOVDB)
  endif()
endif()

if(WITH_CPU_SIMD)
  find_package_wrapper(sse2neon)
endif()

if(WITH_ALEMBIC)
  find_package_wrapper(Alembic)

@@ -356,11 +352,6 @@ endif()

if(WITH_PUGIXML)
  find_package_wrapper(PugiXML)

  if(NOT PUGIXML_FOUND)
    set(WITH_PUGIXML OFF)
    message(STATUS "PugiXML not found, disabling WITH_PUGIXML")
  endif()
endif()

if(WITH_OPENIMAGEIO)
@@ -389,7 +380,7 @@ if(WITH_OPENIMAGEIO)
endif()

if(WITH_OPENCOLORIO)
  find_package_wrapper(OpenColorIO 2.0.0)
  find_package_wrapper(OpenColorIO)

  set(OPENCOLORIO_LIBRARIES ${OPENCOLORIO_LIBRARIES})
  set(OPENCOLORIO_LIBPATH)  # TODO, remove and reference the absolute path everywhere
@@ -474,14 +465,6 @@ if(WITH_POTRACE)
  endif()
endif()

if(WITH_HARU)
  find_package_wrapper(Haru)
  if(NOT HARU_FOUND)
    message(WARNING "Haru not found, disabling WITH_HARU")
    set(WITH_HARU OFF)
  endif()
endif()

if(EXISTS ${LIBDIR})
  without_system_libs_end()
endif()
@@ -627,13 +610,7 @@ endif()

# GNU Compiler
if(CMAKE_COMPILER_IS_GNUCC)
  # ffp-contract=off:
  # Automatically turned on when building with "-march=native". This is
  # explicitly turned off here as it will make floating point math give a bit
  # different results. This will lead to automated test failures. So disable
  # this until we support it. Seems to default to off in clang and the intel
  # compiler.
  set(PLATFORM_CFLAGS "-pipe -fPIC -funsigned-char -fno-strict-aliasing -ffp-contract=off")
  set(PLATFORM_CFLAGS "-pipe -fPIC -funsigned-char -fno-strict-aliasing")

  # `maybe-uninitialized` is unreliable in release builds, but fine in debug builds.
  set(GCC_EXTRA_FLAGS_RELEASE "-Wno-maybe-uninitialized")

@@ -119,7 +119,6 @@ string(APPEND CMAKE_MODULE_LINKER_FLAGS " /SAFESEH:NO /ignore:4099")
list(APPEND PLATFORM_LINKLIBS
  ws2_32 vfw32 winmm kernel32 user32 gdi32 comdlg32 Comctl32 version
  advapi32 shfolder shell32 ole32 oleaut32 uuid psapi Dbghelp Shlwapi
  pathcch
)

if(WITH_INPUT_IME)
@@ -422,7 +421,7 @@ if(WITH_JACK)
endif()

if(WITH_PYTHON)
  set(PYTHON_VERSION 3.9)  # CACHE STRING)
  set(PYTHON_VERSION 3.7)  # CACHE STRING)

  string(REPLACE "." "" _PYTHON_VERSION_NO_DOTS ${PYTHON_VERSION})
  set(PYTHON_LIBRARY ${LIBDIR}/python/${_PYTHON_VERSION_NO_DOTS}/libs/python${_PYTHON_VERSION_NO_DOTS}.lib)
@@ -455,18 +454,10 @@ if(WITH_BOOST)
  set(BOOST ${LIBDIR}/boost)
  set(BOOST_INCLUDE_DIR ${BOOST}/include)
  set(BOOST_LIBPATH ${BOOST}/lib)
  set(BOOST_VERSION_HEADER ${BOOST_INCLUDE_DIR}/boost/version.hpp)
  if(EXISTS ${BOOST_VERSION_HEADER})
    file(STRINGS "${BOOST_VERSION_HEADER}" BOOST_LIB_VERSION REGEX "#define BOOST_LIB_VERSION ")
    if(BOOST_LIB_VERSION MATCHES "#define BOOST_LIB_VERSION \"([0-9_]+)\"")
      set(BOOST_VERSION "${CMAKE_MATCH_1}")
    endif()
  if(CMAKE_CL_64)
    set(BOOST_POSTFIX "vc141-mt-x64-1_70.lib")
    set(BOOST_DEBUG_POSTFIX "vc141-mt-gd-x64-1_70.lib")
  endif()
  if(NOT BOOST_VERSION)
    message(FATAL_ERROR "Unable to determine Boost version")
  endif()
  set(BOOST_POSTFIX "vc141-mt-x64-${BOOST_VERSION}.lib")
  set(BOOST_DEBUG_POSTFIX "vc141-mt-gd-x64-${BOOST_VERSION}.lib")
  set(BOOST_LIBRARIES
    optimized ${BOOST_LIBPATH}/libboost_date_time-${BOOST_POSTFIX}
    optimized ${BOOST_LIBPATH}/libboost_filesystem-${BOOST_POSTFIX}
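The Boost postfix is now derived from the BOOST_LIB_VERSION define in boost/version.hpp instead of a hard-coded "1_70". The same extraction, sketched in Python; the header path below is an illustrative assumption, not a path used by the build:

import re

def boost_lib_version(version_header="lib/boost/include/boost/version.hpp"):
    # version.hpp contains a line like: #define BOOST_LIB_VERSION "1_70"
    with open(version_header, encoding="utf-8") as f:
        match = re.search(r'#define BOOST_LIB_VERSION "([0-9_]+)"', f.read())
    if match is None:
        raise RuntimeError("Unable to determine Boost version")
    return match.group(1)

# e.g. "libboost_date_time-vc141-mt-x64-1_70.lib"
print("libboost_date_time-vc141-mt-x64-%s.lib" % boost_lib_version())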
@@ -509,7 +500,7 @@ if(WITH_OPENIMAGEIO)
  set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG})

  set(OPENIMAGEIO_DEFINITIONS "-DUSE_TBB=0")
  set(OPENCOLORIO_DEFINITIONS "-DOpenColorIO_SKIP_IMPORTS")
  set(OPENCOLORIO_DEFINITIONS "-DOCIO_STATIC_BUILD")
  set(OPENIMAGEIO_IDIFF "${OPENIMAGEIO}/bin/idiff.exe")
  add_definitions(-DOIIO_STATIC_DEFINE)
  add_definitions(-DOIIO_NO_SSE=1)
@@ -547,13 +538,11 @@ if(WITH_OPENCOLORIO)
  set(OPENCOLORIO_LIBPATH ${OPENCOLORIO}/lib)
  set(OPENCOLORIO_LIBRARIES
    optimized ${OPENCOLORIO_LIBPATH}/OpenColorIO.lib
    optimized ${OPENCOLORIO_LIBPATH}/tinyxml.lib
    optimized ${OPENCOLORIO_LIBPATH}/libyaml-cpp.lib
    optimized ${OPENCOLORIO_LIBPATH}/libexpatMD.lib
    optimized ${OPENCOLORIO_LIBPATH}/pystring.lib
    debug ${OPENCOLORIO_LIBPATH}/OpencolorIO_d.lib
    debug ${OPENCOLORIO_LIBPATH}/tinyxml_d.lib
    debug ${OPENCOLORIO_LIBPATH}/libyaml-cpp_d.lib
    debug ${OPENCOLORIO_LIBPATH}/libexpatdMD.lib
    debug ${OPENCOLORIO_LIBPATH}/pystring_d.lib
  )
  set(OPENCOLORIO_DEFINITIONS)
endif()
@@ -792,14 +781,7 @@ if(WITH_XR_OPENXR)
  set(XR_OPENXR_SDK ${LIBDIR}/xr_openxr_sdk)
  set(XR_OPENXR_SDK_LIBPATH ${LIBDIR}/xr_openxr_sdk/lib)
  set(XR_OPENXR_SDK_INCLUDE_DIR ${XR_OPENXR_SDK}/include)
  # This is the old name of this library, it is checked to
  # support the transition between the old and new lib versions
  # this can be removed after the next lib update.
  if(EXISTS ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
    set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
  else()
    set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loaderd.lib)
  endif()
  set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
else()
  message(WARNING "OpenXR-SDK was not found, disabling WITH_XR_OPENXR")
  set(WITH_XR_OPENXR OFF)
@@ -818,15 +800,3 @@ if(WITH_POTRACE)
  set(POTRACE_LIBRARIES ${LIBDIR}/potrace/lib/potrace.lib)
  set(POTRACE_FOUND On)
endif()

if(WITH_HARU)
  if(EXISTS ${LIBDIR}/haru)
    set(HARU_FOUND On)
    set(HARU_ROOT_DIR ${LIBDIR}/haru)
    set(HARU_INCLUDE_DIRS ${HARU_ROOT_DIR}/include)
    set(HARU_LIBRARIES ${HARU_ROOT_DIR}/lib/libhpdfs.lib)
  else()
    message(WARNING "Haru was not found, disabling WITH_HARU")
    set(WITH_HARU OFF)
  endif()
endif()

@@ -220,12 +220,14 @@ def cmake_advanced_info():


def cmake_cache_var(var):
    with open(os.path.join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8') as cache_file:
        lines = [
            l_strip for l in cache_file
            if (l_strip := l.strip())
            if not l_strip.startswith(("//", "#"))
        ]
    cache_file = open(join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8')
    lines = [
        l_strip for l in cache_file
        for l_strip in (l.strip(),)
        if l_strip
        if not l_strip.startswith(("//", "#"))
    ]
    cache_file.close()

    for l in lines:
        if l.split(":")[0] == var:

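For reference, a self-contained sketch of the new walrus-operator variant of cmake_cache_var(); the CMAKE_DIR value and the final value lookup are assumptions filled in for illustration:

import os

CMAKE_DIR = "."  # assumed build directory containing CMakeCache.txt

def cmake_cache_var(var):
    # CMakeCache.txt entries look like: NAME:TYPE=VALUE
    with open(os.path.join(CMAKE_DIR, "CMakeCache.txt"), encoding="utf-8") as cache_file:
        lines = [
            l_strip for l in cache_file
            if (l_strip := l.strip())               # drop blank lines, bind stripped value
            if not l_strip.startswith(("//", "#"))  # drop comments
        ]
    for l in lines:
        if l.split(":")[0] == var:
            return l.partition("=")[2]
    return None

print(cmake_cache_var("CMAKE_BUILD_TYPE"))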
@@ -8,7 +8,6 @@

import argparse
import os
import platform
import shutil
import sys

@@ -50,12 +49,7 @@ def svn_update(args, release_version):

    # Checkout precompiled libraries
    if sys.platform == 'darwin':
        if platform.machine() == 'x86_64':
            lib_platform = "darwin"
        elif platform.machine() == 'arm64':
            lib_platform = "darwin_arm64"
        else:
            lib_platform = None
        lib_platform = "darwin"
    elif sys.platform == 'win32':
        # Windows checkout is usually handled by bat scripts since python3 to run
        # this script is bundled as part of the precompiled libraries. However it

@@ -38,7 +38,7 @@ PROJECT_NAME = Blender
# could be handy for archiving the generated documentation or if some version
# control system is used.

PROJECT_NUMBER = "V2.93"
PROJECT_NUMBER = "V2.92"

# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

@@ -52,11 +52,10 @@ outfilename = sys.argv[2]

cmd = [blender_bin, "--help"]
print("  executing:", " ".join(cmd))
ASAN_OPTIONS = "exitcode=0:" + os.environ.get("ASAN_OPTIONS", "")
blender_help = subprocess.run(
    cmd, env={"ASAN_OPTIONS": ASAN_OPTIONS}, check=True, stdout=subprocess.PIPE).stdout.decode(encoding="utf-8")
    cmd, env={"ASAN_OPTIONS": "exitcode=0"}, check=True, stdout=subprocess.PIPE).stdout.decode(encoding="utf-8")
blender_version = subprocess.run(
    [blender_bin, "--version"], env={"ASAN_OPTIONS": ASAN_OPTIONS}, check=True, stdout=subprocess.PIPE).stdout.decode(encoding="utf-8").strip()
    [blender_bin, "--version"], env={"ASAN_OPTIONS": "exitcode=0"}, check=True, stdout=subprocess.PIPE).stdout.decode(encoding="utf-8").strip()
blender_version, blender_date = (blender_version.split("build") + [None, None])[0:2]
blender_version = blender_version.rstrip().partition(" ")[2]  # remove 'Blender' prefix.
if blender_date is None:

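The new ASAN_OPTIONS handling prepends "exitcode=0:" to whatever the caller already exported instead of replacing it. A minimal sketch of the merge behaviour, using a hypothetical pre-set option:

import os

# Pretend the caller already configured ASAN (hypothetical value).
os.environ["ASAN_OPTIONS"] = "halt_on_error=0"

# Old behaviour dropped the caller's options; new behaviour keeps them.
merged = "exitcode=0:" + os.environ.get("ASAN_OPTIONS", "")
print(merged)  # -> "exitcode=0:halt_on_error=0"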
@@ -1,64 +0,0 @@
"""
Dependency graph: Object.to_curve()
+++++++++++++++++++++++++++++++++++

Function to get a curve from text and curve objects. It is typically used by exporters, render
engines, and tools that need to access the curve representing the object.

The function takes the evaluated dependency graph as a required parameter and optionally a boolean
apply_modifiers which defaults to false. If apply_modifiers is true and the object is a curve object,
the spline deform modifiers are applied on the control points. Note that constructive modifiers and
modifiers that are not spline-enabled will not be applied. So modifiers like Array will not be applied
and deform modifiers that have Apply On Spline disabled will not be applied.

If the object is a text object, the text will be converted into a 3D curve and returned. Modifiers are
never applied on text objects and apply_modifiers will be ignored. If the object is neither a curve nor
a text object, an error will be reported.

.. note:: The resulting curve is owned by the object. It can be freed by calling `object.to_curve_clear()`.
.. note::
   The resulting curve must be treated as temporary, and can not be referenced from objects in the main
   database.
"""
import bpy


class OBJECT_OT_object_to_curve(bpy.types.Operator):
    """Convert selected object to curve and show number of splines"""
    bl_label = "DEG Object to Curve"
    bl_idname = "object.object_to_curve"

    def execute(self, context):
        # Access input original object.
        obj = context.object
        if obj is None:
            self.report({'INFO'}, "No active object to convert to curve")
            return {'CANCELLED'}
        if obj.type not in {'CURVE', 'FONT'}:
            self.report({'INFO'}, "Object can not be converted to curve")
            return {'CANCELLED'}
        depsgraph = context.evaluated_depsgraph_get()
        # Invoke to_curve() without applying modifiers.
        curve_without_modifiers = obj.to_curve(depsgraph)
        self.report({'INFO'}, f"{len(curve_without_modifiers.splines)} splines in a new curve without modifiers.")
        # Remove temporary curve.
        obj.to_curve_clear()
        # Invoke to_curve() with applying modifiers.
        curve_with_modifiers = obj.to_curve(depsgraph, apply_modifiers=True)
        self.report({'INFO'}, f"{len(curve_with_modifiers.splines)} splines in new curve with modifiers.")
        # Remove temporary curve.
        obj.to_curve_clear()
        return {'FINISHED'}


def register():
    bpy.utils.register_class(OBJECT_OT_object_to_curve)


def unregister():
    bpy.utils.unregister_class(OBJECT_OT_object_to_curve)


if __name__ == "__main__":
    register()

@@ -1,2 +1,2 @@
Sphinx==3.4.3
sphinx_rtd_theme==0.5.1
Sphinx==3.1.1
sphinx_rtd_theme==0.5.0

@@ -186,7 +186,7 @@ For example, if you want to access the texture of a brush via Python to adjust i
#. From the Sidebar expand the Brush Settings panel's *Texture* subpanel and add a new texture.
   *Notice the texture data-block menu itself doesn't have very useful links (you can check the tooltips).*
#. The contrast setting isn't exposed in the Sidebar, so view the texture in the
   :ref:`Properties Editor <blender_manual:bpy.types.Texture.contrast>`.
   :ref:`Properties Editor <blender_manual:bpy.types.Texture.contrast`
#. Open the context menu of the contrast field and select *Online Python Reference*.
   This takes you to ``bpy.types.Texture.contrast``. Now you can see that ``contrast`` is a property of texture.
#. To find out how to access the texture from the brush check on the references at the bottom of the page.

@@ -35,7 +35,7 @@ but not to fully cover each topic.

A quick list of helpful things to know before starting:

- Enable :ref:`Developer Extra <blender_manual:prefs-interface-dev-extras>`
- Enable :ref:`Developer Extra <blender_manual:prefs-interface-dev-extras`
  and :ref:`Python Tooltips <blender_manual:prefs-interface-tooltips-python>`.
- The :ref:`Python Console <blender_manual:bpy.types.SpaceConsole>`
  is great for testing one-liners; it has autocompletion so you can inspect the API quickly.

@@ -81,7 +81,6 @@ import sys
import inspect
import shutil
import logging
import warnings

from textwrap import indent

@@ -1205,7 +1204,7 @@ def pycontext2sphinx(basepath):
    # for member in sorted(unique):
    #     print('    "%s": ("", False),' % member)
    if len(context_type_map) > len(unique):
        warnings.warn(
        raise Exception(
            "Some types are not used: %s" %
            str([member for member in context_type_map if member not in unique]))
    else:
@@ -1715,6 +1714,7 @@ except ModuleNotFoundError:

    fw("if html_theme == 'sphinx_rtd_theme':\n")
    fw("    html_theme_options = {\n")
    fw("        'canonical_url': 'https://docs.blender.org/api/current/',\n")
    # fw("        'analytics_id': '',\n")
    # fw("        'collapse_navigation': True,\n")
    fw("        'sticky_navigation': False,\n")
@@ -1726,7 +1726,6 @@ except ModuleNotFoundError:
    # not helpful since the source is generated, adds to upload size.
    fw("html_copy_source = False\n")
    fw("html_show_sphinx = False\n")
    fw("html_baseurl = 'https://docs.blender.org/api/current/'\n")
    fw("html_use_opensearch = 'https://docs.blender.org/api/current'\n")
    fw("html_split_index = True\n")
    fw("html_static_path = ['static']\n")

doc/python_api/sphinx_doc_gen.sh (new executable file, 156 lines)
@@ -0,0 +1,156 @@
#!/bin/sh
# run from the blender source dir
#   bash doc/python_api/sphinx_doc_gen.sh
# ssh upload means you need an account on the server

# ----------------------------------------------------------------------------
# Upload vars

# disable for testing
DO_UPLOAD=true
DO_EXE_BLENDER=true
DO_OUT_HTML=true
DO_OUT_HTML_ZIP=true
DO_OUT_PDF=false

if [ -z $BLENDER_BIN ] ; then
  BLENDER_BIN="./blender.bin"
fi

if [ "$1" == "" ] ; then
  echo "Expected a single argument for the username on blender.org, skipping upload step!"
  DO_UPLOAD=false
else
  SSH_USER=$1
  SSH_HOST=$SSH_USER"@blender.org"
  SSH_UPLOAD="/data/www/vhosts/www.blender.org/api"  # blender_python_api_VERSION, added after
fi


# ----------------------------------------------------------------------------
# Blender Version & Info

# 'Blender 2.53 (sub 1) Build' --> '2_53_1' as a shell script.
# "_".join(str(v) for v in bpy.app.version)
# custom blender vars
blender_srcdir=$(dirname -- $0)/../..
blender_version_header="$blender_srcdir/source/blender/blenkernel/BKE_blender_version.h"
blender_version=$(grep "BLENDER_VERSION\s" "$blender_version_header" | awk '{print $3}')
blender_version_cycle=$(grep "BLENDER_VERSION_CYCLE\s" "$blender_version_header" | awk '{print $3}')
unset blender_version_header

BLENDER_VERSION=$(expr $blender_version / 100)_$(expr $blender_version % 100)

SSH_UPLOAD_FULL=$SSH_UPLOAD/"blender_python_api_"$BLENDER_VERSION

SPHINXBASE=doc/python_api

SPHINX_WORKDIR="$(mktemp --directory --suffix=.sphinx)"

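The shell arithmetic above turns the integer BLENDER_VERSION from BKE_blender_version.h (for example 293) into the "2_93" form used in the upload paths. The same conversion in Python, for clarity:

blender_version = 293  # as read from BKE_blender_version.h

# Equivalent of: expr $blender_version / 100  and  expr $blender_version % 100
major, minor = divmod(blender_version, 100)
BLENDER_VERSION = "%d_%d" % (major, minor)
print(BLENDER_VERSION)  # -> "2_93"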
# ----------------------------------------------------------------------------
# Generate reStructuredText (blender/python only)

if $DO_EXE_BLENDER ; then
  # Don't delete existing docs, now partial updates are used for quick builds.
  #
  # Disable ASAN error halt since it results in nonzero exit code on any minor issue.
  ASAN_OPTIONS=halt_on_error=0 \
  $BLENDER_BIN \
    --background \
    -noaudio \
    --factory-startup \
    --python-exit-code 1 \
    --python $SPHINXBASE/sphinx_doc_gen.py \
    -- \
    --output=$SPHINX_WORKDIR

  if (($? != 0)) ; then
    echo "Generating documentation failed, aborting"
    exit 1
  fi
fi

# ----------------------------------------------------------------------------
# Generate HTML (sphinx)

if $DO_OUT_HTML ; then
  sphinx-build -b html -j auto $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out

  # XXX, saves space on upload and zip, should move HTML outside
  # and zip up there, for now this is OK
  rm -rf sphinx-out/.doctrees

  # in case we have a zip already
  rm -f blender_python_reference_$BLENDER_VERSION.zip

  # --------------------------------------------------------------------------
  # ZIP the HTML dir for upload

  if $DO_OUT_HTML_ZIP ; then
    # lame, temp rename dir
    mv sphinx-out blender_python_reference_$BLENDER_VERSION
    zip -r -9 blender_python_reference_$BLENDER_VERSION.zip blender_python_reference_$BLENDER_VERSION
    mv blender_python_reference_$BLENDER_VERSION sphinx-out
  fi

  cd -
fi


# ----------------------------------------------------------------------------
# Generate PDF (sphinx/laytex)

if $DO_OUT_PDF ; then
  sphinx-build -n -b latex -j auto $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
  make -C $SPHINX_WORKDIR/sphinx-out
  mv $SPHINX_WORKDIR/sphinx-out/contents.pdf \
     $SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf
fi

# ----------------------------------------------------------------------------
# Upload to blender servers, comment this section for testing

if $DO_UPLOAD ; then

  ssh $SSH_USER@blender.org 'rm -rf '$SSH_UPLOAD_FULL'/*'
  rsync --progress -ave "ssh -p 22" $SPHINX_WORKDIR/sphinx-out/* $SSH_HOST:$SSH_UPLOAD_FULL/

  ## symlink the dir to a static URL
  #ssh $SSH_USER@blender.org 'rm '$SSH_UPLOAD'/250PythonDoc && ln -s '$SSH_UPLOAD_FULL' '$SSH_UPLOAD'/250PythonDoc'
  if [ "$blender_version_cycle" = "release" ] ; then
    ssh $SSH_USER@blender.org 'rm '$SSH_UPLOAD'/blender_python_api_current && ln -s '$SSH_UPLOAD_FULL' '$SSH_UPLOAD'/blender_python_api_current'
  fi

  # better redirect
  ssh $SSH_USER@blender.org 'echo "<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\" content=\"0;url=../blender_python_api_'$BLENDER_VERSION'/\"></head><body>Redirecting...</body></html>" > '$SSH_UPLOAD'/250PythonDoc/index.html'

  # redirect for release only so wiki can point here
  if [ "$blender_version_cycle" = "release" ] ; then
    ssh $SSH_USER@blender.org 'echo "<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\" content=\"0;url=../blender_python_api_'$BLENDER_VERSION'/\"></head><body>Redirecting...</body></html>" > '$SSH_UPLOAD'/blender_python_api/index.html'
  fi

  if $DO_OUT_PDF ; then
    # rename so local PDF has matching name.
    rsync --progress -ave "ssh -p 22" \
          $SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf \
          $SSH_HOST:$SSH_UPLOAD_FULL/blender_python_reference_$BLENDER_VERSION.pdf
  fi

  if $DO_OUT_HTML_ZIP ; then
    rsync --progress -ave "ssh -p 22" \
          $SPHINX_WORKDIR/blender_python_reference_$BLENDER_VERSION.zip \
          $SSH_HOST:$SSH_UPLOAD_FULL/blender_python_reference_$BLENDER_VERSION.zip
  fi

fi


# ----------------------------------------------------------------------------
# Print some useful text

echo ""
echo "Finished! view the docs from: "
if $DO_OUT_HTML ; then echo "  html:" $SPHINX_WORKDIR/sphinx-out/index.html ; fi
if $DO_OUT_PDF ; then echo "   pdf:" $SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf ; fi
doc/python_api/sphinx_doc_update.py (new executable file, 216 lines)
@@ -0,0 +1,216 @@
#!/usr/bin/env python3

# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

"""
This is a helper script to generate Blender Python API documentation (using Sphinx), and update server data using rsync.

You'll need to specify your user login and password, obviously.

Example usage:

   ./sphinx_doc_update.py --jobs 16 --mirror ../../../docs/remote_api_backup/ --source ../.. --blender ../../../build_cmake/bin/blender --user foobar --password barfoo

"""

import os
import shutil
import subprocess
import sys
import tempfile
import zipfile


DEFAULT_RSYNC_SERVER = "docs.blender.org"
DEFAULT_RSYNC_ROOT = "/api/"
DEFAULT_SYMLINK_ROOT = "/data/www/vhosts/docs.blender.org/api"


def argparse_create():
    import argparse
    global __doc__

    # When --help or no args are given, print this help
    usage_text = __doc__

    parser = argparse.ArgumentParser(description=usage_text,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument(
        "--mirror", dest="mirror_dir",
        metavar='PATH', required=True,
        help="Path to local rsync mirror of api doc server")
    parser.add_argument(
        "--source", dest="source_dir",
        metavar='PATH', required=True,
        help="Path to Blender git repository")
    parser.add_argument(
        "--blender", dest="blender",
        metavar='PATH', required=True,
        help="Path to Blender executable")
    parser.add_argument(
        "--rsync-server", dest="rsync_server", default=DEFAULT_RSYNC_SERVER,
        metavar='RSYNCSERVER', type=str, required=False,
        help=("rsync server address"))
    parser.add_argument(
        "--rsync-root", dest="rsync_root", default=DEFAULT_RSYNC_ROOT,
        metavar='RSYNCROOT', type=str, required=False,
        help=("Root path of API doc on rsync server"))
    parser.add_argument(
        "--user", dest="user",
        metavar='USER', type=str, required=True,
        help=("User to login on rsync server"))
    parser.add_argument(
        "--password", dest="password",
        metavar='PASSWORD', type=str, required=True,
        help=("Password to login on rsync server"))
    parser.add_argument(
        "--jobs", dest="jobs_nr",
        metavar='NR', type=int, required=False, default=1,
        help="Number of sphinx building jobs to launch in parallel")

    return parser


def main():
    # ----------
    # Parse Args

    args = argparse_create().parse_args()

    rsync_base = "rsync://%s@%s:%s" % (args.user, args.rsync_server, args.rsync_root)

    blenver = api_blenver = api_blenver_zip = ""
    api_name = ""
    branch = ""
    is_release = is_beta = False

    # I) Update local mirror using rsync.
    rsync_mirror_cmd = ("rsync", "--delete-after", "-avzz", rsync_base, args.mirror_dir)
    subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))

    with tempfile.TemporaryDirectory() as tmp_dir:
        # II) Generate doc source in temp dir.
        doc_gen_cmd = (
            args.blender, "--background", "-noaudio", "--factory-startup", "--python-exit-code", "1",
            "--python", "%s/doc/python_api/sphinx_doc_gen.py" % args.source_dir, "--",
            "--output", tmp_dir
        )
        subprocess.run(doc_gen_cmd)

        # III) Get Blender version info.
        getver_file = os.path.join(tmp_dir, "blendver.txt")
        getver_script = (r"""import sys, bpy
with open(sys.argv[-1], 'w') as f:
    is_release = bpy.app.version_cycle in {'rc', 'release'}
    is_beta = bpy.app.version_cycle in {'beta'}
    branch = bpy.app.build_branch.split()[0].decode()
    f.write('%d\n' % is_release)
    f.write('%d\n' % is_beta)
    f.write('%s\n' % branch)
    f.write('%d.%d\n' % (bpy.app.version[0], bpy.app.version[1]))
    f.write('%d.%d\n' % (bpy.app.version[0], bpy.app.version[1])
            if (is_release or is_beta) else '%s\n' % branch)
    f.write('%d_%d' % (bpy.app.version[0], bpy.app.version[1]))
""")
        get_ver_cmd = (args.blender, "--background", "-noaudio", "--factory-startup", "--python-exit-code", "1",
                       "--python-expr", getver_script, "--", getver_file)
        subprocess.run(get_ver_cmd)
        with open(getver_file) as f:
            is_release, is_beta, branch, blenver, api_blenver, api_blenver_zip = f.read().split("\n")
        is_release = bool(int(is_release))
        is_beta = bool(int(is_beta))
        os.remove(getver_file)

        # IV) Build doc.
        curr_dir = os.getcwd()
        os.chdir(tmp_dir)
        sphinx_cmd = ("sphinx-build", "-j", str(args.jobs_nr), "-b", "html", "sphinx-in", "sphinx-out")
        subprocess.run(sphinx_cmd)
        shutil.rmtree(os.path.join("sphinx-out", ".doctrees"))
        os.chdir(curr_dir)

        # V) Cleanup existing matching dir in server mirror (if any), and copy new doc.
        api_name = api_blenver
        api_dir = os.path.join(args.mirror_dir, api_name)
        if os.path.exists(api_dir):
            if os.path.islink(api_dir):
                os.remove(api_dir)
            else:
                shutil.rmtree(api_dir)
        os.rename(os.path.join(tmp_dir, "sphinx-out"), api_dir)

    # VI) Create zip archive.
    zip_name = "blender_python_reference_%s" % api_blenver_zip  # We can't use 'release' postfix here...
    zip_path = os.path.join(args.mirror_dir, zip_name)
    with zipfile.ZipFile(zip_path, 'w') as zf:
        for dirname, _, filenames in os.walk(api_dir):
            for filename in filenames:
                filepath = os.path.join(dirname, filename)
                zip_filepath = os.path.join(zip_name, os.path.relpath(filepath, api_dir))
                zf.write(filepath, arcname=zip_filepath)
    os.rename(zip_path, os.path.join(api_dir, "%s.zip" % zip_name))

    # VII) Create symlinks and html redirects.
    if is_release:
        symlink = os.path.join(args.mirror_dir, "current")
        if os.path.exists(symlink):
            if os.path.islink(symlink):
                os.remove(symlink)
            else:
                shutil.rmtree(symlink)
        os.symlink("./%s" % api_name, symlink)
        with open(os.path.join(args.mirror_dir, "250PythonDoc/index.html"), 'w') as f:
            f.write("<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\""
                    "content=\"0;url=../%s/\"></head><body>Redirecting...</body></html>" % api_name)
    elif is_beta:
        # We do not have any particular symlink for that stage.
        pass
    elif branch == "master":
        # Also create a symlink from version number to actual master api doc.
        symlink = os.path.join(args.mirror_dir, blenver)
        if os.path.exists(symlink):
            if os.path.islink(symlink):
                os.remove(symlink)
            else:
                shutil.rmtree(symlink)
        os.symlink("./%s" % api_name, symlink)
        with open(os.path.join(args.mirror_dir, "blender_python_api/index.html"), 'w') as f:
            f.write("<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\""
                    "content=\"0;url=../%s/\"></head><body>Redirecting...</body></html>" % api_name)

    # VIII) Upload (first do a dry-run so user can ensure everything is OK).
    print("Doc generated in local mirror %s, please check it before uploading "
          "(hit [Enter] to continue, [Ctrl-C] to exit):" % api_dir)
    sys.stdin.read(1)

    rsync_mirror_cmd = ("rsync", "--dry-run", "--delete-after", "-avzz", args.mirror_dir, rsync_base)
    subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))

    print("Rsync upload simulated, please check every thing is OK (hit [Enter] to continue, [Ctrl-C] to exit):")
    sys.stdin.read(1)

    rsync_mirror_cmd = ("rsync", "--delete-after", "-avzz", args.mirror_dir, rsync_base)
    subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))


if __name__ == "__main__":
    main()
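The version hand-off in the script above works by having Blender write six newline-separated fields to blendver.txt, which the parent process reads back. A tiny round-trip sketch of that exchange, using made-up values:

# Simulate what the embedded getver_script writes ...
fields = ["1", "0", "master", "2.93", "2.93", "2_93"]
with open("blendver.txt", "w") as f:
    f.write("\n".join(fields))

# ... and how sphinx_doc_update.py reads it back.
with open("blendver.txt") as f:
    is_release, is_beta, branch, blenver, api_blenver, api_blenver_zip = f.read().split("\n")
is_release = bool(int(is_release))
is_beta = bool(int(is_beta))
print(branch, blenver, api_blenver_zip)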
extern/mantaflow/CMakeLists.txt (vendored, 10 lines changed)
@@ -75,8 +75,6 @@ endif()

if(WITH_OPENVDB)
  add_definitions(-DOPENVDB=1)
  # OpenVDB headers use deprecated TBB headers, silence warning.
  add_definitions(-DTBB_SUPPRESS_DEPRECATED_MESSAGES=1)
endif()

if(WITH_OPENVDB_BLOSC)
@@ -133,14 +131,6 @@ if(WITH_OPENVDB)
  list(APPEND LIB
    ${OPENVDB_LIBRARIES}
  )
  if(WIN32)
    # OpenVDB emits lots of these, they should be suppressed through other
    # means but MSVC 16.8/16.9 has broken this functionality, so C4251 is
    # suppressed here explicitly. See
    # https://developercommunity.visualstudio.com/content/problem/1167590/bug.html
    # for details.
    string(APPEND CMAKE_CXX_FLAGS " /wd4251")
  endif()
endif()

set(SRC

extern/mantaflow/helper/util/vectorbase.h (vendored, 5 lines changed)
@@ -664,11 +664,6 @@ template<class T> inline Vec3i toVec3iRound(T v)
  return Vec3i((int)round(v[0]), (int)round(v[1]), (int)round(v[2]));
}

template<class T> inline Vec3i toVec3iFloor(T v)
{
  return Vec3i((int)floor(v[0]), (int)floor(v[1]), (int)floor(v[2]));
}

//! convert to int Vector if values are close enough to an int
template<class T> inline Vec3i toVec3iChecked(T v)
{

extern/mantaflow/preprocessed/conjugategrad.h (vendored, 16 lines changed)
@@ -170,7 +170,7 @@ struct ApplyMatrix : public KernelBase {
    unusedParameter(vecRhs);  // Not needed in this matrix application

    if (matrixA.size() != 4)
      errMsg("ConjugateGrad: Invalid A matrix in apply matrix step");
      errMsg("ConjugatedGrad: Invalid A matrix in apply matrix step");
    Grid<Real> &A0 = *matrixA[0];
    Grid<Real> &Ai = *matrixA[1];
    Grid<Real> &Aj = *matrixA[2];
@@ -256,7 +256,7 @@ struct ApplyMatrix2D : public KernelBase {
    unusedParameter(vecRhs);  // Not needed in this matrix application

    if (matrixA.size() != 3)
      errMsg("ConjugateGrad: Invalid A matrix in apply matrix step");
      errMsg("ConjugatedGrad: Invalid A matrix in apply matrix step");
    Grid<Real> &A0 = *matrixA[0];
    Grid<Real> &Ai = *matrixA[1];
    Grid<Real> &Aj = *matrixA[2];
@@ -338,7 +338,7 @@ struct ApplyMatrixViscosityU : public KernelBase {
                  const std::vector<Grid<Real> *> vecRhs) const
  {
    if (matrixA.size() != 15)
      errMsg("ConjugateGrad: Invalid A matrix in apply matrix step");
      errMsg("ConjugatedGrad: Invalid A matrix in apply matrix step");
    Grid<Real> &A0 = *matrixA[0];
    Grid<Real> &Aplusi = *matrixA[1];
    Grid<Real> &Aplusj = *matrixA[2];
@@ -348,7 +348,7 @@ struct ApplyMatrixViscosityU : public KernelBase {
    Grid<Real> &Aminusk = *matrixA[6];

    if (vecRhs.size() != 2)
      errMsg("ConjugateGrad: Invalid rhs vector in apply matrix step");
      errMsg("ConjugatedGrad: Invalid rhs vector in apply matrix step");
    Grid<Real> &srcV = *vecRhs[0];
    Grid<Real> &srcW = *vecRhs[1];

@@ -450,7 +450,7 @@ struct ApplyMatrixViscosityV : public KernelBase {
                  const std::vector<Grid<Real> *> vecRhs) const
  {
    if (matrixA.size() != 15)
      errMsg("ConjugateGrad: Invalid A matrix in apply matrix step");
      errMsg("ConjugatedGrad: Invalid A matrix in apply matrix step");
    Grid<Real> &A0 = *matrixA[0];
    Grid<Real> &Aplusi = *matrixA[1];
    Grid<Real> &Aplusj = *matrixA[2];
@@ -460,7 +460,7 @@ struct ApplyMatrixViscosityV : public KernelBase {
    Grid<Real> &Aminusk = *matrixA[6];

    if (vecRhs.size() != 2)
      errMsg("ConjugateGrad: Invalid rhs vector in apply matrix step");
      errMsg("ConjugatedGrad: Invalid rhs vector in apply matrix step");
    Grid<Real> &srcU = *vecRhs[0];
    Grid<Real> &srcW = *vecRhs[1];

@@ -562,7 +562,7 @@ struct ApplyMatrixViscosityW : public KernelBase {
                  const std::vector<Grid<Real> *> vecRhs) const
  {
    if (matrixA.size() != 15)
      errMsg("ConjugateGrad: Invalid A matrix in apply matrix step");
      errMsg("ConjugatedGrad: Invalid A matrix in apply matrix step");
    Grid<Real> &A0 = *matrixA[0];
    Grid<Real> &Aplusi = *matrixA[1];
    Grid<Real> &Aplusj = *matrixA[2];
@@ -572,7 +572,7 @@ struct ApplyMatrixViscosityW : public KernelBase {
    Grid<Real> &Aminusk = *matrixA[6];

    if (vecRhs.size() != 2)
      errMsg("ConjugateGrad: Invalid rhs vector in apply matrix step");
      errMsg("ConjugatedGrad: Invalid rhs vector in apply matrix step");
    Grid<Real> &srcU = *vecRhs[0];
    Grid<Real> &srcV = *vecRhs[1];

extern/mantaflow/preprocessed/fileio/iogrids.cpp (vendored, 23 lines changed)
@@ -628,24 +628,13 @@ template<class T> int readGridUni(const string &name, Grid<T> *grid)
  // current file format
  UniHeader head;
  assertMsg(gzread(gzf, &head, sizeof(UniHeader)) == sizeof(UniHeader),
            "readGridUni: Can't read file, no header present");
            "can't read file, no header present");
  assertMsg(head.dimX == grid->getSizeX() && head.dimY == grid->getSizeY() &&
                head.dimZ == grid->getSizeZ(),
            "grid dim doesn't match, " << Vec3(head.dimX, head.dimY, head.dimZ) << " vs "
                                       << grid->getSize());
  assertMsg(unifyGridType(head.gridType) == unifyGridType(grid->getType()),
            "readGridUni: Grid type doesn't match " << head.gridType << " vs "
                                                    << grid->getType());

  const Vec3i curGridSize = grid->getParent()->getGridSize();
  const Vec3i headGridSize(head.dimX, head.dimY, head.dimZ);
#  if BLENDER
  // Correct grid size is only a soft requirement in Blender
  if (headGridSize != curGridSize) {
    debMsg("readGridUni: Grid dim doesn't match, " << headGridSize << " vs " << curGridSize, 1);
    return 0;
  }
#  else
  assertMsg(headGridSize == curGridSize,
            "readGridUni: Grid dim doesn't match, " << headGridSize << " vs " << curGridSize);
#  endif

            "grid type doesn't match " << head.gridType << " vs " << grid->getType());
#  if FLOATINGPOINT_PRECISION != 1
  // convert float to double
  Grid<T> temp(grid->getParent());

@@ -230,19 +230,6 @@ int readParticlesUni(const std::string &name, BasicParticleSystem *parts)
  assertMsg(((head.bytesPerElement == PartSysSize) && (head.elementType == 0)),
            "particle type doesn't match");

  const Vec3i curGridSize = parts->getParent()->getGridSize();
  const Vec3i headGridSize(head.dimX, head.dimY, head.dimZ);
#  if BLENDER
  // Correct grid size is only a soft requirement in Blender
  if (headGridSize != curGridSize) {
    debMsg("readPdataUni: Grid dim doesn't match, " << headGridSize << " vs " << curGridSize, 1);
    return 0;
  }
#  else
  assertMsg(headGridSize == curGridSize,
            "readPdataUni: Grid dim doesn't match, " << headGridSize << " vs " << curGridSize);
#  endif

  // re-allocate all data
  parts->resizeAll(head.dim);

@@ -338,19 +325,6 @@ template<class T> int readPdataUni(const std::string &name, ParticleDataImpl<T>
  pdata->getParticleSys()->resize(head.dim);  // ensure that parent particle system has same size
  pdata->resize(head.dim);

  const Vec3i curGridSize = pdata->getParent()->getGridSize();
  const Vec3i headGridSize(head.dimX, head.dimY, head.dimZ);
#  if BLENDER
  // Correct grid size is only a soft requirement in Blender
  if (headGridSize != curGridSize) {
    debMsg("readPdataUni: Grid dim doesn't match, " << headGridSize << " vs " << curGridSize, 1);
    return 0;
  }
#  else
  assertMsg(headGridSize == curGridSize,
            "readPdataUni: Grid dim doesn't match, " << headGridSize << " vs " << curGridSize);
#  endif

  assertMsg(head.dim == pdata->size(), "pdata size doesn't match");
#  if FLOATINGPOINT_PRECISION != 1
  ParticleDataImpl<T> temp(pdata->getParent());

@@ -90,13 +90,6 @@ template<> void convertFrom(openvdb::Vec3s &in, Vec3 *out)
  (*out).z = in.z();
}

template<> void convertFrom(openvdb::Vec3i &in, Vec3i *out)
{
  (*out).x = in.x();
  (*out).y = in.y();
  (*out).z = in.z();
}

// Convert to OpenVDB value from Manta value.
template<class S, class T> void convertTo(S *out, T &in)
{

163
extern/mantaflow/preprocessed/fileio/iovdb.cpp
vendored
163
extern/mantaflow/preprocessed/fileio/iovdb.cpp
vendored
@@ -38,11 +38,6 @@
|
||||
#define POSITION_NAME "P"
|
||||
#define FLAG_NAME "U"
|
||||
|
||||
#define META_BASE_RES "file_base_resolution"
|
||||
#define META_VOXEL_SIZE "file_voxel_size"
|
||||
#define META_BBOX_MAX "file_bbox_max"
|
||||
#define META_BBOX_MIN "file_bbox_min"
|
||||
|
||||
using namespace std;
|
||||
|
||||
namespace Manta {
|
||||
@@ -209,11 +204,11 @@ typename GridType::Ptr exportVDB(Grid<T> *from, float clip, openvdb::FloatGrid::
|
||||
openvdb::Coord(from->getSizeX() - 1, from->getSizeY() - 1, from->getSizeZ() - 1));
|
||||
openvdb::tools::Dense<ValueT, openvdb::tools::MemoryLayout::LayoutXYZ> dense(bbox, data);
|
||||
|
||||
// Use clip value, or (when not exporting in sparse mode) clear it in order to copy all values
|
||||
// of dense grid
|
||||
ValueT tmpClip = (from->saveSparse()) ? ValueT(clip) : ValueT(0);
|
||||
// Trick: Set clip value to very small / negative value in order to copy all values of dense
|
||||
// grids
|
||||
float tmpClip = (from->saveSparse()) ? clip : -std::numeric_limits<Real>::max();
|
||||
// Copy from dense to sparse grid structure considering clip value
|
||||
openvdb::tools::copyFromDense(dense, *to, tmpClip);
|
||||
openvdb::tools::copyFromDense(dense, *to, ValueT(tmpClip));
|
||||
|
||||
// If present, use clip grid to trim down current vdb grid even more
|
||||
if (from->saveSparse() && clipGrid && !clipGrid->empty()) {
|
||||
@@ -245,10 +240,10 @@ void exportVDB(ParticleDataImpl<MantaType> *from,
|
||||
std::vector<VDBType> vdbValues;
|
||||
std::string name = from->getName();
|
||||
|
||||
BasicParticleSystem *pp = dynamic_cast<BasicParticleSystem *>(from->getParticleSys());
|
||||
FOR_PARTS(*from)
|
||||
{
|
||||
// Optionally, skip exporting particles that have been marked as deleted
|
||||
BasicParticleSystem *pp = dynamic_cast<BasicParticleSystem *>(from->getParticleSys());
|
||||
if (skipDeletedParts && !pp->isActive(idx)) {
|
||||
continue;
|
||||
}
|
||||
@@ -393,8 +388,7 @@ int writeObjectsVDB(const string &filename,
|
||||
int compression,
|
||||
int precision,
|
||||
float clip,
|
||||
const Grid<Real> *clipGrid,
|
||||
const bool meta)
|
||||
const Grid<Real> *clipGrid)
|
||||
{
|
||||
openvdb::initialize();
|
||||
openvdb::io::File file(filename);
|
||||
@@ -495,16 +489,6 @@ int writeObjectsVDB(const string &filename,
|
||||
// Set additional grid attributes, e.g. name, grid class, compression level, etc.
|
||||
if (vdbGrid) {
|
||||
setGridOptions<openvdb::GridBase>(vdbGrid, objectName, gClass, voxelSize, precision);
|
||||
|
||||
// Optional metadata: Save additional simulation information per vdb object
|
||||
if (meta) {
|
||||
const Vec3i size = object->getParent()->getGridSize();
|
||||
// The (dense) resolution of this grid
|
||||
vdbGrid->insertMeta(META_BASE_RES,
|
||||
openvdb::Vec3IMetadata(openvdb::Vec3i(size.x, size.y, size.z)));
|
||||
// Length of one voxel side
|
||||
vdbGrid->insertMeta(META_VOXEL_SIZE, openvdb::FloatMetadata(voxelSize));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -549,44 +533,6 @@ int writeObjectsVDB(const string &filename,
|
||||
return 1;
|
||||
}
|
||||
|
||||
static void clearAll(std::vector<PbClass *> *objects, std::vector<ParticleDataBase *> pdbBuffer)
|
||||
{
|
||||
// Clear all data loaded into manta objects (e.g. during IO error)
|
||||
for (std::vector<PbClass *>::iterator iter = objects->begin(); iter != objects->end(); ++iter) {
|
||||
if (GridBase *mantaGrid = dynamic_cast<GridBase *>(*iter)) {
|
||||
if (mantaGrid->getType() & GridBase::TypeInt) {
|
||||
Grid<int> *mantaIntGrid = (Grid<int> *)mantaGrid;
|
||||
mantaIntGrid->clear();
|
||||
}
|
||||
else if (mantaGrid->getType() & GridBase::TypeReal) {
|
||||
Grid<Real> *mantaRealGrid = (Grid<Real> *)mantaGrid;
|
||||
mantaRealGrid->clear();
|
||||
}
|
||||
else if (mantaGrid->getType() & GridBase::TypeVec3) {
|
||||
Grid<Vec3> *mantaVec3Grid = (Grid<Vec3> *)mantaGrid;
|
||||
mantaVec3Grid->clear();
|
||||
}
|
||||
}
|
||||
else if (BasicParticleSystem *mantaPP = dynamic_cast<BasicParticleSystem *>(*iter)) {
|
||||
mantaPP->clear();
|
||||
}
|
||||
}
|
||||
for (ParticleDataBase *pdb : pdbBuffer) {
|
||||
if (pdb->getType() == ParticleDataBase::TypeInt) {
|
||||
ParticleDataImpl<int> *mantaPDataInt = (ParticleDataImpl<int> *)pdb;
|
||||
mantaPDataInt->clear();
|
||||
}
|
||||
else if (pdb->getType() == ParticleDataBase::TypeReal) {
|
||||
ParticleDataImpl<Real> *mantaPDataReal = (ParticleDataImpl<Real> *)pdb;
|
||||
mantaPDataReal->clear();
|
||||
}
|
||||
else if (pdb->getType() == ParticleDataBase::TypeVec3) {
|
||||
ParticleDataImpl<Vec3> *mantaPDataVec3 = (ParticleDataImpl<Vec3> *)pdb;
|
||||
mantaPDataVec3->clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
int readObjectsVDB(const string &filename, std::vector<PbClass *> *objects, float worldSize)
{
@@ -615,9 +561,6 @@ int readObjectsVDB(const string &filename, std::vector<PbClass *> *objects, floa
  // A buffer to store a handle to pData objects. These will be read alongside a particle system.
  std::vector<ParticleDataBase *> pdbBuffer;

  // Count how many objects could not be read correctly
  int readFailure = 0;

  for (std::vector<PbClass *>::iterator iter = objects->begin(); iter != objects->end(); ++iter) {

    if (gridsVDB.empty()) {
@@ -625,12 +568,11 @@ int readObjectsVDB(const string &filename, std::vector<PbClass *> *objects, floa
    }
    // If there is just one grid in this file, load it regardless of name match (to vdb caches per
    // grid).
    const bool onlyGrid = (gridsVDB.size() == 1);
    bool onlyGrid = (gridsVDB.size() == 1);

    PbClass *object = dynamic_cast<PbClass *>(*iter);
    const Real dx = object->getParent()->getDx();
    const Vec3i origRes = object->getParent()->getGridSize();
    Real voxelSize = worldSize * dx;
    const Real voxelSize = worldSize * dx;

    // Particle data objects are treated separately - buffered and inserted when reading the
    // particle system
@@ -654,81 +596,6 @@ int readObjectsVDB(const string &filename, std::vector<PbClass *> *objects, floa
      if (!nameMatch && !onlyGrid) {
        continue;
      }

      // Metadata: If present in the file, meta data will be parsed into these fields
      Real metaVoxelSize(0);
      Vec3i metaRes(0), metaBBoxMax(0), metaBBoxMin(0);

      // Loop to load all meta data that we care about
      for (openvdb::MetaMap::MetaIterator iter = vdbGrid->beginMeta(); iter != vdbGrid->endMeta();
           ++iter) {
        const std::string &name = iter->first;
        const openvdb::Metadata::Ptr value = iter->second;
        if (name.compare(META_BASE_RES) == 0) {
          openvdb::Vec3i tmp = static_cast<openvdb::Vec3IMetadata &>(*value).value();
          convertFrom(tmp, &metaRes);
        }
        else if (name.compare(META_VOXEL_SIZE) == 0) {
          float tmp = static_cast<openvdb::FloatMetadata &>(*value).value();
          convertFrom(tmp, &metaVoxelSize);

          voxelSize = metaVoxelSize; // Make sure to update voxel size variable (used in
                                     // pointgrid's importVDB())
          if (worldSize != 1.0)
            debMsg(
                "readObjectsVDB: Found voxel size in meta data. worldSize parameter will be "
                "ignored!",
                1);
        }
        else if (name.compare(META_BBOX_MAX) == 0) {
          openvdb::Vec3i tmp = static_cast<openvdb::Vec3IMetadata &>(*value).value();
          convertFrom(tmp, &metaBBoxMax);
        }
        else if (name.compare(META_BBOX_MIN) == 0) {
          openvdb::Vec3i tmp = static_cast<openvdb::Vec3IMetadata &>(*value).value();
          convertFrom(tmp, &metaBBoxMin);
        }
        else {
          debMsg("readObjectsVDB: Skipping unknown meta information '" << name << "'", 1);
        }
      }

      // Compare metadata with allocated grid setup. This prevents invalid index access.
      if (notZero(metaRes) && metaRes != origRes) {
        debMsg("readObjectsVDB Warning: Grid '" << vdbGrid->getName()
                                                << "' has not been read. Meta grid res " << metaRes
                                                << " vs " << origRes << " current grid size",
               1);
        readFailure++;
        break;
      }
      if (notZero(metaVoxelSize) && metaVoxelSize != voxelSize) {
        debMsg("readObjectsVDB Warning: Grid '"
                   << vdbGrid->getName() << "' has not been read. Meta voxel size "
                   << metaVoxelSize << " vs " << voxelSize << " current voxel size",
               1);
        readFailure++;
        break;
      }
      if (metaBBoxMax.x > origRes.x || metaBBoxMax.y > origRes.y || metaBBoxMax.z > origRes.z) {
        debMsg("readObjectsVDB Warning: Grid '"
                   << vdbGrid->getName() << "' has not been read. Vdb bbox max " << metaBBoxMax
                   << " vs " << origRes << " current grid size",
               1);
        readFailure++;
        break;
      }
      const Vec3i origOrigin(0);
      if (metaBBoxMin.x < origOrigin.x || metaBBoxMin.y < origOrigin.y ||
          metaBBoxMin.z < origOrigin.z) {
        debMsg("readObjectsVDB Warning: Grid '"
                   << vdbGrid->getName() << "' has not been read. Vdb bbox min " << metaBBoxMin
                   << " vs " << origOrigin << " current grid origin",
               1);
        readFailure++;
        break;
      }

      if (GridBase *mantaGrid = dynamic_cast<GridBase *>(*iter)) {

        if (mantaGrid->getType() & GridBase::TypeInt) {
@@ -788,17 +655,6 @@ int readObjectsVDB(const string &filename, std::vector<PbClass *> *objects, floa
          return 0;
        }
      }
      // Do not continue loading objects in this loop if there was a read error
      if (readFailure > 0) {
        break;
      }
    }

  if (readFailure > 0) {
    // Clear all data that has already been loaded into simulation objects
    clearAll(objects, pdbBuffer);
    pdbBuffer.clear();
    return 0;
  }

  // Give out a warning if pData items were present but could not be read due to missing particle
@@ -873,8 +729,7 @@ int writeObjectsVDB(const string &filename,
                    int compression,
                    int precision,
                    float clip,
                    const Grid<Real> *clipGrid,
                    const bool meta)
                    const Grid<Real> *clipGrid)
{
  errMsg("Cannot save to .vdb file. Mantaflow has not been built with OpenVDB support.");
  return 0;

@@ -85,8 +85,7 @@ int save(const string &name,
         bool precisionHalf = true,
         int precision = PRECISION_HALF,
         float clip = 1e-4,
         const Grid<Real> *clipGrid = nullptr,
         const bool meta = false)
         const Grid<Real> *clipGrid = nullptr)
{

  if (!precisionHalf) {
@@ -106,7 +105,7 @@ int save(const string &name,
    return writeGridsVol(name, &objects);
  if (ext == ".vdb")
    return writeObjectsVDB(
        name, &objects, worldSize, skipDeletedParts, compression, precision, clip, clipGrid, meta);
        name, &objects, worldSize, skipDeletedParts, compression, precision, clip, clipGrid);
  else if (ext == ".npz")
    return writeGridsNumpy(name, &objects);
  else if (ext == ".txt")
@@ -135,7 +134,6 @@ static PyObject *_W_1(PyObject *_self, PyObject *_linargs, PyObject *_kwds)
      int precision = _args.getOpt<int>("precision", 6, PRECISION_HALF, &_lock);
      float clip = _args.getOpt<float>("clip", 7, 1e-4, &_lock);
      const Grid<Real> *clipGrid = _args.getPtrOpt<Grid<Real>>("clipGrid", 8, nullptr, &_lock);
      const bool meta = _args.getOpt<bool>("meta", 9, false, &_lock);
      _retval = toPy(save(name,
                          objects,
                          worldSize,
@@ -144,8 +142,7 @@ static PyObject *_W_1(PyObject *_self, PyObject *_linargs, PyObject *_kwds)
                          precisionHalf,
                          precision,
                          clip,
                          clipGrid,
                          meta));
                          clipGrid));
      _args.check();
    }
    pbFinalizePlugin(parent, "save", !noTiming);

@@ -77,8 +77,7 @@ int writeObjectsVDB(const std::string &filename,
                    int compression = COMPRESSION_ZIP,
                    int precision = PRECISION_HALF,
                    float clip = 1e-4,
                    const Grid<Real> *clipGrid = nullptr,
                    const bool meta = false);
                    const Grid<Real> *clipGrid = nullptr);
int readObjectsVDB(const std::string &filename,
                   std::vector<PbClass *> *objects,
                   float scale = 1.0);
extern/mantaflow/preprocessed/gitinfo.h (vendored, 2 changes)

@@ -1,3 +1,3 @@


#define MANTA_GIT_VERSION "commit 39b7a415721ecbf6643612a24e8eadd221aeb934"
#define MANTA_GIT_VERSION "commit e2285cb9bc492987f728123be6cfc1fe11fe73d6"
extern/mantaflow/preprocessed/grid.cpp (vendored, 6 changes)

@@ -508,7 +508,8 @@ struct CompMaxVec : public KernelBase {

template<class T> Grid<T> &Grid<T>::copyFrom(const Grid<T> &a, bool copyType)
{
  assertMsg(a.mSize == mSize, "different grid resolutions " << a.mSize << " vs " << this->mSize);
  assertMsg(a.mSize.x == mSize.x && a.mSize.y == mSize.y && a.mSize.z == mSize.z,
            "different grid resolutions " << a.mSize << " vs " << this->mSize);
  memcpy(mData, a.mData, sizeof(T) * mSize.x * mSize.y * mSize.z);
  if (copyType)
    mType = a.mType; // copy type marker
@@ -3401,7 +3402,8 @@ void PbRegister_markIsolatedFluidCell()
void copyMACData(
    const MACGrid &source, MACGrid &target, const FlagGrid &flags, const int flag, const int bnd)
{
  assertMsg(source.getSize() == target.getSize(),
  assertMsg(source.getSize().x == target.getSize().x && source.getSize().y == target.getSize().y &&
                source.getSize().z == target.getSize().z,
            "different grid resolutions " << source.getSize() << " vs " << target.getSize());

  // Grid<Real> divGrid(target.getParent());
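Reviewer note: the two hunks above swap a vector operator== for explicit per-component comparisons inside assertMsg. A tiny stand-alone equivalent of the patched condition; Size3 is an invented stand-in for the Vec3i size triple:

```cpp
#include <cassert>

// Invented stand-in: the real type exposes .x/.y/.z members, as the
// component-wise compare in the patch implies.
struct Size3 { int x, y, z; };

bool sameSize(const Size3 &a, const Size3 &b)
{
  // Compare each component explicitly instead of relying on an
  // operator== for the vector type.
  return a.x == b.x && a.y == b.y && a.z == b.z;
}

int main()
{
  assert(sameSize({64, 64, 64}, {64, 64, 64}));
  assert(!sameSize({64, 64, 64}, {64, 64, 32}));
  return 0;
}
```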
extern/mantaflow/preprocessed/grid.h (vendored, 16 changes)

@@ -204,12 +204,14 @@ class GridBase : public PbClass {
  inline void checkIndex(int i, int j, int k) const;
  //! Check if indices are within bounds, otherwise error (should only be called when debugging)
  inline void checkIndex(IndexInt idx) const;
  //! Check if vector int is within given boundaries
  inline bool isInBounds(const Vec3i &p, int bnd = 0) const;
  //! Check if vector real is within given boundaries
  //! Check if index is within given boundaries
  inline bool isInBounds(const Vec3i &p, int bnd) const;
  //! Check if index is within given boundaries
  inline bool isInBounds(const Vec3i &p) const;
  //! Check if index is within given boundaries
  inline bool isInBounds(const Vec3 &p, int bnd = 0) const
  {
    return isInBounds(toVec3iFloor(p), bnd);
    return isInBounds(toVec3i(p), bnd);
  }
  //! Check if linear index is in the range of the array
  inline bool isInBounds(IndexInt idx) const;
@@ -594,7 +596,6 @@ template<class T> class Grid : public GridBase {
  //! set data
  inline void set(int i, int j, int k, T &val)
  {
    DEBUG_ONLY(checkIndex(i, j, k));
    mData[index(i, j, k)] = val;
  }

@@ -1783,6 +1784,11 @@ inline void GridBase::checkIndex(IndexInt idx) const
  }
}

bool GridBase::isInBounds(const Vec3i &p) const
{
  return (p.x >= 0 && p.y >= 0 && p.z >= 0 && p.x < mSize.x && p.y < mSize.y && p.z < mSize.z);
}

bool GridBase::isInBounds(const Vec3i &p, int bnd) const
{
  bool ret = (p.x >= bnd && p.y >= bnd && p.x < mSize.x - bnd && p.y < mSize.y - bnd);
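Reviewer note: this file (and the flip.cpp hunks below) switches the Vec3-to-Vec3i conversion between toVec3iFloor and toVec3i. Assuming toVec3i truncates toward zero while toVec3iFloor rounds toward negative infinity (worth verifying against mantaflow's vectorbase.h), the two only disagree for negative coordinates, i.e. positions left of the grid origin, as this stand-alone snippet shows:

```cpp
#include <cmath>
#include <cstdio>

// Assumed semantics of the two mantaflow conversions, reduced to one axis.
int truncate_int(float v) { return (int)v; }            // like toVec3i (assumed)
int floor_int(float v) { return (int)std::floor(v); }   // like toVec3iFloor

int main()
{
  const float xs[] = {1.75f, 0.25f, -0.25f};
  for (float x : xs)
    std::printf("%+.2f -> trunc %d, floor %d\n", x, truncate_int(x), floor_int(x));
  // Output: +1.75 -> 1, 1;  +0.25 -> 0, 0;  -0.25 -> 0, -1
  // The isInBounds() checks that follow each conversion reject negative
  // cells either way, which limits the visible difference in practice.
  return 0;
}
```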
extern/mantaflow/preprocessed/grid4d.cpp (vendored, 4 changes)

@@ -491,7 +491,9 @@ template<class T> Grid4d<T> &Grid4d<T>::safeDivide(const Grid4d<T> &a)
}
template<class T> Grid4d<T> &Grid4d<T>::copyFrom(const Grid4d<T> &a, bool copyType)
{
  assertMsg(a.mSize == mSize, "different Grid4d resolutions " << a.mSize << " vs " << this->mSize);
  assertMsg(a.mSize.x == mSize.x && a.mSize.y == mSize.y && a.mSize.z == mSize.z &&
                a.mSize.t == mSize.t,
            "different Grid4d resolutions " << a.mSize << " vs " << this->mSize);
  memcpy(mData, a.mData, sizeof(T) * mSize.x * mSize.y * mSize.z * mSize.t);
  if (copyType)
    mType = a.mType; // copy type marker
extern/mantaflow/preprocessed/plugin/flip.cpp (vendored, 10 changes)

@@ -429,7 +429,7 @@ void markFluidCells(const BasicParticleSystem &parts,
  for (IndexInt idx = 0; idx < parts.size(); idx++) {
    if (!parts.isActive(idx) || (ptype && ((*ptype)[idx] & exclude)))
      continue;
    Vec3i p = toVec3iFloor(parts.getPos(idx));
    Vec3i p = toVec3i(parts.getPos(idx));
    if (flags.isInBounds(p) && flags.isEmpty(p))
      flags(p) = (flags(p) | FlagGrid::TypeFluid) & ~FlagGrid::TypeEmpty;
  }
@@ -544,7 +544,7 @@ void adjustNumber(BasicParticleSystem &parts,
  // count particles in cells, and delete excess particles
  for (IndexInt idx = 0; idx < (int)parts.size(); idx++) {
    if (parts.isActive(idx)) {
      Vec3i p = toVec3iFloor(parts.getPos(idx));
      Vec3i p = toVec3i(parts.getPos(idx));
      if (!tmp.isInBounds(p)) {
        parts.kill(idx); // out of domain, remove
        continue;
@@ -711,7 +711,7 @@ void gridParticleIndex(const BasicParticleSystem &parts,
  for (IndexInt idx = 0; idx < (IndexInt)parts.size(); idx++) {
    if (parts.isActive(idx)) {
      // check index for validity...
      Vec3i p = toVec3iFloor(parts.getPos(idx));
      Vec3i p = toVec3i(parts.getPos(idx));
      if (!index.isInBounds(p)) {
        inactive++;
        continue;
@@ -740,7 +740,7 @@ void gridParticleIndex(const BasicParticleSystem &parts,
  for (IndexInt idx = 0; idx < (IndexInt)parts.size(); idx++) {
    if (!parts.isActive(idx))
      continue;
    Vec3i p = toVec3iFloor(parts.getPos(idx));
    Vec3i p = toVec3i(parts.getPos(idx));
    if (!index.isInBounds(p)) {
      continue;
    }
@@ -1636,7 +1636,7 @@ struct knPushOutofObs : public KernelBase {
  {
    if (!parts.isActive(idx) || (ptype && ((*ptype)[idx] & exclude)))
      return;
    Vec3i p = toVec3iFloor(parts.getPos(idx));
    Vec3i p = toVec3i(parts.getPos(idx));

    if (!flags.isInBounds(p))
      return;

@@ -2278,7 +2278,7 @@ T convolveGrid(Grid<T> &originGrid, GaussianKernelCreator &gkSigma, Vec3 pos, in
    step = Vec3(0.0, 0.0, 1.0);
  T pxResult(0);
  for (int i = 0; i < gkSigma.mDim; ++i) {
    Vec3i curpos = toVec3iFloor(pos - step * (i - gkSigma.mDim / 2));
    Vec3i curpos = toVec3i(pos - step * (i - gkSigma.mDim / 2));
    if (originGrid.isInBounds(curpos))
      pxResult += gkSigma.get1DKernelValue(i) * originGrid.get(curpos);
    else { // TODO , improve...
@@ -2423,7 +2423,7 @@ struct KnBlurMACGridGauss : public KernelBase {

    Vec3 pxResult(0.0f);
    for (int di = 0; di < gkSigma.mDim; ++di) {
      Vec3i curpos = toVec3iFloor(pos - step * (di - gkSigma.mDim / 2));
      Vec3i curpos = toVec3i(pos - step * (di - gkSigma.mDim / 2));
      if (!originGrid.isInBounds(curpos)) {
        if (curpos.x < 0)
          curpos.x = 0;

@@ -1214,8 +1214,8 @@ struct knFlipUpdateSecondaryParticlesLinear : public KernelBase {

      // anti tunneling for small obstacles
      for (int ct = 1; ct < antitunneling; ct++) {
        Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos +
                                     ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
        Vec3i tempPos = toVec3i(pts_sec[idx].pos +
                                ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
        if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
          pts_sec.kill(idx);
          return;
@@ -1234,8 +1234,8 @@ struct knFlipUpdateSecondaryParticlesLinear : public KernelBase {

      // anti tunneling for small obstacles
      for (int ct = 1; ct < antitunneling; ct++) {
        Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos +
                                     ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
        Vec3i tempPos = toVec3i(pts_sec[idx].pos +
                                ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
        if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
          pts_sec.kill(idx);
          return;
@@ -1252,7 +1252,7 @@ struct knFlipUpdateSecondaryParticlesLinear : public KernelBase {
      const Vec3 vj = v.getInterpolated(pts_sec[idx].pos);
      // anti tunneling for small obstacles
      for (int ct = 1; ct < antitunneling; ct++) {
        Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt * vj);
        Vec3i tempPos = toVec3i(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt * vj);
        if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
          pts_sec.kill(idx);
          return;
@@ -1474,8 +1474,8 @@ struct knFlipUpdateSecondaryParticlesCubic : public KernelBase {

      // anti tunneling for small obstacles
      for (int ct = 1; ct < antitunneling; ct++) {
        Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos +
                                     ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
        Vec3i tempPos = toVec3i(pts_sec[idx].pos +
                                ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
        if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
          pts_sec.kill(idx);
          return;
@@ -1515,8 +1515,8 @@ struct knFlipUpdateSecondaryParticlesCubic : public KernelBase {

      // anti tunneling for small obstacles
      for (int ct = 1; ct < antitunneling; ct++) {
        Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos +
                                     ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
        Vec3i tempPos = toVec3i(pts_sec[idx].pos +
                                ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
        if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
          pts_sec.kill(idx);
          return;
@@ -1554,8 +1554,8 @@ struct knFlipUpdateSecondaryParticlesCubic : public KernelBase {

      // anti tunneling for small obstacles
      for (int ct = 1; ct < antitunneling; ct++) {
        Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt *
                                     (sumNumerator / sumDenominator));
        Vec3i tempPos = toVec3i(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt *
                                (sumNumerator / sumDenominator));
        if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
          pts_sec.kill(idx);
          return;
@@ -1863,7 +1863,7 @@ struct knFlipDeleteParticlesInObstacle : public KernelBase {
      return;

    const Vec3 &xi = pts[idx].pos;
    const Vec3i xidx = toVec3iFloor(xi);
    const Vec3i xidx = toVec3i(xi);
    // remove particles that completely left the bounds
    if (!flags.isInBounds(xidx)) {
      pts.kill(idx);

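Reviewer note: the repeated "anti tunneling" loops above sub-sample a particle's path so that a fast particle cannot skip over a thin obstacle within one time step. A 1D toy version of the idea; all names here are invented, and 'antitunneling' is assumed to be the number of sub-samples per step, matching its use in the kernels:

```cpp
#include <cmath>
#include <cstdio>

// Test intermediate positions along one step of length dt, like tempPos above.
bool hitsObstacle1D(float pos, float vel, float dt, int antitunneling,
                    bool (*isObstacle)(int cell))
{
  for (int ct = 1; ct < antitunneling; ct++) {
    const float sub = pos + ct * (1.0f / antitunneling) * dt * vel;
    if (isObstacle((int)std::floor(sub)))
      return true;  // particle would have tunneled through this cell
  }
  return false;
}

static bool thinWallAtCell5(int cell) { return cell == 5; }

int main()
{
  // A fast particle moving from cell 2 to cell 8 in a single step: without
  // sub-sampling, the thin obstacle at cell 5 would never be tested.
  std::printf("%d\n", hitsObstacle1D(2.5f, 60.0f, 0.1f, 8, &thinWallAtCell5));  // prints 1
  return 0;
}
```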
@@ -2145,7 +2145,8 @@ void PbRegister_particleSurfaceTurbulence()
void debugCheckParts(const BasicParticleSystem &parts, const FlagGrid &flags)
{
  for (int idx = 0; idx < parts.size(); idx++) {
    if (!flags.isInBounds(parts.getPos(idx))) {
    Vec3i p = toVec3i(parts.getPos(idx));
    if (!flags.isInBounds(p)) {
      debMsg("bad position??? " << idx << " " << parts.getPos(idx), 1);
      exit(1);
    }

@@ -139,7 +139,7 @@ struct KnEstimateVolumeFraction : public KernelBase {
  {
    const Vec3 centre = startCentre + Vec3(i, j, k) * 0.5;
    const Real offset = 0.5 * dx;
    const int order = 1; // is sufficient
    const int order = 2;

    Real phi000 = phi.getInterpolatedHi(centre + Vec3(-offset, -offset, -offset), order);
    Real phi001 = phi.getInterpolatedHi(centre + Vec3(-offset, -offset, +offset), order);
@@ -1067,8 +1067,10 @@ void solveViscosity(const FlagGrid &flags,

      Real viscTop = 0.25 * (viscosity(i, j, k) + viscosity(i, j, k - 1) + viscosity(i, j + 1, k) +
                             viscosity(i, j + 1, k - 1));
      ;
      Real viscBottom = 0.25 * (viscosity(i, j, k) + viscosity(i, j, k - 1) +
                                viscosity(i, j - 1, k) + viscosity(i, j - 1, k - 1));
      ;
      Real volTop = exVolLiquid(i, j + 1, k);
      Real volBottom = exVolLiquid(i, j, k);

@@ -1222,7 +1224,7 @@ void solveViscosity(const FlagGrid &flags,
        uSolution, uRhs, uResidual, uSearch, flags, uTmp, uMatA, uVecRhs);
  }
  else {
    errMsg("Viscosity: 2D Matrix application not yet supported in viscosity solver");
    errMsg("2D Matrix application not yet supported in viscosity solver");
  }

  // CG solver for V
@@ -1247,7 +1249,7 @@ void solveViscosity(const FlagGrid &flags,
        vSolution, vRhs, vResidual, vSearch, flags, vTmp, vMatA, vVecRhs);
  }
  else {
    errMsg("Viscosity: 2D Matrix application not yet supported in viscosity solver");
    errMsg("2D Matrix application not yet supported in viscosity solver");
  }

  // CG solver for W
@@ -1272,7 +1274,7 @@ void solveViscosity(const FlagGrid &flags,
        wSolution, wRhs, wResidual, wSearch, flags, wTmp, wMatA, wVecRhs);
  }
  else {
    errMsg("Viscosity: 2D Matrix application not yet supported in viscosity solver");
    errMsg("2D Matrix application not yet supported in viscosity solver");
  }

  // Same accuracy for all dimensions
@@ -1306,7 +1308,7 @@ void solveViscosity(const FlagGrid &flags,
    wRhs.copyFrom(wSearch);
  }
  debMsg(
      "Viscosity: solveViscosity() done. "
      "Viscosity::solveViscosity done. "
      "Iterations (u,v,w): ("
          << uGcg->getIterations() << "," << vGcg->getIterations() << "," << wGcg->getIterations()
          << "), "

extern/quadriflow/CMakeLists.txt (vendored, 2 changes)

@@ -102,7 +102,7 @@ set(SRC
)

set(LIB
  ${BOOST_LIBRARIES}
  ${BOOST_LIBRARIES}
)

blender_add_lib(extern_quadriflow "${SRC}" "${INC}" "${INC_SYS}" "${LIB}")

@@ -14,7 +14,7 @@

# Standalone or with Blender
if(NOT WITH_BLENDER AND WITH_CYCLES_STANDALONE)
  set(CYCLES_INSTALL_PATH ${CMAKE_INSTALL_PREFIX})
  set(CYCLES_INSTALL_PATH "")
else()
  set(WITH_CYCLES_BLENDER ON)
  # WINDOWS_PYTHON_DEBUG needs to write into the user addons folder since it will
@@ -64,7 +64,7 @@ if(WITH_CYCLES_NATIVE_ONLY)
    endif()
    set(CYCLES_KERNEL_FLAGS "${MSVC_NATIVE_ARCH_FLAGS}")
  endif()
elseif(NOT WITH_CPU_SIMD OR (SUPPORT_NEON_BUILD AND SSE2NEON_FOUND))
elseif(NOT WITH_CPU_SSE)
  set(CXX_HAS_SSE FALSE)
  set(CXX_HAS_AVX FALSE)
  set(CXX_HAS_AVX2 FALSE)
@@ -379,9 +379,6 @@ endif()
# Subdirectories

if(WITH_CYCLES_BLENDER)
  # Not needed to make cycles automated tests pass with -march=native.
  # However Blender itself needs this flag.
  remove_cc_flag("-ffp-contract=off")
  add_definitions(-DWITH_BLENDER_GUARDEDALLOC)
  add_subdirectory(blender)
endif()

@@ -103,7 +103,7 @@ static bool compile_cuda(CompilationSettings &settings)
    return false;
  }

  /* Transfer options to a classic C array. */
  /* Tranfer options to a classic C array. */
  vector<const char *> opts(options.size());
  for (size_t i = 0; i < options.size(); i++) {
    opts[i] = options[i].c_str();

@@ -133,12 +133,12 @@ static void scene_init()

  /* Camera width/height override? */
  if (!(options.width == 0 || options.height == 0)) {
    options.scene->camera->set_full_width(options.width);
    options.scene->camera->set_full_height(options.height);
    options.scene->camera->width = options.width;
    options.scene->camera->height = options.height;
  }
  else {
    options.width = options.scene->camera->get_full_width();
    options.height = options.scene->camera->get_full_height();
    options.width = options.scene->camera->width;
    options.height = options.scene->camera->height;
  }

  /* Calculate Viewplane */
@@ -233,7 +233,7 @@ static void display()
static void motion(int x, int y, int button)
{
  if (options.interactive) {
    Transform matrix = options.session->scene->camera->get_matrix();
    Transform matrix = options.session->scene->camera->matrix;

    /* Translate */
    if (button == 0) {
@@ -251,8 +251,8 @@ static void motion(int x, int y, int button)
    }

    /* Update and Reset */
    options.session->scene->camera->set_matrix(matrix);
    options.session->scene->camera->need_flags_update = true;
    options.session->scene->camera->matrix = matrix;
    options.session->scene->camera->need_update = true;
    options.session->scene->camera->need_device_update = true;

    options.session->reset(session_buffer_params(), options.session_params.samples);
@@ -266,10 +266,10 @@ static void resize(int width, int height)

  if (options.session) {
    /* Update camera */
    options.session->scene->camera->set_full_width(options.width);
    options.session->scene->camera->set_full_height(options.height);
    options.session->scene->camera->width = width;
    options.session->scene->camera->height = height;
    options.session->scene->camera->compute_auto_viewplane();
    options.session->scene->camera->need_flags_update = true;
    options.session->scene->camera->need_update = true;
    options.session->scene->camera->need_device_update = true;

    options.session->reset(session_buffer_params(), options.session_params.samples);
@@ -302,7 +302,7 @@ static void keyboard(unsigned char key)

  /* Navigation */
  else if (options.interactive && (key == 'w' || key == 'a' || key == 's' || key == 'd')) {
    Transform matrix = options.session->scene->camera->get_matrix();
    Transform matrix = options.session->scene->camera->matrix;
    float3 translate;

    if (key == 'w')
@@ -317,8 +317,8 @@ static void keyboard(unsigned char key)
    matrix = matrix * transform_translate(translate);

    /* Update and Reset */
    options.session->scene->camera->set_matrix(matrix);
    options.session->scene->camera->need_flags_update = true;
    options.session->scene->camera->matrix = matrix;
    options.session->scene->camera->need_update = true;
    options.session->scene->camera->need_device_update = true;

    options.session->reset(session_buffer_params(), options.session_params.samples);
@@ -345,7 +345,10 @@ static void keyboard(unsigned char key)
      break;
  }

  options.session->scene->integrator->set_max_bounce(bounce);
  options.session->scene->integrator->max_bounce = bounce;

  /* Update and Reset */
  options.session->scene->integrator->need_update = true;

  options.session->reset(session_buffer_params(), options.session_params.samples);
}

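Reviewer note: both sides of these hunks follow the same dirty-flag protocol: mutate camera or integrator state, raise the need_*update flags, then reset the session so the change reaches the render device. A toy sketch of that pattern; this is not Cycles' actual Camera class, only an illustration of the flags' intent:

```cpp
#include <cstdio>

// Invented stand-in mirroring the intent of need_update/need_device_update.
struct ToyCamera {
  float matrix[16] = {};
  bool need_update = false;
  bool need_device_update = false;

  void set_translation(float x)
  {
    matrix[12] = x;
    // Mutating scene state only marks it dirty; the session picks the
    // flags up on its next reset and re-uploads the camera to the device.
    need_update = true;
    need_device_update = true;
  }
};

int main()
{
  ToyCamera cam;
  cam.set_translation(1.0f);
  std::printf("update=%d device=%d\n", cam.need_update, cam.need_device_update);
  return 0;
}
```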
@@ -190,18 +190,17 @@ static void xml_read_camera(XMLReadState &state, xml_node node)
{
  Camera *cam = state.scene->camera;

  int width = -1, height = -1;
  xml_read_int(&width, node, "width");
  xml_read_int(&height, node, "height");
  xml_read_int(&cam->width, node, "width");
  xml_read_int(&cam->height, node, "height");

  cam->set_full_width(width);
  cam->set_full_height(height);
  cam->full_width = cam->width;
  cam->full_height = cam->height;

  xml_read_node(state, cam, node);

  cam->set_matrix(state.tfm);
  cam->matrix = state.tfm;

  cam->need_flags_update = true;
  cam->need_update = true;
  cam->update(state.scene);
}

@@ -339,13 +338,11 @@ static void xml_read_shader_graph(XMLReadState &state, Shader *shader, xml_node

    if (node_name == "image_texture") {
      ImageTextureNode *img = (ImageTextureNode *)snode;
      ustring filename(path_join(state.base, img->get_filename().string()));
      img->set_filename(filename);
      img->filename = path_join(state.base, img->filename.string());
    }
    else if (node_name == "environment_texture") {
      EnvironmentTextureNode *env = (EnvironmentTextureNode *)snode;
      ustring filename(path_join(state.base, env->get_filename().string()));
      env->set_filename(filename);
      env->filename = path_join(state.base, env->filename.string());
    }

    if (snode) {
@@ -387,8 +384,8 @@ static Mesh *xml_add_mesh(Scene *scene, const Transform &tfm)

  /* create object*/
  Object *object = new Object();
  object->set_geometry(mesh);
  object->set_tfm(tfm);
  object->geometry = mesh;
  object->tfm = tfm;
  scene->objects.push_back(object);

  return mesh;
@@ -398,9 +395,7 @@ static void xml_read_mesh(const XMLReadState &state, xml_node node)
{
  /* add mesh */
  Mesh *mesh = xml_add_mesh(state.scene, state.tfm);
  array<Node *> used_shaders = mesh->get_used_shaders();
  used_shaders.push_back_slow(state.shader);
  mesh->set_used_shaders(used_shaders);
  mesh->used_shaders.push_back(state.shader);

  /* read state */
  int shader = 0;
@@ -416,24 +411,20 @@ static void xml_read_mesh(const XMLReadState &state, xml_node node)
  xml_read_int_array(nverts, node, "nverts");

  if (xml_equal_string(node, "subdivision", "catmull-clark")) {
    mesh->set_subdivision_type(Mesh::SUBDIVISION_CATMULL_CLARK);
    mesh->subdivision_type = Mesh::SUBDIVISION_CATMULL_CLARK;
  }
  else if (xml_equal_string(node, "subdivision", "linear")) {
    mesh->set_subdivision_type(Mesh::SUBDIVISION_LINEAR);
    mesh->subdivision_type = Mesh::SUBDIVISION_LINEAR;
  }

  array<float3> P_array;
  P_array = P;

  if (mesh->get_subdivision_type() == Mesh::SUBDIVISION_NONE) {
  if (mesh->subdivision_type == Mesh::SUBDIVISION_NONE) {
    /* create vertices */

    mesh->set_verts(P_array);
    mesh->verts = P;

    size_t num_triangles = 0;
    for (size_t i = 0; i < nverts.size(); i++)
      num_triangles += nverts[i] - 2;
    mesh->reserve_mesh(mesh->get_verts().size(), num_triangles);
    mesh->reserve_mesh(mesh->verts.size(), num_triangles);

    /* create triangles */
    int index_offset = 0;
@@ -483,7 +474,7 @@ static void xml_read_mesh(const XMLReadState &state, xml_node node)
  }
  else {
    /* create vertices */
    mesh->set_verts(P_array);
    mesh->verts = P;

    size_t num_ngons = 0;
    size_t num_corners = 0;
@@ -522,20 +513,23 @@ static void xml_read_mesh(const XMLReadState &state, xml_node node)
    }

    /* setup subd params */
    float dicing_rate = state.dicing_rate;
    xml_read_float(&dicing_rate, node, "dicing_rate");
    dicing_rate = std::max(0.1f, dicing_rate);
    if (!mesh->subd_params) {
      mesh->subd_params = new SubdParams(mesh);
    }
    SubdParams &sdparams = *mesh->subd_params;

    mesh->set_subd_dicing_rate(dicing_rate);
    mesh->set_subd_objecttoworld(state.tfm);
    sdparams.dicing_rate = state.dicing_rate;
    xml_read_float(&sdparams.dicing_rate, node, "dicing_rate");
    sdparams.dicing_rate = std::max(0.1f, sdparams.dicing_rate);

    sdparams.objecttoworld = state.tfm;
  }

  /* we don't yet support arbitrary attributes, for now add vertex
   * coordinates as generated coordinates if requested */
  if (mesh->need_attribute(state.scene, ATTR_STD_GENERATED)) {
    Attribute *attr = mesh->attributes.add(ATTR_STD_GENERATED);
    memcpy(
        attr->data_float3(), mesh->get_verts().data(), sizeof(float3) * mesh->get_verts().size());
    memcpy(attr->data_float3(), mesh->verts.data(), sizeof(float3) * mesh->verts.size());
  }
}

@@ -545,7 +539,7 @@ static void xml_read_light(XMLReadState &state, xml_node node)
{
  Light *light = new Light();

  light->set_shader(state.shader);
  light->shader = state.shader;
  xml_read_node(state, light, node);

  state.scene->lights.push_back(light);
@@ -564,19 +558,19 @@ static void xml_read_transform(xml_node node, Transform &tfm)
  }

  if (node.attribute("translate")) {
    float3 translate = zero_float3();
    float3 translate = make_float3(0.0f, 0.0f, 0.0f);
    xml_read_float3(&translate, node, "translate");
    tfm = tfm * transform_translate(translate);
  }

  if (node.attribute("rotate")) {
    float4 rotate = zero_float4();
    float4 rotate = make_float4(0.0f, 0.0f, 0.0f, 0.0f);
    xml_read_float4(&rotate, node, "rotate");
    tfm = tfm * transform_rotate(DEG2RADF(rotate.x), make_float3(rotate.y, rotate.z, rotate.w));
  }

  if (node.attribute("scale")) {
    float3 scale = zero_float3();
    float3 scale = make_float3(0.0f, 0.0f, 0.0f);
    xml_read_float3(&scale, node, "scale");
    tfm = tfm * transform_scale(scale);
  }

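Reviewer note: xml_read_transform composes the translate/rotate/scale attributes by right-multiplying each onto the accumulated transform, so every attribute applies in the local frame of everything read before it, and attribute order matters. A 1D stand-in showing that order dependence; all names here are invented:

```cpp
#include <cstdio>

// 1D affine map x -> scale * x + offset, composed like the Transforms above.
struct Xform1D {
  float scale = 1.0f, offset = 0.0f;
};

Xform1D operator*(const Xform1D &a, const Xform1D &b)
{
  // Apply b in a's local frame: x -> a(b(x)).
  return {a.scale * b.scale, a.offset + a.scale * b.offset};
}

int main()
{
  const Xform1D translate{1.0f, 2.0f};  // x + 2
  const Xform1D scaling{3.0f, 0.0f};    // 3 * x
  const Xform1D tfm = translate * scaling;  // translate attr read first, then scale
  std::printf("scale=%g offset=%g\n", tfm.scale, tfm.offset);  // 3 and 2: x -> 3x + 2
  return 0;
}
```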
@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

bl_info = {
    "name": "Cycles Render Engine",
@@ -60,7 +59,6 @@ class CyclesRender(bpy.types.RenderEngine):
    bl_use_exclude_layers = True
    bl_use_save_buffers = True
    bl_use_spherical_stereo = True
    bl_use_custom_freestyle = True

    def __init__(self):
        self.session = None

@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations


def _is_using_buggy_driver():
@@ -302,7 +301,7 @@ def list_render_passes(scene, srl):
            yield ("Denoising Clean", "RGB", 'COLOR')

    # Custom AOV passes.
    for aov in srl.aovs:
    for aov in crl.aovs:
        if aov.type == 'VALUE':
            yield (aov.name, "X", 'VALUE')
        else:
@@ -310,5 +309,22 @@ def list_render_passes(scene, srl):


def register_passes(engine, scene, view_layer):
    # Detect duplicate render pass names, first one wins.
    listed = set()
    for name, channelids, channeltype in list_render_passes(scene, view_layer):
        engine.register_pass(scene, view_layer, name, len(channelids), channelids, channeltype)
        if name not in listed:
            engine.register_pass(scene, view_layer, name, len(channelids), channelids, channeltype)
            listed.add(name)


def detect_conflicting_passes(scene, view_layer):
    # Detect conflicting render pass names for UI.
    counter = {}
    for name, _, _ in list_render_passes(scene, view_layer):
        counter[name] = counter.get(name, 0) + 1

    for aov in view_layer.cycles.aovs:
        if counter[aov.name] > 1:
            aov.conflict = "Conflicts with another render pass with the same name"
        else:
            aov.conflict = ""

@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

import bpy
from bpy.types import Operator
@@ -45,6 +44,36 @@ class CYCLES_OT_use_shading_nodes(Operator):
        return {'FINISHED'}


class CYCLES_OT_add_aov(bpy.types.Operator):
    """Add an AOV pass"""
    bl_idname = "cycles.add_aov"
    bl_label = "Add AOV"

    def execute(self, context):
        view_layer = context.view_layer
        cycles_view_layer = view_layer.cycles

        cycles_view_layer.aovs.add()

        view_layer.update_render_passes()
        return {'FINISHED'}


class CYCLES_OT_remove_aov(bpy.types.Operator):
    """Remove an AOV pass"""
    bl_idname = "cycles.remove_aov"
    bl_label = "Remove AOV"

    def execute(self, context):
        view_layer = context.view_layer
        cycles_view_layer = view_layer.cycles

        cycles_view_layer.aovs.remove(cycles_view_layer.active_aov)

        view_layer.update_render_passes()
        return {'FINISHED'}


class CYCLES_OT_denoise_animation(Operator):
    "Denoise rendered animation sequence using current scene and view " \
    "layer settings. Requires denoising data passes and output to " \
@@ -168,6 +197,8 @@ class CYCLES_OT_merge_images(Operator):

classes = (
    CYCLES_OT_use_shading_nodes,
    CYCLES_OT_add_aov,
    CYCLES_OT_remove_aov,
    CYCLES_OT_denoise_animation,
    CYCLES_OT_merge_images
)

@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

import bpy
import _cycles

@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

from bl_operators.presets import AddPresetBase
from bpy.types import Operator

Some files were not shown because too many files have changed in this diff.