Compare commits: temp-deriv...temp-geome

9 commits:
- 8b11d36cda
- d726aaec13
- c407647469
- c9383993f8
- 89d5710830
- 7746c562a4
- 525d36813c
- 5494ad43fa
- 8268e733f6
.clang-tidy

@@ -39,8 +39,9 @@ Checks: >
-modernize-use-nodiscard,
-modernize-loop-convert,
-modernize-pass-by-value,
-modernize-use-default-member-init,
-modernize-raw-string-literal,
-modernize-avoid-bind,
-modernize-use-transparent-functors,

WarningsAsErrors: '*'
CheckOptions:
- key: modernize-use-default-member-init.UseAssignment
value: 1
CMakeLists.txt (104 changed lines)
@@ -63,9 +63,6 @@ list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/build_files/cmake/platform")
# avoid having empty buildtype
if(NOT DEFINED CMAKE_BUILD_TYPE_INIT)
set(CMAKE_BUILD_TYPE_INIT "Release")
# Internal logic caches this variable, avoid showing it by default
# since it's easy to accidentally set instead of the build type.
mark_as_advanced(CMAKE_BUILD_TYPE_INIT)
endif()

# Omit superfluous "Up-to-date" messages.

@@ -167,6 +164,10 @@ if(APPLE)
endif()

option(WITH_BUILDINFO "Include extra build details (only disable for development & faster builds)" ON)
if(${CMAKE_VERSION} VERSION_LESS 2.8.8)
# add_library OBJECT arg unsupported
set(WITH_BUILDINFO OFF)
endif()
set(BUILDINFO_OVERRIDE_DATE "" CACHE STRING "Use instead of the current date for reproducible builds (empty string disables this option)")
set(BUILDINFO_OVERRIDE_TIME "" CACHE STRING "Use instead of the current time for reproducible builds (empty string disables this option)")
set(CPACK_OVERRIDE_PACKAGENAME "" CACHE STRING "Use instead of the standard packagename (empty string disables this option)")

@@ -370,8 +371,8 @@ if(WITH_PYTHON_INSTALL)
endif()
endif()

option(WITH_CPU_SIMD "Enable SIMD instruction if they're detected on the host machine" ON)
mark_as_advanced(WITH_CPU_SIMD)
option(WITH_CPU_SSE "Enable SIMD instruction if they're detected on the host machine" ON)
mark_as_advanced(WITH_CPU_SSE)

# Cycles
option(WITH_CYCLES "Enable Cycles Render Engine" ON)

@@ -416,7 +417,6 @@ else()
option(LLVM_STATIC "Link with LLVM static libraries" OFF)
endif()
mark_as_advanced(LLVM_STATIC)
option(WITH_CLANG "Use Clang" OFF)

# disable for now, but plan to support on all platforms eventually
option(WITH_MEM_JEMALLOC "Enable malloc replacement (http://www.canonware.com/jemalloc)" ON)

@@ -614,7 +614,6 @@ endif()
if(UNIX)
# See WITH_WINDOWS_SCCACHE for Windows.
option(WITH_COMPILER_CCACHE "Use ccache to improve rebuild times (Works with Ninja, Makefiles and Xcode)" OFF)
mark_as_advanced(WITH_COMPILER_CCACHE)
endif()

# The following only works with the Ninja generator in CMake >= 3.0.

@@ -729,9 +728,8 @@ set_and_warn_dependency(WITH_TBB WITH_MOD_FLUID OFF)
# NanoVDB requires OpenVDB to convert the data structure
set_and_warn_dependency(WITH_OPENVDB WITH_NANOVDB OFF)

# OpenVDB and OpenColorIO uses 'half' type from OpenEXR
# OpenVDB uses 'half' type from OpenEXR & fails to link without OpenEXR enabled.
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENVDB OFF)
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENCOLORIO OFF)

# Haru needs `TIFFFaxBlackCodes` & `TIFFFaxWhiteCodes` symbols from TIFF.
set_and_warn_dependency(WITH_IMAGE_TIFF WITH_HARU OFF)

@@ -743,7 +741,6 @@ if(WITH_CYCLES)
# auto enable llvm for cycles_osl
if(WITH_CYCLES_OSL)
set(WITH_LLVM ON CACHE BOOL "" FORCE)
set(WITH_CLANG ON CACHE BOOL "" FORCE)
endif()
else()
set(WITH_CYCLES_OSL OFF)

@@ -777,6 +774,14 @@ if(WITH_GHOST_SDL OR WITH_HEADLESS)
set(WITH_XR_OPENXR OFF)
endif()

if(WITH_CPU_SSE)
TEST_SSE_SUPPORT(COMPILER_SSE_FLAG COMPILER_SSE2_FLAG)
else()
message(STATUS "SSE and SSE2 optimizations are DISABLED!")
set(COMPILER_SSE_FLAG)
set(COMPILER_SSE2_FLAG)
endif()

if(WITH_BUILDINFO)
find_package(Git)
if(NOT GIT_FOUND)

@@ -825,8 +830,8 @@ if(WITH_PYTHON)
# Do this before main 'platform_*' checks,
# because UNIX will search for the old Python paths which may not exist.
# giving errors about missing paths before this case is met.
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.9")
message(FATAL_ERROR "At least Python 3.9 is required to build")
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.7")
message(FATAL_ERROR "At least Python 3.7 is required to build")
endif()

file(GLOB RESULT "${CMAKE_SOURCE_DIR}/release/scripts/addons")

@@ -901,18 +906,6 @@ if(NOT CMAKE_BUILD_TYPE MATCHES "Release")
endif()
endif()

# Test SIMD support, before platform includes to determine if sse2neon is needed.
if(WITH_CPU_SIMD)
set(COMPILER_SSE_FLAG)
set(COMPILER_SSE2_FLAG)

# Test Neon first since macOS Arm can compile and run x86-64 SSE binaries.
TEST_NEON_SUPPORT()
if(NOT SUPPORT_NEON_BUILD)
TEST_SSE_SUPPORT(COMPILER_SSE_FLAG COMPILER_SSE2_FLAG)
endif()
endif()

# ----------------------------------------------------------------------------
# Main Platform Checks
#

@@ -968,49 +961,22 @@ if(WITH_INTERNATIONAL)
endif()
endif()

# Enable SIMD support if detected by TEST_SSE_SUPPORT() or TEST_NEON_SUPPORT().
#
# This is done globally, so that all modules can use it if available, and
# because these are used in headers used by many modules.
if(WITH_CPU_SIMD)
if(SUPPORT_NEON_BUILD)
# Neon
if(SSE2NEON_FOUND)
blender_include_dirs_sys("${SSE2NEON_INCLUDE_DIRS}")
add_definitions(-DWITH_SSE2NEON)
endif()
else()
# SSE
if(SUPPORT_SSE_BUILD)
string(PREPEND PLATFORM_CFLAGS "${COMPILER_SSE_FLAG} ")
add_definitions(-D__SSE__ -D__MMX__)
endif()
if(SUPPORT_SSE2_BUILD)
string(APPEND PLATFORM_CFLAGS " ${COMPILER_SSE2_FLAG}")
add_definitions(-D__SSE2__)
if(NOT SUPPORT_SSE_BUILD) # don't double up
add_definitions(-D__MMX__)
endif()
endif()
endif()
# See TEST_SSE_SUPPORT() for how this is defined.

# Print instructions used
if(SUPPORT_NEON_BUILD)
if(SSE2NEON_FOUND)
message(STATUS "Neon SIMD instructions enabled")
else()
message(STATUS "Neon SIMD instructions detected but unused, requires sse2neon")
endif()
elseif(SUPPORT_SSE2_BUILD)
message(STATUS "SSE2 SIMD instructions enabled")
elseif(SUPPORT_SSE_BUILD)
message(STATUS "SSE SIMD instructions enabled")
else()
message(STATUS "No SIMD instructions detected")
endif()
else()
message(STATUS "SIMD instructions disabled")
# Do it globally, SSE2 is required for quite some time now.
# Doing it now allows to use SSE/SSE2 in inline headers.
if(SUPPORT_SSE_BUILD)
string(PREPEND PLATFORM_CFLAGS "${COMPILER_SSE_FLAG} ")
add_definitions(-D__SSE__ -D__MMX__)
endif()
if(SUPPORT_SSE2_BUILD)
string(APPEND PLATFORM_CFLAGS " ${COMPILER_SSE2_FLAG}")
add_definitions(-D__SSE2__)
if(NOT SUPPORT_SSE_BUILD) # don't double up
add_definitions(-D__MMX__)
endif()
endif()

# set the endian define
if(MSVC)

@@ -1056,9 +1022,6 @@ if(WITH_OPENVDB)
list(APPEND OPENVDB_DEFINITIONS -DOPENVDB_3_ABI_COMPATIBLE)
endif()

# OpenVDB headers use deprecated TBB headers, silence warning.
list(APPEND OPENVDB_DEFINITIONS -DTBB_SUPPRESS_DEPRECATED_MESSAGES=1)

list(APPEND OPENVDB_INCLUDE_DIRS
${BOOST_INCLUDE_DIR}
${TBB_INCLUDE_DIRS}

@@ -1221,7 +1184,6 @@ if(WITH_OPENMP)
if(NOT WITH_OPENMP_STATIC)
string(APPEND CMAKE_C_FLAGS " ${OpenMP_C_FLAGS}")
string(APPEND CMAKE_CXX_FLAGS " ${OpenMP_CXX_FLAGS}")
string(APPEND CMAKE_EXE_LINKER_FLAGS " ${OpenMP_LINKER_FLAGS}")
else()
# Typically avoid adding flags as defines but we can't
# pass OpenMP flags to the linker for static builds, meaning

@@ -1475,7 +1437,6 @@ if(CMAKE_COMPILER_IS_GNUCC)
# gcc 4.2 gives annoying warnings on every file with this
if(NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "4.3")
ADD_CHECK_C_COMPILER_FLAG(C_WARNINGS C_WARN_UNINITIALIZED -Wuninitialized)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_WARNINGS CXX_WARN_UNINITIALIZED -Wuninitialized)
endif()

# versions before gcc4.6 give many BLI_math warnings

@@ -1540,13 +1501,11 @@ if(CMAKE_COMPILER_IS_GNUCC)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_FORMAT -Wno-format)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_SWITCH -Wno-switch)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_UNUSED_VARIABLE -Wno-unused-variable)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_UNUSED_VARIABLE -Wno-uninitialized)

ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_CLASS_MEMACCESS -Wno-class-memaccess)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_COMMENT -Wno-comment)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_TYPEDEFS -Wno-unused-local-typedefs)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_VARIABLE -Wno-unused-variable)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_VARIABLE -Wno-uninitialized)

if(CMAKE_COMPILER_IS_GNUCC AND (NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "7.0"))
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_IMPLICIT_FALLTHROUGH -Wno-implicit-fallthrough)

@@ -1890,7 +1849,6 @@ if(FIRST_RUN)
info_cfg_text("Build Options:")
info_cfg_option(WITH_ALEMBIC)
info_cfg_option(WITH_BULLET)
info_cfg_option(WITH_CLANG)
info_cfg_option(WITH_CYCLES)
info_cfg_option(WITH_FFTW3)
info_cfg_option(WITH_FREESTYLE)
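The SIMD hunks above rely on TEST_SSE_SUPPORT() and TEST_NEON_SUPPORT(), macros that live in Blender's own CMake modules and are not shown in this diff. As a rough illustration of the underlying technique only (not Blender's actual implementation), such a probe can be built on CMake's CheckCXXSourceCompiles module; every SKETCH_* variable below is hypothetical, and the -msse2 flag assumes a GCC/Clang-style compiler.

```cmake
# Minimal sketch of a SIMD-detection probe, assuming nothing beyond stock CMake.
include(CheckCXXSourceCompiles)

# Probe Neon first: as the hunk above notes, macOS on Arm can also compile and
# run x86-64 SSE test programs, so an SSE probe alone is not conclusive there.
check_cxx_source_compiles("
  #include <arm_neon.h>
  int main() { int32x4_t v = vdupq_n_s32(1); return vgetq_lane_s32(v, 0) - 1; }
" SKETCH_SUPPORT_NEON_BUILD)

if(NOT SKETCH_SUPPORT_NEON_BUILD)
  # Fall back to an SSE2 probe and remember the flag that made it compile.
  set(CMAKE_REQUIRED_FLAGS "-msse2")  # assumed GCC/Clang spelling of the flag
  check_cxx_source_compiles("
    #include <emmintrin.h>
    int main() { __m128i v = _mm_setzero_si128(); return _mm_cvtsi128_si32(v); }
  " SKETCH_SUPPORT_SSE2_BUILD)
  unset(CMAKE_REQUIRED_FLAGS)
  if(SKETCH_SUPPORT_SSE2_BUILD)
    set(SKETCH_COMPILER_SSE2_FLAG "-msse2")
  endif()
endif()
```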
GNUmakefile (22 changed lines)
@@ -26,31 +26,26 @@

define HELP_TEXT

Blender Convenience Targets
Convenience Targets
Provided for building Blender, (multiple at once can be used).

* debug: Build a debug binary.
* full: Enable all supported dependencies & options.
* lite: Disable non essential features for a smaller binary and faster build.
* release: Complete build with all options enabled including CUDA and Optix, matching the releases on blender.org
* release Complete build with all options enabled including CUDA and Optix, matching the releases on blender.org
* headless: Build without an interface (renderfarm or server automation).
* cycles: Build Cycles standalone only, without Blender.
* bpy: Build as a python module which can be loaded from python directly.
* deps: Build library dependencies (intended only for platform maintainers).

* developer: Enable faster builds, error checking and tests, recommended for developers.
* config: Run cmake configuration tool to set build options.
* ninja: Use ninja build tool for faster builds.
* ccache: Use ccache for faster rebuilds.

Note: passing the argument 'BUILD_DIR=path' when calling make will override the default build dir.
Note: passing the argument 'BUILD_CMAKE_ARGS=args' lets you add cmake arguments.

Other Convenience Targets
Provided for other building operations.

* config: Run cmake configuration tool to set build options.
* deps: Build library dependencies (intended only for platform maintainers).

The existance of locally build dependancies overrides the pre-built dependencies from subversion.
These must be manually removed from '../lib/' to go back to using the pre-compiled libraries.

Project Files
Generate project files for development environments.

@@ -90,15 +85,12 @@ Static Source Code Checking
* check_descriptions: Check for duplicate/invalid descriptions.

Spell Checkers
This runs the spell checker from the developer tools repositor.

* check_spelling_c: Check for spelling errors (C/C++ only),
* check_spelling_osl: Check for spelling errors (OSL only).
* check_spelling_py: Check for spelling errors (Python only).

Note: an additional word-list is maintained at: 'source/tools/check_source/check_spelling_c_config.py'

Note: that spell checkers can take a 'CHECK_SPELLING_CACHE' filepath argument,
Note that spell checkers can take a 'CHECK_SPELLING_CACHE' filepath argument,
so re-running does not need to re-check unchanged files.

Example:

@@ -131,7 +123,7 @@ Utilities
* update:
updates git and all submodules

* format:
* format
Format source code using clang (uses PATHS if passed in). For example::

make format PATHS="source/blender/blenlib source/blender/blenkernel"
@@ -31,7 +31,6 @@
#
# MAC OS X USAGE:
# Install with homebrew: brew install autoconf automake bison cmake libtool pkg-config yasm
# Additional requirements for macOS arm64: brew install flex
# Run "make deps" from main Blender directory
#
# LINUX USAGE:

@@ -71,7 +70,9 @@ include(cmake/cuew.cmake)
include(cmake/opensubdiv.cmake)
include(cmake/sdl.cmake)
include(cmake/opencollada.cmake)
include(cmake/opencolorio.cmake)
include(cmake/llvm.cmake)
include(cmake/clang.cmake)
if(APPLE)
include(cmake/openmp.cmake)
endif()

@@ -86,40 +87,36 @@ include(cmake/tbb.cmake)
include(cmake/openvdb.cmake)
include(cmake/nanovdb.cmake)
include(cmake/python.cmake)
option(USE_PIP_NUMPY "Install NumPy using pip wheel instead of building from source" OFF)
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
set(USE_PIP_NUMPY ON)
else()
include(cmake/numpy.cmake)
endif()
include(cmake/python_site_packages.cmake)
include(cmake/package_python.cmake)
include(cmake/numpy.cmake)
include(cmake/usd.cmake)
include(cmake/potrace.cmake)
include(cmake/haru.cmake)
# Boost needs to be included after python.cmake due to the PYTHON_BINARY variable being needed.
include(cmake/boost.cmake)
include(cmake/pugixml.cmake)
include(cmake/ispc.cmake)
include(cmake/openimagedenoise.cmake)
include(cmake/embree.cmake)
if((NOT APPLE) OR ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
include(cmake/ispc.cmake)
include(cmake/openimagedenoise.cmake)
include(cmake/embree.cmake)
endif()
if(NOT APPLE)
include(cmake/xr_openxr.cmake)
endif()

# OpenColorIO and dependencies.
include(cmake/expat.cmake)
include(cmake/yamlcpp.cmake)
include(cmake/opencolorio.cmake)

if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
include(cmake/sse2neon.cmake)
endif()

if(WITH_WEBP)
include(cmake/webp.cmake)
endif()

if(WIN32)
# OCIO deps
include(cmake/tinyxml.cmake)
include(cmake/yamlcpp.cmake)
# LCMS is an OCIO dep, but only if you build the apps, leaving it here for convenience
# include(cmake/lcms.cmake)
endif()

if(NOT WIN32 OR ENABLE_MINGW64)
include(cmake/gmp.cmake)
include(cmake/openjpeg.cmake)
@@ -19,6 +19,16 @@
set(ALEMBIC_EXTRA_ARGS
-DBUILDSTATIC=ON
-DLINKSTATIC=ON
-DALEMBIC_LIB_USES_BOOST=ON
-DBoost_COMPILER:STRING=${BOOST_COMPILER_STRING}
-DBoost_USE_MULTITHREADED=ON
-DUSE_STATIC_BOOST=On
-DBoost_USE_STATIC_LIBS=ON
-DBoost_USE_STATIC_RUNTIME=OFF
-DBoost_DEBUG=ON
-DBOOST_ROOT=${LIBDIR}/boost
-DBoost_NO_SYSTEM_PATHS=ON
-DBoost_NO_BOOST_CMAKE=ON
-DILMBASE_ROOT=${LIBDIR}/openexr
-DALEMBIC_ILMBASE_INCLUDE_DIRECTORY=${LIBDIR}/openexr/include/OpenEXR
-DALEMBIC_ILMBASE_HALF_LIB=${LIBDIR}/openexr/lib/${LIBPREFIX}Half${OPENEXR_VERSION_POSTFIX}${LIBEXT}

@@ -71,6 +81,7 @@ endif()

add_dependencies(
external_alembic
external_boost
external_zlib
external_openexr
)
@@ -23,7 +23,7 @@ if(WIN32)
set(BOOST_COMPILER_STRING -vc141)

set(BOOST_CONFIGURE_COMMAND bootstrap.bat)
set(BOOST_BUILD_COMMAND b2)
set(BOOST_BUILD_COMMAND bjam)
set(BOOST_BUILD_OPTIONS runtime-link=shared )
set(BOOST_HARVEST_CMD ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/boost/lib/ ${HARVEST_TARGET}/boost/lib/ )
if(BUILD_MODE STREQUAL Release)
@@ -33,16 +33,6 @@ if(UNIX)
yasm
)

if(NOT APPLE)
set(_required_software
${_required_software}

# Needed for Mesa.
meson
ninja
)
endif()

foreach(_software ${_required_software})
find_program(_software_find NAMES ${_software})
if(NOT _software_find)

@@ -70,7 +60,7 @@ if(UNIX)
" ${_software_missing}\n"
"\n"
"On Debian and Ubuntu:\n"
" apt install autoconf automake libtool yasm tcl ninja-build meson python3-mako\n"
" apt install autoconf automake libtool yasm tcl\n"
"\n"
"On macOS (with homebrew):\n"
" brew install autoconf automake bison libtool pkg-config yasm\n"
build_files/build_environment/cmake/clang.cmake (new file, 104 lines)
@@ -0,0 +1,104 @@
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENSE BLOCK *****

set(CLANG_EXTRA_ARGS
-DLLVM_DIR="${LIBDIR}/llvm/lib/cmake/llvm/"
-DLLVM_USE_CRT_RELEASE=MD
-DLLVM_USE_CRT_DEBUG=MDd
-DLLVM_CONFIG=${LIBDIR}/llvm/bin/llvm-config
)

set(BUILD_CLANG_TOOLS OFF)

if(WIN32)
set(CLANG_GENERATOR "Ninja")
else()
set(CLANG_GENERATOR "Unix Makefiles")
endif()

if(APPLE)
set(BUILD_CLANG_TOOLS ON)
set(CLANG_EXTRA_ARGS ${CLANG_EXTRA_ARGS}
-DLIBXML2_LIBRARY=${LIBDIR}/xml2/lib/libxml2.a
)
endif()

if(BUILD_CLANG_TOOLS)
# ExternalProject_Add does not allow multiple tarballs to be
# downloaded. Work around this by having an empty build action
# for the extra tools, and referring the clang build to the location
# of the clang-tools-extra source.
ExternalProject_Add(external_clang_tools
URL ${CLANG_TOOLS_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${CLANG_TOOLS_HASH}
INSTALL_DIR ${LIBDIR}/clang_tools
PREFIX ${BUILD_DIR}/clang_tools
CONFIGURE_COMMAND echo "."
BUILD_COMMAND echo "."
INSTALL_COMMAND echo "."
)
list(APPEND CLANG_EXTRA_ARGS
-DLLVM_EXTERNAL_CLANG_TOOLS_EXTRA_SOURCE_DIR=${BUILD_DIR}/clang_tools/src/external_clang_tools/
)
endif()

ExternalProject_Add(external_clang
URL ${CLANG_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${CLANG_HASH}
PREFIX ${BUILD_DIR}/clang
CMAKE_GENERATOR ${CLANG_GENERATOR}
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/clang ${DEFAULT_CMAKE_FLAGS} ${CLANG_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/clang
)

if(MSVC)
if(BUILD_MODE STREQUAL Release)
set(CLANG_HARVEST_COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/clang/ ${HARVEST_TARGET}/llvm/)
else()
set(CLANG_HARVEST_COMMAND
${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/clang/lib/ ${HARVEST_TARGET}/llvm/debug/lib/
)
endif()
ExternalProject_Add_Step(external_clang after_install
COMMAND ${CLANG_HARVEST_COMMAND}
DEPENDEES mkdir update patch download configure build install
)
endif()

add_dependencies(
external_clang
ll
)

if(BUILD_CLANG_TOOLS)
# `external_clang_tools` is for downloading the source, not compiling it.
add_dependencies(
external_clang
external_clang_tools
)
endif()

# We currently do not build libxml2 on Windows.
if(NOT WIN32)
add_dependencies(
external_clang
external_xml2
)
endif()
@@ -29,7 +29,6 @@ set(EMBREE_EXTRA_ARGS
-DEMBREE_MAX_ISA=AVX2
-DEMBREE_TASKING_SYSTEM=TBB
-DEMBREE_TBB_ROOT=${LIBDIR}/tbb
-DTBB_ROOT=${LIBDIR}/tbb
-DTBB_STATIC_LIB=${TBB_STATIC_LIBRARY}
)

@@ -47,26 +46,15 @@ else()
set(EMBREE_BUILD_DIR)
endif()

if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
ExternalProject_Add(external_embree
GIT_REPOSITORY ${EMBREE_ARM_GIT}
GIT_TAG "blender-arm"
DOWNLOAD_DIR ${DOWNLOAD_DIR}
PREFIX ${BUILD_DIR}/embree
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/embree
)
else()
ExternalProject_Add(external_embree
URL ${EMBREE_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${EMBREE_HASH}
PREFIX ${BUILD_DIR}/embree
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/embree/src/external_embree < ${PATCH_DIR}/embree.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/embree
)
endif()
ExternalProject_Add(external_embree
URL ${EMBREE_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${EMBREE_HASH}
PREFIX ${BUILD_DIR}/embree
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/embree/src/external_embree < ${PATCH_DIR}/embree.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/embree
)

add_dependencies(
external_embree
@@ -97,21 +97,18 @@ harvest(jemalloc/lib jemalloc/lib "*.a")
harvest(jpg/include jpeg/include "*.h")
harvest(jpg/lib jpeg/lib "libjpeg.a")
harvest(lame/lib ffmpeg/lib "*.a")
harvest(llvm/bin llvm/bin "clang-format")
harvest(clang/bin llvm/bin "clang-format")
if(BUILD_CLANG_TOOLS)
harvest(llvm/bin llvm/bin "clang-tidy")
harvest(llvm/share/clang llvm/share "run-clang-tidy.py")
harvest(clang/bin llvm/bin "clang-tidy")
harvest(clang/share/clang llvm/share "run-clang-tidy.py")
endif()
harvest(clang/include llvm/include "*")
harvest(llvm/include llvm/include "*")
harvest(llvm/bin llvm/bin "llvm-config")
harvest(llvm/lib llvm/lib "libLLVM*.a")
harvest(llvm/lib llvm/lib "libclang*.a")
if(APPLE)
harvest(openmp/lib openmp/lib "*")
harvest(openmp/include openmp/include "*.h")
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
harvest(sse2neon sse2neon "*.h")
endif()
endif()
harvest(ogg/lib ffmpeg/lib "*.a")
harvest(openal/include openal/include "*.h")

@@ -142,10 +139,12 @@ harvest(openimageio/bin openimageio/bin "maketx")
harvest(openimageio/bin openimageio/bin "oiiotool")
harvest(openimageio/include openimageio/include "*")
harvest(openimageio/lib openimageio/lib "*.a")
harvest(openimagedenoise/include openimagedenoise/include "*")
harvest(openimagedenoise/lib openimagedenoise/lib "*.a")
harvest(embree/include embree/include "*.h")
harvest(embree/lib embree/lib "*.a")
if((NOT APPLE) OR ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
harvest(openimagedenoise/include openimagedenoise/include "*")
harvest(openimagedenoise/lib openimagedenoise/lib "*.a")
harvest(embree/include embree/include "*.h")
harvest(embree/lib embree/lib "*.a")
endif()
harvest(openjpeg/include/openjpeg-2.3 openjpeg/include "*.h")
harvest(openjpeg/lib openjpeg/lib "*.a")
harvest(opensubdiv/include opensubdiv/include "*.h")

@@ -158,12 +157,12 @@ harvest(xr_openxr_sdk/lib xr_openxr_sdk/lib "*.a")
harvest(osl/bin osl/bin "oslc")
harvest(osl/include osl/include "*.h")
harvest(osl/lib osl/lib "*.a")
harvest(osl/share/OSL/shaders osl/share/OSL/shaders "*.h")
harvest(osl/shaders osl/shaders "*.h")
harvest(png/include png/include "*.h")
harvest(png/lib png/lib "*.a")
harvest(pugixml/include pugixml/include "*.hpp")
harvest(pugixml/lib pugixml/lib "*.a")
harvest(python/bin python/bin "python${PYTHON_SHORT_VERSION}")
harvest(python/bin python/bin "python${PYTHON_SHORT_VERSION}m")
harvest(python/include python/include "*h")
harvest(python/lib python/lib "*")
harvest(sdl/include/SDL2 sdl/include "*.h")

@@ -193,7 +192,7 @@ harvest(haru/lib haru/lib "*.a")

if(UNIX AND NOT APPLE)
harvest(libglu/lib mesa/lib "*.so*")
harvest(mesa/lib64 mesa/lib "*.so*")
harvest(mesa/lib mesa/lib "*.so*")
endif()

endif()
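The harvest(<from> <to> <glob>) calls above use a helper defined earlier in this file, outside the hunks shown. A minimal sketch of what such a helper could look like, assuming LIBDIR is the staging prefix and HARVEST_TARGET the packaging destination used throughout these scripts; this is an assumed stand-in, not Blender's actual definition.

```cmake
# Hypothetical stand-in for the real harvest() helper: copy files matching a
# glob from the dependency's staging directory into the packaged layout.
function(harvest from to pattern)
  install(
    DIRECTORY "${LIBDIR}/${from}/"
    DESTINATION "${HARVEST_TARGET}/${to}"
    FILES_MATCHING PATTERN "${pattern}"
  )
endfunction()

# Example call mirroring the hunk above: place the built clang-format binary
# into the llvm/bin directory of the packaged libraries.
# harvest(clang/bin llvm/bin "clang-format")
```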
@@ -21,42 +21,37 @@ if(WIN32)
-DFLEX_EXECUTABLE=${LIBDIR}/flexbison/win_flex.exe
-DBISON_EXECUTABLE=${LIBDIR}/flexbison/win_bison.exe
-DM4_EXECUTABLE=${DOWNLOAD_DIR}/mingw/mingw64/msys/1.0/bin/m4.exe
-DARM_ENABLED=Off
)
elseif(APPLE)
# Use bison and flex installed via Homebrew.
# The ones that come with Xcode toolset are too old.
# Use bison installed via Homebrew.
# The one which comes which Xcode toolset is too old.
if("${CMAKE_HOST_SYSTEM_PROCESSOR}" STREQUAL "arm64")
set(ISPC_EXTRA_ARGS_APPLE
-DBISON_EXECUTABLE=/opt/homebrew/opt/bison/bin/bison
-DFLEX_EXECUTABLE=/opt/homebrew/opt/flex/bin/flex
-DARM_ENABLED=On
)
set(HOMEBREW_LOCATION "/opt/homebrew")
else()
set(ISPC_EXTRA_ARGS_APPLE
-DBISON_EXECUTABLE=/usr/local/opt/bison/bin/bison
-DARM_ENABLED=Off
)
set(HOMEBREW_LOCATION "/usr/local")
endif()
set(ISPC_EXTRA_ARGS_APPLE
-DBISON_EXECUTABLE=${HOMEBREW_LOCATION}/opt/bison/bin/bison
)
elseif(UNIX)
set(ISPC_EXTRA_ARGS_UNIX
-DCMAKE_C_COMPILER=${LIBDIR}/llvm/bin/clang
-DCMAKE_CXX_COMPILER=${LIBDIR}/llvm/bin/clang++
-DARM_ENABLED=Off
-DCMAKE_C_COMPILER=${LIBDIR}/clang/bin/clang
-DCMAKE_CXX_COMPILER=${LIBDIR}/clang/bin/clang++
)
endif()

set(ISPC_EXTRA_ARGS
-DARM_ENABLED=Off
-DISPC_NO_DUMPS=On
-DISPC_INCLUDE_EXAMPLES=Off
-DISPC_INCLUDE_TESTS=Off
-DLLVM_ROOT=${LIBDIR}/llvm/lib/cmake/llvm
-DLLVM_LIBRARY_DIR=${LIBDIR}/llvm/lib
-DCLANG_EXECUTABLE=${LIBDIR}/llvm/bin/clang
-DCLANGPP_EXECUTABLE=${LIBDIR}/llvm/bin/clang++
-DCLANG_EXECUTABLE=${LIBDIR}/clang/bin/clang
-DCLANGPP_EXECUTABLE=${LIBDIR}/clang/bin/clang++
-DISPC_INCLUDE_TESTS=Off
-DCLANG_LIBRARY_DIR=${LIBDIR}/llvm/lib
-DCLANG_INCLUDE_DIRS=${LIBDIR}/llvm/include
-DCLANG_LIBRARY_DIR=${LIBDIR}/clang/lib
-DCLANG_INCLUDE_DIRS=${LIBDIR}/clang/include
${ISPC_EXTRA_ARGS_WIN}
${ISPC_EXTRA_ARGS_APPLE}
${ISPC_EXTRA_ARGS_UNIX}

@@ -75,6 +70,7 @@ ExternalProject_Add(external_ispc
add_dependencies(
external_ispc
ll
external_clang
)

if(WIN32)
@@ -16,20 +16,16 @@
#
# ***** END GPL LICENSE BLOCK *****

set(EXPAT_EXTRA_ARGS
-DEXPAT_BUILD_DOCS=OFF
-DEXPAT_BUILD_EXAMPLES=OFF
-DEXPAT_BUILD_TESTS=OFF
-DEXPAT_BUILD_TOOLS=OFF
-DEXPAT_SHARED_LIBS=OFF
set(LCMS_EXTRA_ARGS
)

ExternalProject_Add(external_expat
URL ${EXPAT_URI}
ExternalProject_Add(external_lcms
URL ${LCMS_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${EXPAT_HASH}
PREFIX ${BUILD_DIR}/expat
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/expat ${DEFAULT_CMAKE_FLAGS} ${EXPAT_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/expat
SOURCE_SUBDIR expat
URL_HASH MD5=${LCMS_HASH}
PREFIX ${BUILD_DIR}/lcms
# Patch taken from ocio.
PATCH_COMMAND ${CMAKE_COMMAND} -E copy ${PATCH_DIR}/cmakelists_lcms.txt ${BUILD_DIR}/lcms/src/external_lcms/CMakeLists.txt
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/lcms ${DEFAULT_CMAKE_FLAGS} ${LCMS_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/lcms
)
@@ -17,20 +17,11 @@
# ***** END GPL LICENSE BLOCK *****

if(APPLE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
set(LLVM_TARGETS AArch64$<SEMICOLON>ARM)
set(LLVM_TARGETS AArch64)
else()
set(LLVM_TARGETS X86)
endif()

if(APPLE)
set(LLVM_XML2_ARGS
-DLIBXML2_LIBRARY=${LIBDIR}/xml2/lib/libxml2.a
)
set(LLVM_BUILD_CLANG_TOOLS_EXTRA ^^clang-tools-extra)
set(BUILD_CLANG_TOOLS ON)
endif()

set(LLVM_EXTRA_ARGS
-DLLVM_USE_CRT_RELEASE=MD
-DLLVM_USE_CRT_DEBUG=MDd

@@ -40,8 +31,6 @@ set(LLVM_EXTRA_ARGS
-DLLVM_ENABLE_TERMINFO=OFF
-DLLVM_BUILD_LLVM_C_DYLIB=OFF
-DLLVM_ENABLE_UNWIND_TABLES=OFF
-DLLVM_ENABLE_PROJECTS=clang${LLVM_BUILD_CLANG_TOOLS_EXTRA}
${LLVM_XML2_ARGS}
)

if(WIN32)

@@ -56,9 +45,7 @@ ExternalProject_Add(ll
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${LLVM_HASH}
CMAKE_GENERATOR ${LLVM_GENERATOR}
LIST_SEPARATOR ^^
PREFIX ${BUILD_DIR}/ll
SOURCE_SUBDIR llvm
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/ll/src/ll < ${PATCH_DIR}/llvm.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/llvm ${DEFAULT_CMAKE_FLAGS} ${LLVM_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/llvm

@@ -78,11 +65,3 @@ if(MSVC)
DEPENDEES mkdir update patch download configure build install
)
endif()

# We currently do not build libxml2 on Windows.
if(APPLE)
add_dependencies(
ll
external_xml2
)
endif()
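A note on the ^^ placeholder seen above: CMake lists are semicolon separated, so a value such as clang;clang-tools-extra would be split apart when forwarded through CMAKE_ARGS. ExternalProject_Add's LIST_SEPARATOR option declares a stand-in separator that is converted back to a semicolon when the sub-project is configured. A minimal sketch of the mechanism only; the project name and empty commands below are placeholders, not Blender's.

```cmake
include(ExternalProject)

# The "^^" in CMAKE_ARGS is rewritten to ";" when the sub-project is configured,
# so its cache ends up with LLVM_ENABLE_PROJECTS=clang;clang-tools-extra.
# A real URL or SOURCE_DIR would be needed for the sub-build to actually run.
ExternalProject_Add(demo_list_separator
  DOWNLOAD_COMMAND ""
  LIST_SEPARATOR ^^
  CMAKE_ARGS -DLLVM_ENABLE_PROJECTS=clang^^clang-tools-extra
  INSTALL_COMMAND ""
)
```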
@@ -20,36 +20,19 @@ set(MESA_CFLAGS "-static-libgcc")
set(MESA_CXXFLAGS "-static-libgcc -static-libstdc++ -Bstatic -lstdc++ -Bdynamic -l:libstdc++.a")
set(MESA_LDFLAGS "-L${LIBDIR}/zlib/lib -pthread -static-libgcc -static-libstdc++ -Bstatic -lstdc++ -Bdynamic -l:libstdc++.a -l:libz_pic.a")

# The 'native-file', used for overrides with the meson build system.
# meson does not provide a way to do this using command line arguments.
#
# Note that we can't output to "${BUILD_DIR}/mesa/src/external_mesa" as
# it doesn't exist when CMake first executes.
file(WRITE ${BUILD_DIR}/mesa/tmp/native-file.ini "\
[binaries]
llvm-config = '${LIBDIR}/llvm/bin/llvm-config'"
)

set(MESA_EXTRA_FLAGS
-Dbuildtype=release
-Dc_args=${MESA_CFLAGS}
-Dcpp_args=${MESA_CXXFLAGS}
-Dc_link_args=${MESA_LDFLAGS}
-Dcpp_link_args=${MESA_LDFLAGS}
-Dglx=gallium-xlib
-Dgallium-drivers=swrast
-Ddri-drivers=
-Dvulkan-drivers=
-Dgbm=disabled
-Degl=disabled
-Dgles1=disabled
-Dgles2=disabled
-Dshared-llvm=disabled
# Without this, the build fails when: `wayland-scanner` is not found.
# At some point we will likely want to support Wayland.
# Disable for now since it's not officially supported.
-Dplatforms=x11
--native-file ${BUILD_DIR}/mesa/tmp/native-file.ini
CFLAGS=${MESA_CFLAGS}
CXXFLAGS=${MESA_CXXFLAGS}
LDFLAGS=${MESA_LDFLAGS}
--enable-glx=gallium-xlib
--with-gallium-drivers=swrast
--disable-dri
--disable-gbm
--disable-egl
--disable-gles1
--disable-gles2
--disable-llvm-shared-libs
--with-llvm-prefix=${LIBDIR}/llvm
)

ExternalProject_Add(external_mesa

@@ -59,9 +42,9 @@ ExternalProject_Add(external_mesa
PREFIX ${BUILD_DIR}/mesa
CONFIGURE_COMMAND ${CONFIGURE_ENV} &&
cd ${BUILD_DIR}/mesa/src/external_mesa/ &&
meson ${BUILD_DIR}/mesa/src/external_mesa-build --prefix=${LIBDIR}/mesa ${MESA_EXTRA_FLAGS}
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa-build && ninja -j${MAKE_THREADS}
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa-build && ninja install
${CONFIGURE_COMMAND_NO_TARGET} --prefix=${LIBDIR}/mesa ${MESA_EXTRA_FLAGS}
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa/ && make -j${MAKE_THREADS}
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa/ && make install
INSTALL_DIR ${LIBDIR}/mesa
)
@@ -22,17 +22,8 @@ ExternalProject_Add(external_nasm
URL_HASH SHA256=${NASM_HASH}
PREFIX ${BUILD_DIR}/nasm
PATCH_COMMAND ${PATCH_CMD} --verbose -p 1 -N -d ${BUILD_DIR}/nasm/src/external_nasm < ${PATCH_DIR}/nasm.diff
CONFIGURE_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && ./autogen.sh && ${CONFIGURE_COMMAND} --prefix=${LIBDIR}/nasm
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make -j${MAKE_THREADS} && make manpages
CONFIGURE_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && ${CONFIGURE_COMMAND} --prefix=${LIBDIR}/nasm
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make -j${MAKE_THREADS}
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make install
INSTALL_DIR ${LIBDIR}/nasm
)

if(UNIX)
# `touch nasm.1 ndisasm.1` helps to create the manual pages files, even when
# local `asciidoc` and `xmlto` packages are not installed.
ExternalProject_Add_Step(external_nasm after_configure
COMMAND ${CMAKE_COMMAND} -E touch ${BUILD_DIR}/nasm/src/external_nasm/nasm.1 ${BUILD_DIR}/nasm/src/external_nasm/ndisasm.1
DEPENDEES configure
)
endif()
@@ -47,5 +47,4 @@ ExternalProject_Add(external_numpy
add_dependencies(
external_numpy
external_python
external_python_site_packages
)
@@ -18,37 +18,50 @@

set(OPENCOLORIO_EXTRA_ARGS
-DOCIO_BUILD_APPS=OFF
-DOCIO_BUILD_PYTHON=OFF
-DOCIO_BUILD_PYGLUE=OFF
-DOCIO_BUILD_NUKE=OFF
-DOCIO_BUILD_JAVA=OFF
-DBUILD_SHARED_LIBS=OFF
-DOCIO_USE_BOOST_PTR=OFF
-DOCIO_BUILD_STATIC=ON
-DOCIO_BUILD_SHARED=OFF
-DOCIO_BUILD_TRUELIGHT=OFF
-DOCIO_BUILD_DOCS=OFF
-DOCIO_BUILD_TESTS=OFF
-DOCIO_BUILD_GPU_TESTS=OFF
-DOCIO_USE_SSE=ON

# Manually build ext packages except for pystring, which does not have
# a CMake or autotools build system that we can easily use.
-DOCIO_INSTALL_EXT_PACKAGES=MISSING
-DHalf_ROOT=${LIBDIR}/openexr
-DHalf_STATIC_LIBRARY=ON
-Dexpat_ROOT=${LIBDIR}/expat
-Dyaml-cpp_ROOT=${LIBDIR}/yamlcpp
-DOCIO_BUILD_PYGLUE=OFF
-DOCIO_BUILD_JNIGLUE=OFF
-DOCIO_STATIC_JNIGLUE=OFF
)

if(APPLE AND NOT("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
set(OPENCOLORIO_EXTRA_ARGS
${OPENCOLORIO_EXTRA_ARGS}
-DOCIO_USE_SSE=OFF
${OPENCOLORIO_EXTRA_ARGS}
-DOCIO_USE_SSE=OFF
)
endif()

if(WIN32)
set(OCIO_PATCH opencolorio_win.diff)
set(OPENCOLORIO_EXTRA_ARGS
${OPENCOLORIO_EXTRA_ARGS}
-DOCIO_BUILD_TESTS=OFF
-DOCIO_USE_SSE=ON
-DOCIO_INLINES_HIDDEN=OFF
-DOCIO_PYGLUE_LINK=OFF
-DOCIO_PYGLUE_RESPECT_ABI=OFF
-DOCIO_PYGLUE_SONAME=OFF
-DOCIO_PYGLUE_LIB_PREFIX=OFF
-DUSE_EXTERNAL_TINYXML=ON
-DTINYXML_INCLUDE_DIR=${LIBDIR}/tinyxml/include
-DTINYXML_LIBRARY=${LIBDIR}/tinyxml/lib/tinyxml${libext}
-DUSE_EXTERNAL_YAML=ON
-DYAML_CPP_FOUND=ON
-DYAML_CPP_VERSION=${YAMLCPP_VERSION}
-DUSE_EXTERNAL_LCMS=ON
-DINC_1=${LIBDIR}/tinyxml/include
-DINC_2=${LIBDIR}/yamlcpp/include
# Lie because ocio cmake is demanding boost even though it is not needed.
-DYAML_CPP_VERSION=0.5.0
)
else()
set(OCIO_PATCH opencolorio.diff)
set(OPENCOLORIO_EXTRA_ARGS
${OPENCOLORIO_EXTRA_ARGS}
)

@@ -59,43 +72,48 @@ ExternalProject_Add(external_opencolorio
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${OPENCOLORIO_HASH}
PREFIX ${BUILD_DIR}/opencolorio
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/opencolorio/src/external_opencolorio < ${PATCH_DIR}/opencolorio.diff
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/opencolorio/src/external_opencolorio < ${PATCH_DIR}/${OCIO_PATCH}
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/opencolorio ${DEFAULT_CMAKE_FLAGS} ${OPENCOLORIO_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/opencolorio
)

if(NOT WIN32)
add_custom_command(
OUTPUT ${LIBDIR}/opencolorio/lib/libtinyxml.a
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libtinyxml.a ${LIBDIR}/opencolorio/lib/libtinyxml.a
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libyaml-cpp.a ${LIBDIR}/opencolorio/lib/libyaml-cpp.a
)
add_custom_target(external_opencolorio_extra ALL DEPENDS external_opencolorio ${LIBDIR}/opencolorio/lib/libtinyxml.a)
endif()

add_dependencies(
external_opencolorio
external_yamlcpp
external_expat
external_openexr
external_boost
)

if(WIN32)
add_dependencies(
external_opencolorio
external_tinyxml
external_yamlcpp

)
if(BUILD_MODE STREQUAL Release)
ExternalProject_Add_Step(external_opencolorio after_install
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/include ${HARVEST_TARGET}/opencolorio/include
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/lib ${HARVEST_TARGET}/opencolorio/lib
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/lib/static ${HARVEST_TARGET}/opencolorio/lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/yamlcpp/lib/libyaml-cppmd.lib ${HARVEST_TARGET}/opencolorio/lib/libyaml-cpp.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/expat/lib/libexpatMD.lib ${HARVEST_TARGET}/opencolorio/lib/libexpatMD.lib
COMMAND ${CMAKE_COMMAND} -E copy ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/pystring.lib ${HARVEST_TARGET}/opencolorio/lib/pystring.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tinyxml/lib/tinyxml.lib ${HARVEST_TARGET}/opencolorio/lib/tinyxml.lib
DEPENDEES install
)
endif()
if(BUILD_MODE STREQUAL Debug)
ExternalProject_Add_Step(external_opencolorio after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/opencolorio/lib/Opencolorio.lib ${HARVEST_TARGET}/opencolorio/lib/OpencolorIO_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/opencolorio/lib/static/Opencolorio.lib ${HARVEST_TARGET}/opencolorio/lib/OpencolorIO_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/yamlcpp/lib/libyaml-cppmdd.lib ${HARVEST_TARGET}/opencolorio/lib/libyaml-cpp_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/expat/lib/libexpatdMD.lib ${HARVEST_TARGET}/opencolorio/lib/libexpatdMD.lib
COMMAND ${CMAKE_COMMAND} -E copy ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/pystring.lib ${HARVEST_TARGET}/opencolorio/lib/pystring_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tinyxml/lib/tinyxml.lib ${HARVEST_TARGET}/opencolorio/lib/tinyxml_d.lib
DEPENDEES install
)
endif()
else()
ExternalProject_Add_Step(external_opencolorio after_install
COMMAND cp ${LIBDIR}/yamlcpp/lib/libyaml-cpp.a ${LIBDIR}/opencolorio/lib/
COMMAND cp ${LIBDIR}/expat/lib/libexpat.a ${LIBDIR}/opencolorio/lib/
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libpystring.a ${LIBDIR}/opencolorio/lib/
DEPENDEES install
)

endif()
@@ -45,7 +45,6 @@ ExternalProject_Add(external_openimagedenoise
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${OIDN_HASH}
PREFIX ${BUILD_DIR}/openimagedenoise
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/openimagedenoise/src/external_openimagedenoise < ${PATCH_DIR}/oidn.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/openimagedenoise ${DEFAULT_CMAKE_FLAGS} ${OIDN_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/openimagedenoise
)
@@ -30,5 +30,5 @@ ExternalProject_Add(external_openmp

add_dependencies(
external_openmp
ll
external_clang
)
@@ -54,8 +54,6 @@ set(OPENVDB_EXTRA_ARGS
-DOPENVDB_CORE_STATIC=${OPENVDB_STATIC}
-DOPENVDB_BUILD_BINARIES=Off
-DCMAKE_DEBUG_POSTFIX=_d
-DILMBASE_USE_STATIC_LIBS=On
-DOPENEXR_USE_STATIC_LIBS=On
)

if(WIN32)
@@ -19,7 +19,12 @@
if(WIN32)
set(OSL_CMAKE_CXX_STANDARD_LIBRARIES "kernel32${LIBEXT} user32${LIBEXT} gdi32${LIBEXT} winspool${LIBEXT} shell32${LIBEXT} ole32${LIBEXT} oleaut32${LIBEXT} uuid${LIBEXT} comdlg32${LIBEXT} advapi32${LIBEXT} psapi${LIBEXT}")
set(OSL_FLEX_BISON -DFLEX_EXECUTABLE=${LIBDIR}/flexbison/win_flex.exe -DBISON_EXECUTABLE=${LIBDIR}/flexbison/win_bison.exe)
set(OSL_SIMD_FLAGS -DOIIO_NOSIMD=1 -DOIIO_SIMD=sse2)
set(OSL_OPENIMAGEIO_LIBRARY "${LIBDIR}/openimageio/lib/${LIBPREFIX}OpenImageIO${LIBEXT};${LIBDIR}/openimageio/lib/${LIBPREFIX}OpenImageIO_Util${LIBEXT};${LIBDIR}/png/lib/libpng16${LIBEXT};${LIBDIR}/jpg/lib/${LIBPREFIX}jpeg${LIBEXT};${LIBDIR}/tiff/lib/${LIBPREFIX}tiff${LIBEXT};${LIBDIR}/openexr/lib/${LIBPREFIX}IlmImf${OPENEXR_VERSION_POSTFIX}${LIBEXT}")
if("${CMAKE_SIZEOF_VOID_P}" EQUAL "4")
set(OSL_SIMD_FLAGS -DOIIO_NOSIMD=1 -DOIIO_SIMD=0)
else()
set(OSL_SIMD_FLAGS -DOIIO_NOSIMD=1 -DOIIO_SIMD=sse2)
endif()
SET(OSL_PLATFORM_FLAGS -DLINKSTATIC=ON)
else()
set(OSL_CMAKE_CXX_STANDARD_LIBRARIES)

@@ -29,6 +34,7 @@ else()
endif()

set(OSL_ILMBASE_CUSTOM_LIBRARIES "${LIBDIR}/openexr/lib/Imath${OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/Half{OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/IlmThread${OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/Iex${OPENEXR_VERSION_POSTFIX}.lib")
set(OSL_LLVM_LIBRARY "${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMAnalysis${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMAsmParser${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMAsmPrinter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMBitReader${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMBitWriter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMCodeGen${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMCore${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMDebugInfo${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMExecutionEngine${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMInstCombine${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMInstrumentation${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMInterpreter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMJIT${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMLinker${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMC${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMCDisassembler${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMCJIT${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMCParser${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMObject${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMRuntimeDyld${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMScalarOpts${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMSelectionDAG${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMSupport${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMTableGen${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMTarget${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMTransformUtils${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMVectorize${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86AsmParser${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86AsmPrinter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86CodeGen${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Desc${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Disassembler${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Info${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Utils${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMipa${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMipo${LIBEXT}")

set(OSL_EXTRA_ARGS
-DBoost_COMPILER:STRING=${BOOST_COMPILER_STRING}

@@ -39,8 +45,13 @@ set(OSL_EXTRA_ARGS
-DBOOST_LIBRARYDIR=${LIBDIR}/boost/lib/
-DBoost_NO_SYSTEM_PATHS=ON
-DBoost_NO_BOOST_CMAKE=ON
-DOpenEXR_ROOT=${LIBDIR}/openexr/
-DIlmBase_ROOT=${LIBDIR}/openexr/
-DLLVM_DIRECTORY=${LIBDIR}/llvm
-DLLVM_INCLUDES=${LIBDIR}/llvm/include
-DLLVM_LIB_DIR=${LIBDIR}/llvm/lib
-DLLVM_VERSION=3.4
-DLLVM_LIBRARY=${OSL_LLVM_LIBRARY}
-DOPENEXR_HOME=${LIBDIR}/openexr/
-DILMBASE_HOME=${LIBDIR}/openexr/
-DILMBASE_INCLUDE_DIR=${LIBDIR}/openexr/include/
-DOPENEXR_HALF_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Half${OPENEXR_VERSION_POSTFIX}${LIBEXT}
-DOPENEXR_IMATH_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Imath${OPENEXR_VERSION_POSTFIX}${LIBEXT}

@@ -48,32 +59,35 @@ set(OSL_EXTRA_ARGS
-DOPENEXR_IEX_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Iex${OPENEXR_VERSION_POSTFIX}${LIBEXT}
-DOPENEXR_INCLUDE_DIR=${LIBDIR}/openexr/include/
-DOPENEXR_ILMIMF_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}IlmImf${OPENEXR_VERSION_POSTFIX}${LIBEXT}
-DOpenImageIO_ROOT=${LIBDIR}/openimageio/
-DOSL_BUILD_TESTS=OFF
-DOSL_BUILD_MATERIALX=OFF
-DZLIB_LIBRARY=${LIBDIR}/zlib/lib/${ZLIB_LIBRARY}
-DZLIB_INCLUDE_DIR=${LIBDIR}/zlib/include/
-DOPENIMAGEIOHOME=${LIBDIR}/openimageio/
-DOPENIMAGEIO_INCLUDE_DIR=${LIBDIR}/openimageio/include
-DOPENIMAGEIO_LIBRARY=${OSL_OPENIMAGEIO_LIBRARY}
${OSL_FLEX_BISON}
-DCMAKE_CXX_STANDARD_LIBRARIES=${OSL_CMAKE_CXX_STANDARD_LIBRARIES}
-DBUILD_SHARED_LIBS=OFF
-DBUILDSTATIC=ON
${OSL_PLATFORM_FLAGS}
-DOSL_BUILD_PLUGINS=OFF
-DOSL_BUILD_PLUGINS=Off
-DSTOP_ON_WARNING=OFF
-DUSE_LLVM_BITCODE=OFF
-DLLVM_ROOT=${LIBDIR}/llvm/
-DLLVM_DIRECTORY=${LIBDIR}/llvm/
-DUSE_PARTIO=OFF
-DUSE_QT=OFF
-DUSE_Qt5=OFF
-DINSTALL_DOCS=OFF
${OSL_SIMD_FLAGS}
-Dpugixml_ROOT=${LIBDIR}/pugixml
-DUSE_PYTHON=OFF
-DPARTIO_LIBRARIES=
-DPUGIXML_HOME=${LIBDIR}/pugixml
)

# Apple arm64 uses LLVM 11, LLVM 10+ requires C++14
if (APPLE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
list(APPEND OSL_EXTRA_ARGS -DCMAKE_CXX_STANDARD=14)
if(APPLE)
# Make symbol hiding consistent with OIIO which defaults to OFF,
# avoids linker warnings on macOS
set(OSL_EXTRA_ARGS
${OSL_EXTRA_ARGS}
-DHIDE_SYMBOLS=OFF
)
endif()

ExternalProject_Add(external_osl

@@ -91,6 +105,7 @@ add_dependencies(
external_osl
external_boost
ll
external_clang
external_openexr
external_zlib
external_flexbison
@@ -43,7 +43,7 @@ if(WIN32)
PREFIX ${BUILD_DIR}/python
CONFIGURE_COMMAND ""
BUILD_COMMAND cd ${BUILD_DIR}/python/src/external_python/pcbuild/ && set IncludeTkinter=false && call build.bat -e -p x64 -c ${BUILD_MODE}
INSTALL_COMMAND ${PYTHON_BINARY_INTERNAL} ${PYTHON_SRC}/PC/layout/main.py -b ${PYTHON_SRC}/PCbuild/amd64 -s ${PYTHON_SRC} -t ${PYTHON_SRC}/tmp/ --include-stable --include-pip --include-dev --include-launchers --include-venv --include-symbols ${PYTHON_EXTRA_INSTLAL_FLAGS} --copy ${LIBDIR}/python
INSTALL_COMMAND ${PYTHON_BINARY_INTERNAL} ${PYTHON_SRC}/PC/layout/main.py -b ${PYTHON_SRC}/PCbuild/amd64 -s ${PYTHON_SRC} -t ${PYTHON_SRC}/tmp/ --include-underpth --include-stable --include-pip --include-dev --include-launchers --include-venv --include-symbols ${PYTHON_EXTRA_INSTLAL_FLAGS} --copy ${LIBDIR}/python
)

else()

@@ -74,15 +74,16 @@ else()
endif()
set(PYTHON_CONFIGURE_ENV ${CONFIGURE_ENV} && ${PYTHON_FUNC_CONFIGS})
set(PYTHON_BINARY ${BUILD_DIR}/python/src/external_python/python.exe)
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_macos.diff)
else()
set(PYTHON_CONFIGURE_ENV ${CONFIGURE_ENV})
set(PYTHON_BINARY ${BUILD_DIR}/python/src/external_python/python)
endif()
# Link against zlib statically (Unix). Avoid rpath issues (macOS).
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_unix.diff)
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_linux.diff)
endif()

set(PYTHON_CONFIGURE_EXTRA_ARGS "--with-openssl=${LIBDIR}/ssl")
set(PYTHON_CFLAGS "-I${LIBDIR}/sqlite/include -I${LIBDIR}/bzip2/include -I${LIBDIR}/lzma/include -I${LIBDIR}/zlib/include ${PLATFORM_CFLAGS}")
set(PYTHON_LDFLAGS "-L${LIBDIR}/ffi/lib -L${LIBDIR}/sqlite/lib -L${LIBDIR}/bzip2/lib -L${LIBDIR}/lzma/lib -L${LIBDIR}/zlib/lib ${PLATFORM_LDFLAGS}")
set(PYTHON_CFLAGS "-I${LIBDIR}/sqlite/include -I${LIBDIR}/bzip2/include -I${LIBDIR}/lzma/include -I${LIBDIR}/zlib/include")
set(PYTHON_LDFLAGS "-L${LIBDIR}/ffi/lib -L${LIBDIR}/sqlite/lib -L${LIBDIR}/bzip2/lib -L${LIBDIR}/lzma/lib -L${LIBDIR}/zlib/lib")
set(PYTHON_CONFIGURE_EXTRA_ENV
export CFLAGS=${PYTHON_CFLAGS} &&
export CPPFLAGS=${PYTHON_CFLAGS} &&
@@ -16,27 +16,14 @@
#
# ***** END GPL LICENSE BLOCK *****

if(WIN32 AND BUILD_MODE STREQUAL Debug)
set(SITE_PACKAGES_EXTRA --global-option build --global-option --debug)
endif()

ExternalProject_Add(external_python_site_packages
DOWNLOAD_COMMAND ""
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
PREFIX ${BUILD_DIR}/site_packages
INSTALL_COMMAND ${PYTHON_BINARY} -m pip install ${SITE_PACKAGES_EXTRA} cython==${CYTHON_VERSION} idna==${IDNA_VERSION} chardet==${CHARDET_VERSION} urllib3==${URLLIB3_VERSION} certifi==${CERTIFI_VERSION} requests==${REQUESTS_VERSION} --no-binary :all:
INSTALL_COMMAND ${PYTHON_BINARY} -m pip install idna==${IDNA_VERSION} chardet==${CHARDET_VERSION} urllib3==${URLLIB3_VERSION} certifi==${CERTIFI_VERSION} requests==${REQUESTS_VERSION} --no-binary :all:
)

if(USE_PIP_NUMPY)
# Use only wheel (and not build from source) to stop NumPy from linking against buggy
# Accelerate framework backend on macOS. Official wheels are built with OpenBLAS.
ExternalProject_Add_Step(external_python_site_packages after_install
COMMAND ${PYTHON_BINARY} -m pip install --no-cache-dir numpy==${NUMPY_VERSION} --only-binary :all:
DEPENDEES install
)
endif()

add_dependencies(
external_python_site_packages
external_python
@@ -42,21 +42,8 @@ if(UNIX)
-DSQLITE_MAX_VARIABLE_NUMBER=250000 \
-fPIC")
set(SQLITE_CONFIGURE_ENV ${SQLITE_CONFIGURE_ENV} && export LDFLAGS=${SQLITE_LDFLAGS} && export CFLAGS=${SQLITE_CFLAGS})
set(SQLITE_CONFIGURATION_ARGS
${SQLITE_CONFIGURATION_ARGS}
--enable-threadsafe
--enable-load-extension
--enable-json1
--enable-fts4
--enable-fts5
# While building `tcl` is harmless, it causes problems when the install step
# tries to copy the files into the system path.
# Since this isn't required by Python or Blender this can be disabled.
# Note that Debian (for example), splits this off into a separate package,
# so it's safe to turn off.
--disable-tcl
--enable-shared=no
)
set(SQLITE_CONFIGURATION_ARGS ${SQLITE_CONFIGURATION_ARGS} --enable-threadsafe --enable-load-extension --enable-json1 --enable-fts4 --enable-fts5 --disable-tcl
--enable-shared=no)
endif()

ExternalProject_Add(external_sqlite
@@ -20,7 +20,7 @@ set(SSL_CONFIGURE_COMMAND ./Configure)
set(SSL_PATCH_CMD echo .)

if(APPLE)
set(SSL_OS_COMPILER "blender-darwin-${CMAKE_OSX_ARCHITECTURES}")
set(SSL_OS_COMPILER "blender-darwin-${CMAKE_OSX_ARCHITECTURES}")
else()
if("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(SSL_EXTRA_ARGS enable-ec_nistp_64_gcc_128)
@@ -42,8 +42,7 @@ ExternalProject_Add(external_tbb
URL_HASH MD5=${TBB_HASH}
PREFIX ${BUILD_DIR}/tbb
PATCH_COMMAND COMMAND ${CMAKE_COMMAND} -E copy ${PATCH_DIR}/cmakelists_tbb.txt ${BUILD_DIR}/tbb/src/external_tbb/CMakeLists.txt &&
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/tbb/src/external_tbb/build/vs2013/version_string.ver ${BUILD_DIR}/tbb/src/external_tbb/src/tbb/version_string.ver &&
${PATCH_CMD} -p 1 -d ${BUILD_DIR}/tbb/src/external_tbb < ${PATCH_DIR}/tbb.diff
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/tbb/src/external_tbb/build/vs2013/version_string.ver ${BUILD_DIR}/tbb/src/external_tbb/src/tbb/version_string.ver
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/tbb ${DEFAULT_CMAKE_FLAGS} ${TBB_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/tbb
)
@@ -16,15 +16,16 @@
#
# ***** END GPL LICENSE BLOCK *****

if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
ExternalProject_Add(external_sse2neon
GIT_REPOSITORY ${SSE2NEON_GIT}
GIT_TAG ${SSE2NEON_GIT_HASH}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
PREFIX ${BUILD_DIR}/sse2neon
CONFIGURE_COMMAND echo sse2neon - Nothing to configure
BUILD_COMMAND echo sse2neon - nothing to build
INSTALL_COMMAND mkdir -p ${LIBDIR}/sse2neon && cp ${BUILD_DIR}/sse2neon/src/external_sse2neon/sse2neon.h ${LIBDIR}/sse2neon
INSTALL_DIR ${LIBDIR}/sse2neon
)
endif()
set(TINYXML_EXTRA_ARGS
)

ExternalProject_Add(external_tinyxml
URL ${TINYXML_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${TINYXML_HASH}
PREFIX ${BUILD_DIR}/tinyxml
# patch taken from ocio
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/tinyxml/src/external_tinyxml < ${PATCH_DIR}/tinyxml.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/tinyxml ${DEFAULT_CMAKE_FLAGS} ${TINYXML_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/tinyxml
)
@@ -32,11 +32,11 @@ set(JPEG_VERSION 2.0.4)
set(JPEG_URI https://github.com/libjpeg-turbo/libjpeg-turbo/archive/${JPEG_VERSION}.tar.gz)
set(JPEG_HASH 44c43e4a9fb352f47090804529317c88)

set(BOOST_VERSION 1.73.0)
set(BOOST_VERSION_NODOTS 1_73_0)
set(BOOST_VERSION_NODOTS_SHORT 1_73)
set(BOOST_VERSION 1.70.0)
set(BOOST_VERSION_NODOTS 1_70_0)
set(BOOST_VERSION_NODOTS_SHORT 1_70)
set(BOOST_URI https://dl.bintray.com/boostorg/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz)
set(BOOST_HASH 4036cd27ef7548b8d29c30ea10956196)
set(BOOST_HASH fea771fe8176828fabf9c09242ee8c26)

# Using old version as recommended by OpenVDB build documentation.
set(BLOSC_VERSION 1.5.0)
@@ -47,9 +47,9 @@ set(PTHREADS_VERSION 3.0.0)
set(PTHREADS_URI http://sourceforge.mirrorservice.org/p/pt/pthreads4w/pthreads4w-code-v${PTHREADS_VERSION}.zip)
set(PTHREADS_HASH f3bf81bb395840b3446197bcf4ecd653)

set(OPENEXR_VERSION 2.5.5)
set(OPENEXR_VERSION 2.4.0)
set(OPENEXR_URI https://github.com/AcademySoftwareFoundation/openexr/archive/v${OPENEXR_VERSION}.tar.gz)
set(OPENEXR_HASH 85e8a979092c9055d10ed103062d31a0)
set(OPENEXR_HASH 9e4d69cf2a12c6fb19b98af7c5e0eaee)
if(WIN32)
# Openexr started appending _d on its own so now
# we need to tell the build the postfix is _s while
@@ -78,9 +78,9 @@ set(FREEGLUT_VERSION 3.0.0)
set(FREEGLUT_URI http://pilotfiber.dl.sourceforge.net/project/freeglut/freeglut/${FREEGLUT_VERSION}/freeglut-${FREEGLUT_VERSION}.tar.gz)
set(FREEGLUT_HASH 90c3ca4dd9d51cf32276bc5344ec9754)

set(ALEMBIC_VERSION 1.7.16)
set(ALEMBIC_VERSION 1.7.12)
set(ALEMBIC_URI https://github.com/alembic/alembic/archive/${ALEMBIC_VERSION}.tar.gz)
set(ALEMBIC_MD5 effcc86e42fe6605588e3de57bde6677)
set(ALEMBIC_MD5 e2b3777f23c5c09481a008cc6f0f8a40)

# hash is for 3.1.2
set(GLFW_GIT_UID 30306e54705c3adae9fe082c816a3be71963485c)
@@ -109,27 +109,22 @@ set(OPENCOLLADA_VERSION v1.6.68)
set(OPENCOLLADA_URI https://github.com/KhronosGroup/OpenCOLLADA/archive/${OPENCOLLADA_VERSION}.tar.gz)
set(OPENCOLLADA_HASH ee7dae874019fea7be11613d07567493)

set(OPENCOLORIO_VERSION 2.0.0)
set(OPENCOLORIO_VERSION 1.1.1)
set(OPENCOLORIO_URI https://github.com/AcademySoftwareFoundation/OpenColorIO/archive/v${OPENCOLORIO_VERSION}.tar.gz)
set(OPENCOLORIO_HASH 1a2e3478b6cd9a1549f24e1b2205e3f0)
set(OPENCOLORIO_HASH 23d8b9ac81599305539a5a8674b94a3d)

if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
# Newer version required by ISPC with arm support.
set(LLVM_VERSION 11.0.1)
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-project-${LLVM_VERSION}.src.tar.xz)
set(LLVM_HASH e700af40ab83463e4e9ab0ba3708312e)
set(LLVM_VERSION 9.0.1)
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-${LLVM_VERSION}.src.tar.xz)
set(LLVM_HASH 31eb9ce73dd2a0f8dcab8319fb03f8fc)

set(OPENMP_VERSION 9.0.1)
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${OPENMP_VERSION}/openmp-${OPENMP_VERSION}.src.tar.xz)
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
else()
set(LLVM_VERSION 9.0.1)
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-project-${LLVM_VERSION}.tar.xz)
set(LLVM_HASH b4268e733dfe352960140dc07ef2efcb)
set(CLANG_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-${LLVM_VERSION}.src.tar.xz)
set(CLANG_HASH 13468e4a44940efef1b75e8641752f90)

set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/openmp-${LLVM_VERSION}.src.tar.xz)
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
endif()
set(CLANG_TOOLS_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-tools-extra-${LLVM_VERSION}.src.tar.xz)
set(CLANG_TOOLS_HASH c76293870b564c6a7968622b475b7646)

set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/openmp-${LLVM_VERSION}.src.tar.xz)
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)

set(OPENIMAGEIO_VERSION 2.1.15.0)
set(OPENIMAGEIO_URI https://github.com/OpenImageIO/oiio/archive/Release-${OPENIMAGEIO_VERSION}.tar.gz)
@@ -139,39 +134,38 @@ set(TIFF_VERSION 4.1.0)
set(TIFF_URI http://download.osgeo.org/libtiff/tiff-${TIFF_VERSION}.tar.gz)
set(TIFF_HASH 2165e7aba557463acc0664e71a3ed424)

set(OSL_VERSION 1.11.10.0)
set(OSL_VERSION 1.10.10)
set(OSL_URI https://github.com/imageworks/OpenShadingLanguage/archive/Release-${OSL_VERSION}.tar.gz)
set(OSL_HASH dfdc23597aeef083832cbada62211756)
set(OSL_HASH 00dec08a93c8084e53848b9ad047889f)

set(PYTHON_VERSION 3.9.2)
set(PYTHON_SHORT_VERSION 3.9)
set(PYTHON_SHORT_VERSION_NO_DOTS 39)
set(PYTHON_VERSION 3.7.7)
set(PYTHON_SHORT_VERSION 3.7)
set(PYTHON_SHORT_VERSION_NO_DOTS 37)
set(PYTHON_URI https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tar.xz)
set(PYTHON_HASH f0dc9000312abeb16de4eccce9a870ab)
set(PYTHON_HASH 172c650156f7bea68ce31b2fd01fa766)

set(TBB_VERSION 2020_U2)
set(TBB_VERSION 2019_U9)
set(TBB_URI https://github.com/oneapi-src/oneTBB/archive/${TBB_VERSION}.tar.gz)
set(TBB_HASH 1b711ae956524855088df3bbf5ec65dc)
set(TBB_HASH 26263622e9187212ec240dcf01b66207)

set(OPENVDB_VERSION 8.0.1)
set(OPENVDB_VERSION 7.0.0)
set(OPENVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/v${OPENVDB_VERSION}.tar.gz)
set(OPENVDB_HASH 01b490be16cc0e15c690f9a153c21461)
set(OPENVDB_HASH fd6c4f168282f7e0e494d290cd531fa8)

set(NANOVDB_GIT_UID e62f7a0bf1e27397223c61ddeaaf57edf111b77f)
set(NANOVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/${NANOVDB_GIT_UID}.tar.gz)
set(NANOVDB_HASH 90919510bc6ccd630fedc56f748cb199)

set(IDNA_VERSION 2.10)
set(CHARDET_VERSION 4.0.0)
set(URLLIB3_VERSION 1.26.3)
set(CERTIFI_VERSION 2020.12.5)
set(REQUESTS_VERSION 2.25.1)
set(CYTHON_VERSION 0.29.21)
set(IDNA_VERSION 2.9)
set(CHARDET_VERSION 3.0.4)
set(URLLIB3_VERSION 1.25.9)
set(CERTIFI_VERSION 2020.4.5.2)
set(REQUESTS_VERSION 2.23.0)

set(NUMPY_VERSION 1.19.5)
set(NUMPY_SHORT_VERSION 1.19)
set(NUMPY_VERSION 1.17.5)
set(NUMPY_SHORT_VERSION 1.17)
set(NUMPY_URI https://github.com/numpy/numpy/releases/download/v${NUMPY_VERSION}/numpy-${NUMPY_VERSION}.zip)
set(NUMPY_HASH f6a1b48717c552bbc18f1adc3cc1fe0e)
set(NUMPY_HASH 763a5646fa6eef7a22f4895bca0524f2)

set(LAME_VERSION 3.100)
set(LAME_URI http://downloads.sourceforge.net/project/lame/lame/3.100/lame-${LAME_VERSION}.tar.gz)
@@ -262,16 +256,16 @@ set(YAMLCPP_VERSION 0.6.3)
set(YAMLCPP_URI https://codeload.github.com/jbeder/yaml-cpp/tar.gz/yaml-cpp-${YAMLCPP_VERSION})
set(YAMLCPP_HASH b45bf1089a382e81f6b661062c10d0c2)

set(EXPAT_VERSION 2_2_10)
set(EXPAT_URI https://github.com/libexpat/libexpat/archive/R_${EXPAT_VERSION}.tar.gz)
set(EXPAT_HASH 7ca5f09959fcb9a57618368deb627b9f)
set(LCMS_VERSION 2.9)
set(LCMS_URI https://nchc.dl.sourceforge.net/project/lcms/lcms/${LCMS_VERSION}/lcms2-${LCMS_VERSION}.tar.gz)
set(LCMS_HASH 8de1b7724f578d2995c8fdfa35c3ad0e)

set(PUGIXML_VERSION 1.10)
set(PUGIXML_URI https://github.com/zeux/pugixml/archive/v${PUGIXML_VERSION}.tar.gz)
set(PUGIXML_HASH 0c208b0664c7fb822bf1b49ad035e8fd)

set(FLEXBISON_VERSION 2.5.5)
set(FLEXBISON_URI http://prdownloads.sourceforge.net/winflexbison/win_flex_bison-2.5.5.zip)
set(FLEXBISON_URI http://prdownloads.sourceforge.net/winflexbison//win_flex_bison-2.5.5.zip)
set(FLEXBISON_HASH d87a3938194520d904013abef3df10ce)

# Libraries to keep Python modules static on Linux.
@@ -301,41 +295,34 @@ set(SQLITE_HASH fb558c49ee21a837713c4f1e7e413309aabdd9c7)
set(EMBREE_VERSION 3.10.0)
set(EMBREE_URI https://github.com/embree/embree/archive/v${EMBREE_VERSION}.zip)
set(EMBREE_HASH 4bbe29e7eaa46417efc75fc5f1e8eb87)
set(EMBREE_ARM_GIT https://github.com/brechtvl/embree.git)

set(USD_VERSION 21.02)
set(USD_VERSION 20.05)
set(USD_URI https://github.com/PixarAnimationStudios/USD/archive/v${USD_VERSION}.tar.gz)
set(USD_HASH 1dd1e2092d085ed393c1f7c450a4155a)
set(USD_HASH 6d679e739e7f65725d9c029e37dda9fc)

set(OIDN_VERSION 1.3.0)
set(OIDN_VERSION 1.2.3)
set(OIDN_URI https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz)
set(OIDN_HASH 301a5a0958d375a942014df0679b9270)
set(OIDN_HASH 1f11466c2c3efc27faba5ec7078d12b2)

set(LIBGLU_VERSION 9.0.1)
set(LIBGLU_URI ftp://ftp.freedesktop.org/pub/mesa/glu/glu-${LIBGLU_VERSION}.tar.xz)
set(LIBGLU_HASH 151aef599b8259efe9acd599c96ea2a3)

set(MESA_VERSION 20.3.4)
set(MESA_URI ftp://ftp.freedesktop.org/pub/mesa/mesa-${MESA_VERSION}.tar.xz)
set(MESA_HASH 556338446aef8ae947a789b3e0b5e056)
set(MESA_VERSION 18.3.1)
set(MESA_URI ftp://ftp.freedesktop.org/pub/mesa//mesa-${MESA_VERSION}.tar.xz)
set(MESA_HASH d60828056d77bfdbae0970f9b15fb1be)

set(NASM_VERSION 2.15.02)
set(NASM_URI https://github.com/netwide-assembler/nasm/archive/nasm-${NASM_VERSION}.tar.gz)
set(NASM_HASH aded8b796c996a486a56e0515c83e414116decc3b184d88043480b32eb0a8589)
set(NASM_URI https://www.nasm.us/pub/nasm/releasebuilds/${NASM_VERSION}/nasm-${NASM_VERSION}.tar.xz)
set(NASM_HASH f4fd1329b1713e1ccd34b2fc121c4bcd278c9f91cc4cb205ae8fcd2e4728dd14)

set(XR_OPENXR_SDK_VERSION 1.0.14)
set(XR_OPENXR_SDK_VERSION 1.0.8)
set(XR_OPENXR_SDK_URI https://github.com/KhronosGroup/OpenXR-SDK/archive/release-${XR_OPENXR_SDK_VERSION}.tar.gz)
set(XR_OPENXR_SDK_HASH 0df6b2fd6045423451a77ff6bc3e1a75)
set(XR_OPENXR_SDK_HASH c6de63d2e0f9029aa58dfa97cad8ce07)

if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
# Unreleased version with macOS arm support.
set(ISPC_URI https://github.com/ispc/ispc/archive/f5949c055eb9eeb93696978a3da4bfb3a6a30b35.zip)
set(ISPC_HASH d382fea18d01dbd0cd05d9e1ede36d7d)
else()
set(ISPC_VERSION v1.14.1)
set(ISPC_URI https://github.com/ispc/ispc/archive/${ISPC_VERSION}.tar.gz)
set(ISPC_HASH 968fbc8dfd16a60ba4e32d2e0e03ea7a)
endif()
set(ISPC_VERSION v1.14.1)
set(ISPC_URI https://github.com/ispc/ispc/archive/${ISPC_VERSION}.tar.gz)
set(ISPC_HASH 968fbc8dfd16a60ba4e32d2e0e03ea7a)

set(GMP_VERSION 6.2.0)
set(GMP_URI https://gmplib.org/download/gmp/gmp-${GMP_VERSION}.tar.xz)
@@ -348,6 +335,3 @@ set(POTRACE_HASH 5f0bd87ddd9a620b0c4e65652ef93d69)
set(HARU_VERSION 2_3_0)
set(HARU_URI https://github.com/libharu/libharu/archive/RELEASE_${HARU_VERSION}.tar.gz)
set(HARU_HASH 4f916aa49c3069b3a10850013c507460)

set(SSE2NEON_GIT https://github.com/DLTcollab/sse2neon.git)
set(SSE2NEON_GIT_HASH fe5ff00bb8d19b327714a3c290f3e2ce81ba3525)
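
The version bumps above all follow the same convention: each dependency gets a VERSION, a URI derived from it, and a pinned hash that the matching ExternalProject_Add() call (such as external_tbb earlier in this diff) verifies through URL_HASH. A minimal hedged sketch of that convention with a made-up dependency (external_foo and the FOO_* variables are illustrative, not entries from these build files):

# Hypothetical dependency entry written in the style of versions.cmake.
set(FOO_VERSION 1.2.3)
set(FOO_URI https://example.org/foo/foo-${FOO_VERSION}.tar.gz)
set(FOO_HASH 0123456789abcdef0123456789abcdef)

include(ExternalProject)
# URL_HASH makes the download fail loudly if the archive does not match the
# pinned MD5, so a silently changed upstream tarball cannot enter the build.
ExternalProject_Add(external_foo
  URL ${FOO_URI}
  URL_HASH MD5=${FOO_HASH}
  PREFIX ${BUILD_DIR}/foo
  CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/foo ${DEFAULT_CMAKE_FLAGS}
  INSTALL_DIR ${LIBDIR}/foo
)
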
@@ -53,7 +53,7 @@ if(WIN32)
endif()
if(BUILD_MODE STREQUAL Debug)
ExternalProject_Add_Step(external_xr_openxr_sdk after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/xr_openxr_sdk/lib/openxr_loaderd.lib ${HARVEST_TARGET}/xr_openxr_sdk/lib/openxr_loaderd.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/xr_openxr_sdk/lib/openxr_loader.lib ${HARVEST_TARGET}/xr_openxr_sdk/lib/openxr_loader_d.lib
DEPENDEES install
)
endif()

@@ -17,18 +17,13 @@
# ***** END GPL LICENSE BLOCK *****

set(YAMLCPP_EXTRA_ARGS
-DBUILD_GMOCK=OFF
-DYAML_CPP_BUILD_TESTS=OFF
-DYAML_CPP_BUILD_TOOLS=OFF
-DYAML_CPP_BUILD_CONTRIB=OFF
-DYAML_MSVC_SHARED_RT=ON
)

if(WIN32)
set(YAMLCPP_EXTRA_ARGS
${YAMLCPP_EXTRA_ARGS}
-DBUILD_GMOCK=OFF
-DYAML_MSVC_SHARED_RT=ON)
endif()

ExternalProject_Add(external_yamlcpp
URL ${YAMLCPP_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}

File diff suppressed because it is too large
@@ -20,7 +20,6 @@ if(APPLE)
endif()

file(GLOB tbb_src "${CMAKE_CURRENT_SOURCE_DIR}/src/tbb/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/src/old/*.cpp")
list(REMOVE_ITEM tbb_src ${CMAKE_CURRENT_SOURCE_DIR}/src/tbb/tbb_bind.cpp)
list(APPEND tbb_src ${CMAKE_CURRENT_SOURCE_DIR}/src/rml/client/rml_tbb.cpp)
file(GLOB to_remove "${CMAKE_CURRENT_SOURCE_DIR}/src/old/test*.cpp")
list(REMOVE_ITEM tbb_src ${to_remove})

@@ -1,5 +1,5 @@
--- a/llvm/lib/Support/Unix/Path.inc 2020-02-17 09:24:26.000000000 +0100
+++ b/llvm/lib/Support/Unix/Path.inc 2020-02-17 09:26:25.000000000 +0100
--- a/lib/Support/Unix/Path.inc 2020-02-17 09:24:26.000000000 +0100
+++ b/lib/Support/Unix/Path.inc 2020-02-17 09:26:25.000000000 +0100
@@ -1200,7 +1200,9 @@
/// implementation.
std::error_code copy_file(const Twine &From, const Twine &To) {

@@ -1,40 +0,0 @@
diff -Naur oidn-1.3.0/cmake/FindTBB.cmake external_openimagedenoise/cmake/FindTBB.cmake
--- oidn-1.3.0/cmake/FindTBB.cmake 2021-02-04 16:20:26 -0700
+++ external_openimagedenoise/cmake/FindTBB.cmake 2021-02-12 09:35:53 -0700
@@ -332,20 +332,22 @@
${TBB_ROOT}/lib/${TBB_ARCH}/${TBB_VCVER}
${TBB_ROOT}/lib
)
-
# On Windows, also search the DLL so that the client may install it.
file(GLOB DLL_NAMES
${TBB_ROOT}/bin/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/bin/${LIB_NAME}.dll
+ ${TBB_ROOT}/lib/${LIB_NAME}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB1}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB2}.dll
${TBB_ROOT}/../redist/${TBB_ARCH}/tbb/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/../redist/${TBB_ARCH}_win/tbb/${TBB_VCVER}/${LIB_NAME}.dll
)
- list(GET DLL_NAMES 0 DLL_NAME)
- get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
- set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ if (DLL_NAMES)
+ list(GET DLL_NAMES 0 DLL_NAME)
+ get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
+ set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ endif()
elseif(APPLE)
set(LIB_PATHS ${TBB_ROOT}/lib)
else()
--- external_openimagedenoise/cmake/oidn_ispc.cmake 2021-02-15 17:29:34.000000000 +0100
+++ external_openimagedenoise/cmake/oidn_ispc.cmake2 2021-02-15 17:29:28.000000000 +0100
@@ -98,7 +98,7 @@
elseif(OIDN_ARCH STREQUAL "ARM64")
set(ISPC_ARCHITECTURE "aarch64")
if(APPLE)
- set(ISPC_TARGET_OS "--target-os=ios")
+ set(ISPC_TARGET_OS "--target-os=macos")
endif()
endif()
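
One detail worth calling out in the FindTBB.cmake hunk above: list(GET) aborts configuration when the list is empty, so wrapping the DLL lookup in if(DLL_NAMES) is what lets configuration continue on systems where the glob matches nothing. A minimal standalone sketch of the guarded pattern (the directory and messages are placeholders, not part of the patch):

# Illustrative only: guard list(GET) against an empty glob result.
file(GLOB DLL_NAMES "${CMAKE_CURRENT_LIST_DIR}/bin/*.dll")

if(DLL_NAMES)
  # Safe: the list has at least one entry.
  list(GET DLL_NAMES 0 DLL_NAME)
  get_filename_component(DLL_DIR "${DLL_NAME}" DIRECTORY)
  message(STATUS "Using DLL ${DLL_NAME} from ${DLL_DIR}")
else()
  # Without the guard, list(GET) would raise a fatal configure-time error here.
  message(STATUS "No DLL found, continuing without one")
endif()
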
@@ -1,16 +1,33 @@
diff --git a/share/cmake/modules/Findpystring.cmake b/share/cmake/modules/Findpystring.cmake
index 7b894a45..92618215 100644
--- a/share/cmake/modules/Findpystring.cmake
+++ b/share/cmake/modules/Findpystring.cmake
@@ -113,6 +113,11 @@ if(NOT pystring_FOUND)
-DCMAKE_INSTALL_MESSAGE=${CMAKE_INSTALL_MESSAGE}
-DCMAKE_INSTALL_PREFIX=${_EXT_DIST_ROOT}
-DCMAKE_OBJECT_PATH_MAX=${CMAKE_OBJECT_PATH_MAX}
+ -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES}
+ -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}
+ -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT}
+ -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG}
+ -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 1eb691b..cff9bd8 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -23,8 +23,6 @@ if(WIN32)
if("${CMAKE_BUILD_TYPE}" STREQUAL "Release")
set(EXTERNAL_COMPILE_FLAGS "${EXTERNAL_COMPILE_FLAGS} /WX")
endif()
-else()
- set(EXTERNAL_COMPILE_FLAGS "${EXTERNAL_COMPILE_FLAGS} -Werror")
endif()

# SHARED
--- a/CMakeLists.txt 2018-09-10 22:15:29.000000000 +0200
+++ b/CMakeLists.txt 2018-09-10 22:17:40.000000000 +0200
@@ -229,7 +229,7 @@
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-whitespace ${TINYXML_PATCHFILE}
BINARY_DIR ext/build/tinyxml
INSTALL_DIR ext/dist
- CMAKE_ARGS ${TINYXML_CMAKE_ARGS}
+ CMAKE_ARGS ${TINYXML_CMAKE_ARGS} -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET} -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} -DCMAKE_C_FLAGS_DEBUG=${CMAKE_C_FLAGS_DEBUG} -DCMAKE_C_FLAGS_RELEASE=${CMAKE_C_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG} -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
)
if(CMAKE_TOOLCHAIN_FILE)
set(pystring_CMAKE_ARGS
if(WIN32)
set(TINYXML_STATIC_LIBRARIES ${PROJECT_BINARY_DIR}/ext/dist/lib/tinyxml.lib)
@@ -343,7 +343,7 @@
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-whitespace ${YAML_CPP_PATCHFILE}
BINARY_DIR ext/build/yaml-cpp
INSTALL_DIR ext/dist
- CMAKE_ARGS ${YAML_CPP_CMAKE_ARGS}
+ CMAKE_ARGS ${YAML_CPP_CMAKE_ARGS} -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET} -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} -DCMAKE_C_FLAGS_DEBUG=${CMAKE_C_FLAGS_DEBUG} -DCMAKE_C_FLAGS_RELEASE=${CMAKE_C_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG} -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
)
set(YAML_CPP_INCLUDE_DIRS ${PROJECT_BINARY_DIR}/ext/dist/include)
set(YAML_CPP_LIBRARY_DIRS ${PROJECT_BINARY_DIR}/ext/dist/lib)
51
build_files/build_environment/patches/opencolorio_win.diff
Normal file
@@ -0,0 +1,51 @@
|
||||
diff -Naur external_opencolorio/CMakeLists.txt external_opencolorio.patched/CMakeLists.txt
|
||||
--- external_opencolorio/CMakeLists.txt 2018-01-04 18:38:27 -0700
|
||||
+++ external_opencolorio.patched/CMakeLists.txt 2018-08-15 11:46:53 -0600
|
||||
@@ -251,25 +251,30 @@
|
||||
if(USE_EXTERNAL_YAML)
|
||||
# Set minimum yaml version for non-patched sources.
|
||||
set(YAML_VERSION_MIN "0.3.0")
|
||||
- include(FindPkgConfig)
|
||||
- pkg_check_modules(PC_YAML_CPP REQUIRED QUIET yaml-cpp)
|
||||
- find_path(YAML_CPP_INCLUDE_DIR yaml-cpp/yaml.h
|
||||
- HINTS ${PC_YAML_CPP_INCLUDEDIR} ${PC_YAML_CPP_INCLUDE_DIRS} )
|
||||
- find_library(YAML_CPP_LIBRARY LIBRARY_NAMES yaml-cpp libyaml-cpp
|
||||
- HINTS ${PC_YAML_CPP_LIBRARY_DIRS} )
|
||||
- set(YAML_CPP_LIBRARIES ${YAML_CPP_LIBRARY})
|
||||
- set(YAML_CPP_INCLUDE_DIRS ${YAML_CPP_INCLUDE_DIR})
|
||||
- set(YAML_CPP_VERSION ${PC_YAML_CPP_VERSION})
|
||||
+ if(NOT WIN32)
|
||||
+ include(FindPkgConfig)
|
||||
+ pkg_check_modules(PC_YAML_CPP REQUIRED QUIET yaml-cpp)
|
||||
+ find_path(YAML_CPP_INCLUDE_DIR yaml-cpp/yaml.h
|
||||
+ HINTS ${PC_YAML_CPP_INCLUDEDIR} ${PC_YAML_CPP_INCLUDE_DIRS} )
|
||||
+ find_library(YAML_CPP_LIBRARY LIBRARY_NAMES yaml-cpp libyaml-cpp
|
||||
+ HINTS ${PC_YAML_CPP_LIBRARY_DIRS} )
|
||||
+ set(YAML_CPP_LIBRARIES ${YAML_CPP_LIBRARY})
|
||||
+ set(YAML_CPP_INCLUDE_DIRS ${YAML_CPP_INCLUDE_DIR})
|
||||
+ set(YAML_CPP_VERSION ${PC_YAML_CPP_VERSION})
|
||||
|
||||
- if(YAML_CPP_VERSION VERSION_LESS ${YAML_VERSION_MIN})
|
||||
- message(FATAL_ERROR "ERROR: yaml-cpp ${YAML_VERSION_MIN} or greater is required.")
|
||||
- endif()
|
||||
-
|
||||
- find_package_handle_standard_args(yaml-cpp
|
||||
- REQUIRED_VARS YAML_CPP_LIBRARIES YAML_CPP_INCLUDE_DIRS )
|
||||
- set(YAML_CPP_FOUND ${YAML-CPP_FOUND})
|
||||
- mark_as_advanced(YAML_CPP_INCLUDE_DIR YAML_CPP_LIBRARY YAML-CPP_FOUND)
|
||||
+ if(YAML_CPP_VERSION VERSION_LESS ${YAML_VERSION_MIN})
|
||||
+ message(FATAL_ERROR "ERROR: yaml-cpp ${YAML_VERSION_MIN} or greater is required.")
|
||||
+ endif()
|
||||
|
||||
+ find_package_handle_standard_args(yaml-cpp
|
||||
+ REQUIRED_VARS YAML_CPP_LIBRARIES YAML_CPP_INCLUDE_DIRS )
|
||||
+ set(YAML_CPP_FOUND ${YAML-CPP_FOUND})
|
||||
+ mark_as_advanced(YAML_CPP_INCLUDE_DIR YAML_CPP_LIBRARY YAML-CPP_FOUND)
|
||||
+ else()
|
||||
+ set(EXTERNAL_INCLUDE_DIRS ${EXTERNAL_INCLUDE_DIRS} ${INC_1})
|
||||
+ set(EXTERNAL_INCLUDE_DIRS ${EXTERNAL_INCLUDE_DIRS} ${INC_2})
|
||||
+ message("INCLUDE DIRS = i:${EXTERNAL_INCLUDE_DIRS} |1:${INC_1} |2:${INC_2}")
|
||||
+ endif()
|
||||
if(YAML_CPP_FOUND)
|
||||
if(YAML_CPP_VERSION VERSION_GREATER "0.5.0")
|
||||
# Need to also get the boost headers here, as yaml-cpp 0.5.0+ requires them.
|
@@ -1,10 +1,10 @@
|
||||
diff -Naur openvdb-8.0.0/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
|
||||
--- openvdb-8.0.0/cmake/FindIlmBase.cmake 2020-12-24 10:13:14 -0700
|
||||
+++ openvdb/cmake/FindIlmBase.cmake 2021-02-05 12:07:49 -0700
|
||||
@@ -217,6 +217,12 @@
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib")
|
||||
endif()
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${_IlmBase_Version_Suffix}.lib")
|
||||
diff -Naur orig/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
|
||||
--- orig/cmake/FindIlmBase.cmake 2019-12-06 12:11:33 -0700
|
||||
+++ openvdb/cmake/FindIlmBase.cmake 2020-08-12 12:48:44 -0600
|
||||
@@ -225,6 +225,12 @@
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
"-${IlmBase_VERSION_MAJOR}_${IlmBase_VERSION_MINOR}.lib"
|
||||
)
|
||||
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
+ "_s.lib"
|
||||
+ )
|
||||
@@ -13,14 +13,14 @@ diff -Naur openvdb-8.0.0/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
|
||||
+ )
|
||||
else()
|
||||
if(ILMBASE_USE_STATIC_LIBS)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
|
||||
diff -Naur openvdb-8.0.0/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
|
||||
--- openvdb-8.0.0/cmake/FindOpenEXR.cmake 2020-12-24 10:13:14 -0700
|
||||
+++ openvdb/cmake/FindOpenEXR.cmake 2021-02-05 12:23:39 -0700
|
||||
@@ -210,6 +210,12 @@
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib")
|
||||
endif()
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${_OpenEXR_Version_Suffix}.lib")
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
diff -Naur orig/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
|
||||
--- orig/cmake/FindOpenEXR.cmake 2019-12-06 12:11:33 -0700
|
||||
+++ openvdb/cmake/FindOpenEXR.cmake 2020-08-12 12:48:44 -0600
|
||||
@@ -218,6 +218,12 @@
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
"-${OpenEXR_VERSION_MAJOR}_${OpenEXR_VERSION_MINOR}.lib"
|
||||
)
|
||||
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
+ "_s.lib"
|
||||
+ )
|
||||
@@ -29,11 +29,11 @@ diff -Naur openvdb-8.0.0/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
|
||||
+ )
|
||||
else()
|
||||
if(OPENEXR_USE_STATIC_LIBS)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
|
||||
diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/CMakeLists.txt
|
||||
--- openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt 2020-12-24 10:13:14 -0700
|
||||
+++ openvdb/openvdb/openvdb/CMakeLists.txt 2021-02-05 11:18:33 -0700
|
||||
@@ -107,7 +107,9 @@
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
diff -Naur orig/openvdb/CMakeLists.txt openvdb/openvdb/CMakeLists.txt
|
||||
--- orig/openvdb/CMakeLists.txt 2019-12-06 12:11:33 -0700
|
||||
+++ openvdb/openvdb/CMakeLists.txt 2020-08-12 14:12:26 -0600
|
||||
@@ -105,7 +105,9 @@
|
||||
# http://boost.2283326.n4.nabble.com/CMake-config-scripts-broken-in-1-70-td4708957.html
|
||||
# https://github.com/boostorg/boost_install/commit/160c7cb2b2c720e74463865ef0454d4c4cd9ae7c
|
||||
set(BUILD_SHARED_LIBS ON)
|
||||
@@ -44,15 +44,15 @@ diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/
|
||||
endif()
|
||||
|
||||
find_package(Boost ${MINIMUM_BOOST_VERSION} REQUIRED COMPONENTS iostreams system)
|
||||
@@ -146,6 +148,7 @@
|
||||
Boost::disable_autolinking # add -DBOOST_ALL_NO_LIB
|
||||
)
|
||||
@@ -193,6 +195,7 @@
|
||||
if(OPENVDB_DISABLE_BOOST_IMPLICIT_LINKING)
|
||||
add_definitions(-DBOOST_ALL_NO_LIB)
|
||||
endif()
|
||||
+ add_definitions(-D__TBB_NO_IMPLICIT_LINKAGE -DOPENVDB_OPENEXR_STATICLIB)
|
||||
endif()
|
||||
|
||||
if(USE_EXR)
|
||||
@@ -379,7 +382,12 @@
|
||||
# @todo Should be target definitions
|
||||
@@ -383,7 +386,12 @@
|
||||
# imported targets.
|
||||
|
||||
if(OPENVDB_CORE_SHARED)
|
||||
@@ -66,9 +66,9 @@ diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/
|
||||
endif()
|
||||
|
||||
if(OPENVDB_CORE_STATIC)
|
||||
diff -Naur openvdb-8.0.0/openvdb/openvdb/version.rc.in openvdb/openvdb/openvdb/version.rc.in
|
||||
--- openvdb-8.0.0/openvdb/openvdb/version.rc.in 1969-12-31 17:00:00 -0700
|
||||
+++ openvdb/openvdb/openvdb/version.rc.in 2021-02-05 11:18:33 -0700
|
||||
diff -Naur orig/openvdb/version.rc.in openvdb/openvdb/version.rc.in
|
||||
--- orig/openvdb/version.rc.in 1969-12-31 17:00:00 -0700
|
||||
+++ openvdb/openvdb/version.rc.in 2020-08-12 14:15:01 -0600
|
||||
@@ -0,0 +1,48 @@
|
||||
+#include <winver.h>
|
||||
+
|
||||
|
@@ -43,23 +43,27 @@ diff -Naur OpenShadingLanguage-Release-1.9.9/src/liboslexec/llvm_util.cpp extern
|
||||
-
|
||||
+void LLVM_Util::Cleanup ()
|
||||
+{
|
||||
+ if(jitmm_hold) jitmm_hold->clear();
|
||||
+ jitmm_hold.clear();
|
||||
+}
|
||||
|
||||
size_t
|
||||
LLVM_Util::total_jit_memory_held ()
|
||||
diff -Naur org/CMakeLists.txt external_osl/CMakeLists.txt
|
||||
--- org/CMakeLists.txt 2020-12-01 12:37:15 -0700
|
||||
+++ external_osl/CMakeLists.txt 2021-01-20 13:26:50 -0700
|
||||
@@ -84,6 +84,11 @@
|
||||
CACHE STRING "Directory where OptiX PTX files will be installed")
|
||||
set (CMAKE_DEBUG_POSTFIX "" CACHE STRING "Library naming postfix for Debug builds (e.g., '_debug')")
|
||||
|
||||
diff -Naur OpenShadingLanguage-Release-1.9.9/CMakeLists.txt external_osl/CMakeLists.txt
|
||||
--- orig/CMakeLists.txt 2020-01-27 16:22:31 -0700
|
||||
+++ external_osl/CMakeLists.txt 2020-05-13 18:04:52 -0600
|
||||
@@ -102,10 +102,11 @@
|
||||
set (OPTIX_EXTRA_LIBS CACHE STRING "Extra lib targets needed for OptiX")
|
||||
set (CUDA_TARGET_ARCH "sm_35" CACHE STRING "CUDA GPU architecture (e.g. sm_35)")
|
||||
|
||||
-# set (USE_OIIO_STATIC ON CACHE BOOL "If OIIO is built static")
|
||||
-# if (USE_OIIO_STATIC)
|
||||
-# add_definitions ("-DOIIO_STATIC_BUILD=1")
|
||||
-# endif ()
|
||||
+set (USE_OIIO_STATIC ON CACHE BOOL "If OIIO is built static")
|
||||
+if (USE_OIIO_STATIC)
|
||||
+ add_definitions ("-DOIIO_STATIC_BUILD=1")
|
||||
+ add_definitions ("-DOIIO_STATIC_DEFINE=1")
|
||||
+endif ()
|
||||
|
||||
|
||||
set (OSL_NO_DEFAULT_TEXTURESYSTEM OFF CACHE BOOL "Do not use create a raw OIIO::TextureSystem")
|
||||
if (OSL_NO_DEFAULT_TEXTURESYSTEM)
|
||||
if (OSL_NO_DEFAULT_TEXTURESYSTEM)
|
@@ -2,23 +2,23 @@ diff --git a/setup.py.orig b/setup.py
|
||||
index a97a755..07ce853 100644
|
||||
--- a/setup.py.orig
|
||||
+++ b/setup.py
|
||||
@@ -1603,13 +1603,13 @@
|
||||
@@ -1422,13 +1422,13 @@ class PyBuildExt(build_ext):
|
||||
version = line.split()[2]
|
||||
break
|
||||
if version >= version_req:
|
||||
- if (self.compiler.find_library_file(self.lib_dirs, 'z')):
|
||||
+ if (self.compiler.find_library_file(self.lib_dirs, 'z_pic')):
|
||||
if MACOS:
|
||||
- if (self.compiler.find_library_file(lib_dirs, 'z')):
|
||||
+ if (self.compiler.find_library_file(lib_dirs, 'z_pic')):
|
||||
if host_platform == "darwin":
|
||||
zlib_extra_link_args = ('-Wl,-search_paths_first',)
|
||||
else:
|
||||
zlib_extra_link_args = ()
|
||||
self.add(Extension('zlib', ['zlibmodule.c'],
|
||||
- libraries=['z'],
|
||||
+ libraries=['z_pic'],
|
||||
extra_link_args=zlib_extra_link_args))
|
||||
exts.append( Extension('zlib', ['zlibmodule.c'],
|
||||
- libraries = ['z'],
|
||||
+ libraries = ['z_pic'],
|
||||
extra_link_args = zlib_extra_link_args))
|
||||
have_zlib = True
|
||||
else:
|
||||
@@ -1623,7 +1623,7 @@
|
||||
@@ -1442,7 +1442,7 @@ class PyBuildExt(build_ext):
|
||||
# crc32 if we have it. Otherwise binascii uses its own.
|
||||
if have_zlib:
|
||||
extra_compile_args = ['-DUSE_ZLIB_CRC32']
|
||||
@@ -27,12 +27,12 @@ index a97a755..07ce853 100644
|
||||
extra_link_args = zlib_extra_link_args
|
||||
else:
|
||||
extra_compile_args = []
|
||||
@@ -2168,7 +2168,7 @@
|
||||
ffi_inc = None
|
||||
@@ -1991,7 +1991,7 @@ class PyBuildExt(build_ext):
|
||||
print('Header file {} does not exist'.format(ffi_h))
|
||||
if ffi_lib is None and ffi_inc:
|
||||
ffi_lib = None
|
||||
if ffi_inc is not None:
|
||||
- for lib_name in ('ffi', 'ffi_pic'):
|
||||
+ for lib_name in ('ffi_pic', ):
|
||||
if (self.compiler.find_library_file(self.lib_dirs, lib_name)):
|
||||
if (self.compiler.find_library_file(lib_dirs, lib_name)):
|
||||
ffi_lib = lib_name
|
||||
break
|
289
build_files/build_environment/patches/python_macos.diff
Normal file
@@ -0,0 +1,289 @@
|
||||
diff -ru a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst
|
||||
--- a/Doc/library/ctypes.rst 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/Doc/library/ctypes.rst 2020-07-14 08:10:10.000000000 +0200
|
||||
@@ -1551,6 +1551,13 @@
|
||||
value usable as argument (integer, string, ctypes instance). This allows
|
||||
defining adapters that can adapt custom objects as function parameters.
|
||||
|
||||
+ .. attribute:: variadic
|
||||
+
|
||||
+ Assign a boolean to specify that the function takes a variable number of
|
||||
+ arguments. This does not matter on most platforms, but for Apple arm64
|
||||
+ platforms variadic functions have a different calling convention than
|
||||
+ normal functions.
|
||||
+
|
||||
.. attribute:: errcheck
|
||||
|
||||
Assign a Python function or another callable to this attribute. The
|
||||
diff -ru a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
|
||||
--- a/Modules/_ctypes/_ctypes.c 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/Modules/_ctypes/_ctypes.c 2020-07-14 08:14:41.000000000 +0200
|
||||
@@ -3175,6 +3175,35 @@
|
||||
}
|
||||
|
||||
static int
|
||||
+PyCFuncPtr_set_variadic(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ignored))
|
||||
+{
|
||||
+ StgDictObject *dict = PyObject_stgdict((PyObject *)self);
|
||||
+ assert(dict);
|
||||
+ int r = PyObject_IsTrue(ob);
|
||||
+ if (r == 1) {
|
||||
+ dict->flags |= FUNCFLAG_VARIADIC;
|
||||
+ return 0;
|
||||
+ } else if (r == 0) {
|
||||
+ dict->flags &= ~FUNCFLAG_VARIADIC;
|
||||
+ return 0;
|
||||
+ } else {
|
||||
+ return -1;
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+static PyObject *
|
||||
+PyCFuncPtr_get_variadic(PyCFuncPtrObject *self, void *Py_UNUSED(ignored))
|
||||
+{
|
||||
+ StgDictObject *dict = PyObject_stgdict((PyObject *)self);
|
||||
+ assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */
|
||||
+ if (dict->flags & FUNCFLAG_VARIADIC)
|
||||
+ Py_RETURN_TRUE;
|
||||
+ else
|
||||
+ Py_RETURN_FALSE;
|
||||
+}
|
||||
+
|
||||
+
|
||||
+static int
|
||||
PyCFuncPtr_set_argtypes(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ignored))
|
||||
{
|
||||
PyObject *converters;
|
||||
@@ -5632,6 +5661,7 @@
|
||||
PyModule_AddObject(m, "FUNCFLAG_USE_ERRNO", PyLong_FromLong(FUNCFLAG_USE_ERRNO));
|
||||
PyModule_AddObject(m, "FUNCFLAG_USE_LASTERROR", PyLong_FromLong(FUNCFLAG_USE_LASTERROR));
|
||||
PyModule_AddObject(m, "FUNCFLAG_PYTHONAPI", PyLong_FromLong(FUNCFLAG_PYTHONAPI));
|
||||
+ PyModule_AddObject(m, "FUNCFLAG_VARIADIC", PyLong_FromLong(FUNCFLAG_VARIADIC));
|
||||
PyModule_AddStringConstant(m, "__version__", "1.1.0");
|
||||
|
||||
PyModule_AddObject(m, "_memmove_addr", PyLong_FromVoidPtr(memmove));
|
||||
diff -ru a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
|
||||
--- a/Modules/_ctypes/callproc.c 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/Modules/_ctypes/callproc.c 2020-07-14 08:18:33.000000000 +0200
|
||||
@@ -767,7 +767,8 @@
|
||||
ffi_type **atypes,
|
||||
ffi_type *restype,
|
||||
void *resmem,
|
||||
- int argcount)
|
||||
+ int argcount,
|
||||
+ int argtypecount)
|
||||
{
|
||||
PyThreadState *_save = NULL; /* For Py_BLOCK_THREADS and Py_UNBLOCK_THREADS */
|
||||
PyObject *error_object = NULL;
|
||||
@@ -793,15 +794,38 @@
|
||||
if ((flags & FUNCFLAG_CDECL) == 0)
|
||||
cc = FFI_STDCALL;
|
||||
#endif
|
||||
- if (FFI_OK != ffi_prep_cif(&cif,
|
||||
- cc,
|
||||
- argcount,
|
||||
- restype,
|
||||
- atypes)) {
|
||||
- PyErr_SetString(PyExc_RuntimeError,
|
||||
- "ffi_prep_cif failed");
|
||||
- return -1;
|
||||
- }
|
||||
+#if HAVE_FFI_PREP_CIF_VAR
|
||||
+ /* Everyone SHOULD set f.variadic=True on variadic function pointers, but
|
||||
+ * lots of existing code will not. If there's at least one arg and more
|
||||
+ * args are passed than are defined in the prototype, then it must be a
|
||||
+ * variadic function. */
|
||||
+ if ((flags & FUNCFLAG_VARIADIC) ||
|
||||
+ (argtypecount != 0 && argcount > argtypecount))
|
||||
+ {
|
||||
+ if (FFI_OK != ffi_prep_cif_var(&cif,
|
||||
+ cc,
|
||||
+ argtypecount,
|
||||
+ argcount,
|
||||
+ restype,
|
||||
+ atypes)) {
|
||||
+ PyErr_SetString(PyExc_RuntimeError,
|
||||
+ "ffi_prep_cif_var failed");
|
||||
+ return -1;
|
||||
+ }
|
||||
+ } else {
|
||||
+#endif
|
||||
+ if (FFI_OK != ffi_prep_cif(&cif,
|
||||
+ cc,
|
||||
+ argcount,
|
||||
+ restype,
|
||||
+ atypes)) {
|
||||
+ PyErr_SetString(PyExc_RuntimeError,
|
||||
+ "ffi_prep_cif failed");
|
||||
+ return -1;
|
||||
+ }
|
||||
+#if HAVE_FFI_PREP_CIF_VAR
|
||||
+ }
|
||||
+#endif
|
||||
|
||||
if (flags & (FUNCFLAG_USE_ERRNO | FUNCFLAG_USE_LASTERROR)) {
|
||||
error_object = _ctypes_get_errobj(&space);
|
||||
@@ -1185,9 +1209,8 @@
|
||||
|
||||
if (-1 == _call_function_pointer(flags, pProc, avalues, atypes,
|
||||
rtype, resbuf,
|
||||
- Py_SAFE_DOWNCAST(argcount,
|
||||
- Py_ssize_t,
|
||||
- int)))
|
||||
+ Py_SAFE_DOWNCAST(argcount, Py_ssize_t, int),
|
||||
+ Py_SAFE_DOWNCAST(argtype_count, Py_ssize_t, int)))
|
||||
goto cleanup;
|
||||
|
||||
#ifdef WORDS_BIGENDIAN
|
||||
diff -ru a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h
|
||||
--- a/Modules/_ctypes/ctypes.h 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/Modules/_ctypes/ctypes.h 2020-07-14 08:30:53.000000000 +0200
|
||||
@@ -285,6 +285,7 @@
|
||||
#define FUNCFLAG_PYTHONAPI 0x4
|
||||
#define FUNCFLAG_USE_ERRNO 0x8
|
||||
#define FUNCFLAG_USE_LASTERROR 0x10
|
||||
+#define FUNCFLAG_VARIADIC 0x20
|
||||
|
||||
#define TYPEFLAG_ISPOINTER 0x100
|
||||
#define TYPEFLAG_HASPOINTER 0x200
|
||||
diff -ru a/configure b/configure
|
||||
--- a/configure 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/configure 2020-07-14 08:03:27.000000000 +0200
|
||||
@@ -3374,7 +3374,7 @@
|
||||
# has no effect, don't bother defining them
|
||||
Darwin/[6789].*)
|
||||
define_xopen_source=no;;
|
||||
- Darwin/1[0-9].*)
|
||||
+ Darwin/[12][0-9].*)
|
||||
define_xopen_source=no;;
|
||||
# On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but
|
||||
# used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined
|
||||
@@ -9251,6 +9251,9 @@
|
||||
ppc)
|
||||
MACOSX_DEFAULT_ARCH="ppc64"
|
||||
;;
|
||||
+ arm64)
|
||||
+ MACOSX_DEFAULT_ARCH="arm64"
|
||||
+ ;;
|
||||
*)
|
||||
as_fn_error $? "Unexpected output of 'arch' on OSX" "$LINENO" 5
|
||||
;;
|
||||
diff -ru a/configure.ac b/configure.ac
|
||||
--- a/configure.ac 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/configure.ac 2020-07-14 08:03:27.000000000 +0200
|
||||
@@ -2456,6 +2456,9 @@
|
||||
ppc)
|
||||
MACOSX_DEFAULT_ARCH="ppc64"
|
||||
;;
|
||||
+ arm64)
|
||||
+ MACOSX_DEFAULT_ARCH="arm64"
|
||||
+ ;;
|
||||
*)
|
||||
AC_MSG_ERROR([Unexpected output of 'arch' on OSX])
|
||||
;;
|
||||
diff -ru a/setup.py b/setup.py
|
||||
--- a/setup.py 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/setup.py 2020-07-14 08:28:12.000000000 +0200
|
||||
@@ -141,6 +141,13 @@
|
||||
os.unlink(tmpfile)
|
||||
|
||||
return MACOS_SDK_ROOT
|
||||
+
|
||||
+def is_macosx_at_least(vers):
|
||||
+ if host_platform == 'darwin':
|
||||
+ dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
|
||||
+ if dep_target:
|
||||
+ return tuple(map(int, dep_target.split('.'))) >= vers
|
||||
+ return False
|
||||
|
||||
def is_macosx_sdk_path(path):
|
||||
"""
|
||||
@@ -150,6 +157,13 @@
|
||||
or path.startswith('/System/')
|
||||
or path.startswith('/Library/') )
|
||||
|
||||
+def grep_headers_for(function, headers):
|
||||
+ for header in headers:
|
||||
+ with open(header, 'r') as f:
|
||||
+ if function in f.read():
|
||||
+ return True
|
||||
+ return False
|
||||
+
|
||||
def find_file(filename, std_dirs, paths):
|
||||
"""Searches for the directory where a given file is located,
|
||||
and returns a possibly-empty list of additional directories, or None
|
||||
@@ -1972,7 +1986,11 @@
|
||||
return True
|
||||
|
||||
def detect_ctypes(self, inc_dirs, lib_dirs):
|
||||
- self.use_system_libffi = False
|
||||
+ if not sysconfig.get_config_var("LIBFFI_INCLUDEDIR") and is_macosx_at_least((10,15)):
|
||||
+ self.use_system_libffi = True
|
||||
+ else:
|
||||
+ self.use_system_libffi = '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS")
|
||||
+
|
||||
include_dirs = []
|
||||
extra_compile_args = []
|
||||
extra_link_args = []
|
||||
@@ -2016,32 +2034,48 @@
|
||||
ext_test = Extension('_ctypes_test',
|
||||
sources=['_ctypes/_ctypes_test.c'],
|
||||
libraries=['m'])
|
||||
+ ffi_inc = sysconfig.get_config_var("LIBFFI_INCLUDEDIR")
|
||||
+ ffi_lib = None
|
||||
+
|
||||
self.extensions.extend([ext, ext_test])
|
||||
|
||||
if host_platform == 'darwin':
|
||||
- if '--with-system-ffi' not in sysconfig.get_config_var("CONFIG_ARGS"):
|
||||
+ if not self.use_system_libffi:
|
||||
return
|
||||
- # OS X 10.5 comes with libffi.dylib; the include files are
|
||||
- # in /usr/include/ffi
|
||||
- inc_dirs.append('/usr/include/ffi')
|
||||
-
|
||||
- ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
|
||||
- if not ffi_inc or ffi_inc[0] == '':
|
||||
- ffi_inc = find_file('ffi.h', [], inc_dirs)
|
||||
- if ffi_inc is not None:
|
||||
- ffi_h = ffi_inc[0] + '/ffi.h'
|
||||
+ ffi_in_sdk = os.path.join(macosx_sdk_root(), "usr/include/ffi")
|
||||
+ if os.path.exists(ffi_in_sdk):
|
||||
+ ffi_inc = ffi_in_sdk
|
||||
+ ffi_lib = 'ffi'
|
||||
+ else:
|
||||
+ # OS X 10.5 comes with libffi.dylib; the include files are
|
||||
+ # in /usr/include/ffi
|
||||
+ ffi_inc_dirs.append('/usr/include/ffi')
|
||||
+
|
||||
+ if not ffi_inc:
|
||||
+ found = find_file('ffi.h', [], ffi_inc_dirs)
|
||||
+ if found:
|
||||
+ ffi_inc = found[0]
|
||||
+ if ffi_inc:
|
||||
+ ffi_h = ffi_inc + '/ffi.h'
|
||||
if not os.path.exists(ffi_h):
|
||||
ffi_inc = None
|
||||
print('Header file {} does not exist'.format(ffi_h))
|
||||
- ffi_lib = None
|
||||
- if ffi_inc is not None:
|
||||
+ if ffi_lib is None and ffi_inc:
|
||||
for lib_name in ('ffi', 'ffi_pic'):
|
||||
if (self.compiler.find_library_file(lib_dirs, lib_name)):
|
||||
ffi_lib = lib_name
|
||||
break
|
||||
|
||||
if ffi_inc and ffi_lib:
|
||||
- ext.include_dirs.extend(ffi_inc)
|
||||
+ ffi_headers = glob(os.path.join(ffi_inc, '*.h'))
|
||||
+ if grep_headers_for('ffi_closure_alloc', ffi_headers):
|
||||
+ try:
|
||||
+ sources.remove('_ctypes/malloc_closure.c')
|
||||
+ except ValueError:
|
||||
+ pass
|
||||
+ if grep_headers_for('ffi_prep_cif_var', ffi_headers):
|
||||
+ ext.extra_compile_args.append("-DHAVE_FFI_PREP_CIF_VAR=1")
|
||||
+ ext.include_dirs.append(ffi_inc)
|
||||
ext.libraries.append(ffi_lib)
|
||||
self.use_system_libffi = True
|
||||
|
@@ -1,13 +0,0 @@
|
||||
diff --git a/include/tbb/tbb_config.h b/include/tbb/tbb_config.h
|
||||
index 7a8d06a0..886699d8 100644
|
||||
--- a/include/tbb/tbb_config.h
|
||||
+++ b/include/tbb/tbb_config.h
|
||||
@@ -620,7 +620,7 @@ There are four cases that are supported:
|
||||
// instantiation site, which is too late for suppression of the corresponding messages for internal
|
||||
// stuff.
|
||||
#if !defined(__INTEL_COMPILER) && (!defined(TBB_SUPPRESS_DEPRECATED_MESSAGES) || (TBB_SUPPRESS_DEPRECATED_MESSAGES == 0))
|
||||
- #if (__cplusplus >= 201402L)
|
||||
+ #if (__cplusplus >= 201402L && (!defined(_MSC_VER) || _MSC_VER >= 1920))
|
||||
#define __TBB_DEPRECATED [[deprecated]]
|
||||
#define __TBB_DEPRECATED_MSG(msg) [[deprecated(msg)]]
|
||||
#elif _MSC_VER
|
@@ -26,30 +26,36 @@ diff -Naur external_usd_base/cmake/macros/Public.cmake external_usd/cmake/macros
|
||||
endforeach()
|
||||
foreach(lib ${PXR_OBJECT_LIBS})
|
||||
set(objects "${objects};\$<TARGET_OBJECTS:${lib}>")
|
||||
diff -ru USD-20.11/pxr/base/tf/pxrLZ4/lz4.cpp external_usd/pxr/base/tf/pxrLZ4/lz4.cpp
|
||||
--- USD-20.11/pxr/base/tf/pxrLZ4/lz4.cpp 2020-10-14 19:25:19.000000000 +0100
|
||||
+++ external_usd/pxr/base/tf/pxrLZ4/lz4.cpp 2021-02-09 09:28:51.496190085 +0000
|
||||
@@ -614,6 +614,15 @@
|
||||
/*-************************************
|
||||
* Internal Definitions used in Tests
|
||||
**************************************/
|
||||
+
|
||||
+/*******************************************************************
|
||||
+ * Disabled in Blender. The BLOSC library also exposes these
|
||||
+ * functions, and this causes 'duplicate symbol' linker errors.
|
||||
+ *
|
||||
+ * This problem has been reported upstream at
|
||||
+ * https://github.com/PixarAnimationStudios/USD/issues/1447
|
||||
+ *
|
||||
+ *******************************************************************
|
||||
#if defined (__cplusplus)
|
||||
extern "C" {
|
||||
#endif
|
||||
@@ -627,6 +636,7 @@
|
||||
#if defined (__cplusplus)
|
||||
}
|
||||
#endif
|
||||
+********************************************************************/
|
||||
|
||||
diff --git a/pxr/base/arch/align.h b/pxr/base/arch/align.h
|
||||
index f3cabf4..ebc8a69 100644
|
||||
--- a/pxr/base/arch/align.h
|
||||
+++ b/pxr/base/arch/align.h
|
||||
@@ -77,7 +77,11 @@ ArchAlignMemory(void *base)
|
||||
/// The size of a CPU cache line on the current processor architecture in bytes.
|
||||
///
|
||||
/// \hideinitializer
|
||||
+#if defined(ARCH_OS_DARWIN) && defined(ARCH_CPU_ARM)
|
||||
+#define ARCH_CACHE_LINE_SIZE 128
|
||||
+#else
|
||||
#define ARCH_CACHE_LINE_SIZE 64
|
||||
+#endif
|
||||
|
||||
/*-******************************
|
||||
* Compression functions
|
||||
///@}
|
||||
|
||||
diff --git a/pxr/base/arch/math.h b/pxr/base/arch/math.h
|
||||
index 3e66c37..64a052c 100644
|
||||
--- a/pxr/base/arch/math.h
|
||||
+++ b/pxr/base/arch/math.h
|
||||
@@ -42,7 +42,7 @@ PXR_NAMESPACE_OPEN_SCOPE
|
||||
/// \addtogroup group_arch_Math
|
||||
///@{
|
||||
|
||||
-#if defined (ARCH_CPU_INTEL) || defined(doxygen)
|
||||
+#if defined (ARCH_CPU_INTEL) || defined(ARCH_CPU_ARM) || defined(doxygen)
|
||||
|
||||
/// This is the smallest value e such that 1+e^2 == 1, using floats.
|
||||
/// True for all IEEE754 chipsets.
|
||||
|
||||
|
||||
|
||||
|
@@ -117,7 +117,7 @@ set path=%BUILD_DIR%\downloads\mingw\mingw64\msys\1.0\bin\;%BUILD_DIR%\downloads
|
||||
mkdir %STAGING%\%BuildDir%%ARCH%R
|
||||
cd %Staging%\%BuildDir%%ARCH%R
|
||||
echo %DATE% %TIME% : Start > %StatusFile%
|
||||
cmake -G "%CMAKE_BUILDER%" -Thost=x64 %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DBUILD_MODE=Release -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/
|
||||
cmake -G "%CMAKE_BUILDER%" %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DBUILD_MODE=Release -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/
|
||||
echo %DATE% %TIME% : Release Configuration done >> %StatusFile%
|
||||
if "%dobuild%" == "1" (
|
||||
msbuild /m "ll.vcxproj" /p:Configuration=Release /fl /flp:logfile=BlenderDeps_llvm.log;Verbosity=normal
|
||||
@@ -130,7 +130,7 @@ if "%NODEBUG%" == "1" goto exit
|
||||
cd %BUILD_DIR%
|
||||
mkdir %STAGING%\%BuildDir%%ARCH%D
|
||||
cd %Staging%\%BuildDir%%ARCH%D
|
||||
cmake -G "%CMAKE_BUILDER%" -Thost=x64 %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DCMAKE_BUILD_TYPE=Debug -DBUILD_MODE=Debug -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/ %CMAKE_DEBUG_OPTIONS%
|
||||
cmake -G "%CMAKE_BUILDER%" %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DCMAKE_BUILD_TYPE=Debug -DBUILD_MODE=Debug -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/ %CMAKE_DEBUG_OPTIONS%
|
||||
echo %DATE% %TIME% : Debug Configuration done >> %StatusFile%
|
||||
if "%dobuild%" == "1" (
|
||||
msbuild /m "ll.vcxproj" /p:Configuration=Debug /fl /flp:logfile=BlenderDeps_llvm.log;;Verbosity=normal
|
||||
|
@@ -19,7 +19,7 @@
|
||||
#=============================================================================
|
||||
|
||||
IF(NOT AUDASPACE_ROOT_DIR AND NOT $ENV{AUDASPACE_ROOT_DIR} STREQUAL "")
|
||||
SET(AUDASPACE_ROOT_DIR $ENV{AUDASPACE_ROOT_DIR})
|
||||
SET(AUDASPACE_ROOT_DIR $ENV{AUDASPACE_ROOT_DIR})
|
||||
ENDIF()
|
||||
|
||||
SET(_audaspace_SEARCH_DIRS
|
||||
|
@@ -1,111 +0,0 @@
|
||||
# - Find Clang library
|
||||
# Find the native Clang includes and library
|
||||
# This module defines
|
||||
# CLANG_INCLUDE_DIRS, where to find AST/AST.h, Set when
|
||||
# CLANG_INCLUDE_DIR is found.
|
||||
# CLANG_LIBRARIES, libraries to link against to use Clang.
|
||||
# CLANG_ROOT_DIR, The base directory to search for Clang.
|
||||
# This can also be an environment variable.
|
||||
# CLANG_FOUND, If false, do not try to use Clang.
|
||||
|
||||
#=============================================================================
|
||||
# Copyright 2021 Blender Foundation.
|
||||
#
|
||||
# Distributed under the OSI-approved BSD 3-Clause License,
|
||||
# see accompanying file BSD-3-Clause-license.txt for details.
|
||||
#=============================================================================
|
||||
|
||||
# If CLANG_ROOT_DIR was defined in the environment, use it.
|
||||
if(NOT CLANG_ROOT_DIR AND NOT $ENV{CLANG_ROOT_DIR} STREQUAL "")
|
||||
set(CLANG_ROOT_DIR $ENV{CLANG_ROOT_DIR})
|
||||
endif()
|
||||
|
||||
set(_CLANG_SEARCH_DIRS
|
||||
${CLANG_ROOT_DIR}
|
||||
/opt/lib/clang
|
||||
)
|
||||
|
||||
find_path(CLANG_INCLUDE_DIR
|
||||
NAMES
|
||||
AST/AST.h
|
||||
HINTS
|
||||
${_CLANG_SEARCH_DIRS}
|
||||
PATH_SUFFIXES
|
||||
include
|
||||
include/clang
|
||||
)
|
||||
|
||||
|
||||
set(_CLANG_FIND_COMPONENTS
|
||||
clangDependencyScanning
|
||||
clangDynamicASTMatchers
|
||||
clangFrontendTool
|
||||
clangStaticAnalyzerFrontend
|
||||
clangHandleCXX
|
||||
clangStaticAnalyzerCheckers
|
||||
clangStaticAnalyzerCore
|
||||
clangToolingASTDiff
|
||||
clangToolingRefactoring
|
||||
clangToolingSyntax
|
||||
clangARCMigrate
|
||||
clangCodeGen
|
||||
clangCrossTU
|
||||
clangIndex
|
||||
clangTooling
|
||||
clangFormat
|
||||
clangToolingInclusions
|
||||
clangRewriteFrontend
|
||||
clangFrontend
|
||||
clangSerialization
|
||||
clangDriver
|
||||
clangToolingCore
|
||||
clangParse
|
||||
clangRewrite
|
||||
clangSema
|
||||
clangEdit
|
||||
clangAnalysis
|
||||
clangASTMatchers
|
||||
clangAST
|
||||
clangLex
|
||||
clangBasic
|
||||
)
|
||||
|
||||
set(_CLANG_LIBRARIES)
|
||||
foreach(COMPONENT ${_CLANG_FIND_COMPONENTS})
|
||||
string(TOUPPER ${COMPONENT} UPPERCOMPONENT)
|
||||
|
||||
find_library(CLANG_${UPPERCOMPONENT}_LIBRARY
|
||||
NAMES
|
||||
${COMPONENT}
|
||||
HINTS
|
||||
${_CLANG_SEARCH_DIRS}
|
||||
PATH_SUFFIXES
|
||||
lib64 lib
|
||||
)
|
||||
list(APPEND _CLANG_LIBRARIES "${CLANG_${UPPERCOMPONENT}_LIBRARY}")
|
||||
endforeach()
|
||||
|
||||
|
||||
# Handle the QUIETLY and REQUIRED arguments and set CLANG_FOUND to TRUE if
|
||||
# all listed variables are TRUE.
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(Clang DEFAULT_MSG
|
||||
_CLANG_LIBRARIES CLANG_INCLUDE_DIR)
|
||||
|
||||
if(CLANG_FOUND)
|
||||
set(CLANG_LIBRARIES ${_CLANG_LIBRARIES})
|
||||
set(CLANG_INCLUDE_DIRS ${CLANG_INCLUDE_DIR})
|
||||
endif()
|
||||
|
||||
mark_as_advanced(
|
||||
CLANG_INCLUDE_DIR
|
||||
)
|
||||
|
||||
foreach(COMPONENT ${_CLANG_FIND_COMPONENTS})
|
||||
string(TOUPPER ${COMPONENT} UPPERCOMPONENT)
|
||||
mark_as_advanced(CLANG_${UPPERCOMPONENT}_LIBRARY)
|
||||
endforeach()
|
||||
|
||||
unset(_CLANG_SEARCH_DIRS)
|
||||
unset(_CLANG_FIND_COMPONENTS)
|
||||
unset(_CLANG_LIBRARIES)
|
@@ -34,17 +34,12 @@ FIND_PATH(EMBREE_INCLUDE_DIR
|
||||
include
|
||||
)
|
||||
|
||||
IF(NOT (APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")))
|
||||
SET(_embree_SIMD_COMPONENTS
|
||||
embree_sse42
|
||||
embree_avx
|
||||
embree_avx2
|
||||
)
|
||||
ENDIF()
|
||||
|
||||
SET(_embree_FIND_COMPONENTS
|
||||
embree3
|
||||
${_embree_SIMD_COMPONENTS}
|
||||
embree_sse42
|
||||
embree_avx
|
||||
embree_avx2
|
||||
lexers
|
||||
math
|
||||
simd
|
||||
@@ -64,14 +59,14 @@ FOREACH(COMPONENT ${_embree_FIND_COMPONENTS})
|
||||
PATH_SUFFIXES
|
||||
lib64 lib
|
||||
)
|
||||
IF(NOT EMBREE_${UPPERCOMPONENT}_LIBRARY)
|
||||
IF(EMBREE_EMBREE3_LIBRARY)
|
||||
IF (NOT EMBREE_${UPPERCOMPONENT}_LIBRARY)
|
||||
IF (EMBREE_EMBREE3_LIBRARY)
|
||||
# If we can't find all the static libraries, try to fall back to the shared library if found.
|
||||
# This allows building with a shared embree library
|
||||
SET(_embree_LIBRARIES ${EMBREE_EMBREE3_LIBRARY})
|
||||
BREAK()
|
||||
ENDIF()
|
||||
ENDIF()
|
||||
ENDIF ()
|
||||
ENDIF ()
|
||||
LIST(APPEND _embree_LIBRARIES "${EMBREE_${UPPERCOMPONENT}_LIBRARY}")
|
||||
ENDFOREACH()
|
||||
|
||||
|
@@ -35,13 +35,11 @@ find_path(HARU_INCLUDE_DIR
|
||||
${_haru_SEARCH_DIRS}
|
||||
PATH_SUFFIXES
|
||||
include/haru
|
||||
include
|
||||
)
|
||||
|
||||
find_library(HARU_LIBRARY
|
||||
NAMES
|
||||
hpdfs
|
||||
hpdf
|
||||
HINTS
|
||||
${_haru_SEARCH_DIRS}
|
||||
PATH_SUFFIXES
|
||||
|
@@ -26,8 +26,7 @@ ENDIF()
|
||||
SET(_opencolorio_FIND_COMPONENTS
|
||||
OpenColorIO
|
||||
yaml-cpp
|
||||
expat
|
||||
pystring
|
||||
tinyxml
|
||||
)
|
||||
|
||||
SET(_opencolorio_SEARCH_DIRS
|
||||
@@ -61,23 +60,12 @@ FOREACH(COMPONENT ${_opencolorio_FIND_COMPONENTS})
|
||||
ENDIF()
|
||||
ENDFOREACH()
|
||||
|
||||
IF(EXISTS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h")
|
||||
# Search twice, because this symbol changed between OCIO 1.x and 2.x
|
||||
FILE(STRINGS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h" _opencolorio_version
|
||||
REGEX "^#define OCIO_VERSION_STR[ \t].*$")
|
||||
IF(NOT _opencolorio_version)
|
||||
file(STRINGS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h" _opencolorio_version
|
||||
REGEX "^#define OCIO_VERSION[ \t].*$")
|
||||
ENDIF()
|
||||
STRING(REGEX MATCHALL "[0-9]+[.0-9]+" OPENCOLORIO_VERSION ${_opencolorio_version})
|
||||
ENDIF()
|
||||
|
||||
# handle the QUIETLY and REQUIRED arguments and set OPENCOLORIO_FOUND to TRUE if
|
||||
# all listed variables are TRUE
|
||||
INCLUDE(FindPackageHandleStandardArgs)
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenColorIO
|
||||
REQUIRED_VARS _opencolorio_LIBRARIES OPENCOLORIO_INCLUDE_DIR
|
||||
VERSION_VAR OPENCOLORIO_VERSION)
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenColorIO DEFAULT_MSG
|
||||
_opencolorio_LIBRARIES OPENCOLORIO_INCLUDE_DIR)
|
||||
|
||||
IF(OPENCOLORIO_FOUND)
|
||||
SET(OPENCOLORIO_LIBRARIES ${_opencolorio_LIBRARIES})
|
||||
@@ -90,7 +78,6 @@ MARK_AS_ADVANCED(
|
||||
OPENCOLORIO_OPENCOLORIO_LIBRARY
|
||||
OPENCOLORIO_TINYXML_LIBRARY
|
||||
OPENCOLORIO_YAML-CPP_LIBRARY
|
||||
OPENCOLORIO_VERSION
|
||||
)
|
||||
|
||||
UNSET(COMPONENT)
|
||||
|
@@ -34,7 +34,7 @@ IF(NOT PYTHON_ROOT_DIR AND NOT $ENV{PYTHON_ROOT_DIR} STREQUAL "")
|
||||
SET(PYTHON_ROOT_DIR $ENV{PYTHON_ROOT_DIR})
|
||||
ENDIF()
|
||||
|
||||
SET(PYTHON_VERSION 3.9 CACHE STRING "Python Version (major and minor only)")
|
||||
SET(PYTHON_VERSION 3.7 CACHE STRING "Python Version (major and minor only)")
|
||||
MARK_AS_ADVANCED(PYTHON_VERSION)
|
||||
|
||||
|
||||
@@ -73,8 +73,8 @@ SET(_python_SEARCH_DIRS
|
||||
# only search for the dirs if we haven't already
|
||||
IF((NOT _IS_INC_DEF) OR (NOT _IS_INC_CONF_DEF) OR (NOT _IS_LIB_DEF) OR (NOT _IS_LIB_PATH_DEF))
|
||||
SET(_PYTHON_ABI_FLAGS_TEST
|
||||
"u; " # release
|
||||
"du;d" # debug
|
||||
"m;mu;u; " # release
|
||||
"dm;dmu;du;d" # debug
|
||||
)
|
||||
|
||||
FOREACH(_CURRENT_ABI_FLAGS ${_PYTHON_ABI_FLAGS_TEST})
|
||||
|
@@ -1,49 +0,0 @@
|
||||
# - Find sse2neon library
|
||||
# Find the native sse2neon includes and library
|
||||
# This module defines
|
||||
# SSE2NEON_INCLUDE_DIRS, where to find sse2neon.h, Set when
|
||||
# SSE2NEON_INCLUDE_DIR is found.
|
||||
# SSE2NEON_ROOT_DIR, The base directory to search for sse2neon.
|
||||
# This can also be an environment variable.
|
||||
# SSE2NEON_FOUND, If false, do not try to use sse2neon.
|
||||
|
||||
#=============================================================================
|
||||
# Copyright 2020 Blender Foundation.
|
||||
#
|
||||
# Distributed under the OSI-approved BSD 3-Clause License,
|
||||
# see accompanying file BSD-3-Clause-license.txt for details.
|
||||
#=============================================================================
|
||||
|
||||
# If SSE2NEON_ROOT_DIR was defined in the environment, use it.
|
||||
IF(NOT SSE2NEON_ROOT_DIR AND NOT $ENV{SSE2NEON_ROOT_DIR} STREQUAL "")
|
||||
SET(SSE2NEON_ROOT_DIR $ENV{SSE2NEON_ROOT_DIR})
|
||||
ENDIF()
|
||||
|
||||
SET(_sse2neon_SEARCH_DIRS
|
||||
${SSE2NEON_ROOT_DIR}
|
||||
)
|
||||
|
||||
FIND_PATH(SSE2NEON_INCLUDE_DIR
|
||||
NAMES
|
||||
sse2neon.h
|
||||
HINTS
|
||||
${_sse2neon_SEARCH_DIRS}
|
||||
PATH_SUFFIXES
|
||||
include
|
||||
)
|
||||
|
||||
# handle the QUIETLY and REQUIRED arguments and set SSE2NEON_FOUND to TRUE if
|
||||
# all listed variables are TRUE
|
||||
INCLUDE(FindPackageHandleStandardArgs)
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS(sse2neon DEFAULT_MSG
|
||||
SSE2NEON_INCLUDE_DIR)
|
||||
|
||||
IF(SSE2NEON_FOUND)
|
||||
SET(SSE2NEON_INCLUDE_DIRS ${SSE2NEON_INCLUDE_DIR})
|
||||
ENDIF(SSE2NEON_FOUND)
|
||||
|
||||
MARK_AS_ADVANCED(
|
||||
SSE2NEON_INCLUDE_DIR
|
||||
)
|
||||
|
||||
UNSET(_sse2neon_SEARCH_DIRS)
|
@@ -272,7 +272,7 @@ cmake_policy(SET CMP0057 NEW) # if IN_LIST
#------------------------------------------------------------------------------
function(gtest_add_tests)

  if(ARGC LESS 1)
  if (ARGC LESS 1)
    message(FATAL_ERROR "No arguments supplied to gtest_add_tests()")
  endif()

@@ -298,7 +298,7 @@ function(gtest_add_tests)
    set(autoAddSources YES)
  else()
    # Non-keyword syntax, convert to keyword form
    if(ARGC LESS 3)
    if (ARGC LESS 3)
      message(FATAL_ERROR "gtest_add_tests() without keyword options requires at least 3 arguments")
    endif()
    set(ARGS_TARGET "${ARGV0}")

@@ -389,7 +389,7 @@ function(blender_add_lib
endfunction()

function(blender_add_test_suite)
  if(ARGC LESS 1)
  if (ARGC LESS 1)
    message(FATAL_ERROR "No arguments supplied to blender_add_test_suite()")
  endif()

@@ -668,6 +668,12 @@ macro(TEST_SSE_SUPPORT
      #include <xmmintrin.h>
      int main(void) { __m128 v = _mm_setzero_ps(); return 0; }"
      SUPPORT_SSE_BUILD)

    if(SUPPORT_SSE_BUILD)
      message(STATUS "SSE Support: detected.")
    else()
      message(STATUS "SSE Support: missing.")
    endif()
  endif()

  if(NOT DEFINED SUPPORT_SSE2_BUILD)
@@ -676,21 +682,17 @@ macro(TEST_SSE_SUPPORT
      #include <emmintrin.h>
      int main(void) { __m128d v = _mm_setzero_pd(); return 0; }"
      SUPPORT_SSE2_BUILD)

    if(SUPPORT_SSE2_BUILD)
      message(STATUS "SSE2 Support: detected.")
    else()
      message(STATUS "SSE2 Support: missing.")
    endif()
  endif()

  unset(CMAKE_REQUIRED_FLAGS)
endmacro()

macro(TEST_NEON_SUPPORT)
  if(NOT DEFINED SUPPORT_NEON_BUILD)
    include(CheckCXXSourceCompiles)
    check_cxx_source_compiles(
      "#include <arm_neon.h>
      int main() {return vaddvq_s32(vdupq_n_s32(1));}"
      SUPPORT_NEON_BUILD)
  endif()
endmacro()

# Only print message if running CMake first time
macro(message_first_run)
  if(FIRST_RUN)

@@ -1207,9 +1209,9 @@ function(find_python_package
        site-packages
        dist-packages
        vendor-packages
      NO_DEFAULT_PATH
      DOC
        "Path to python site-packages or dist-packages containing '${package}' module"
      NO_DEFAULT_PATH
      DOC
        "Path to python site-packages or dist-packages containing '${package}' module"
    )
    mark_as_advanced(PYTHON_${_upper_package}_PATH)

@@ -131,22 +131,22 @@ if(WITH_CODEC_SNDFILE)
endif()

if(WITH_PYTHON)
  # we use precompiled libraries for py 3.9 and up by default
  set(PYTHON_VERSION 3.9)
  # we use precompiled libraries for py 3.7 and up by default
  set(PYTHON_VERSION 3.7)
  if(NOT WITH_PYTHON_MODULE AND NOT WITH_PYTHON_FRAMEWORK)
    # normally cached but not since we include them with blender
    set(PYTHON_INCLUDE_DIR "${LIBDIR}/python/include/python${PYTHON_VERSION}")
    set(PYTHON_EXECUTABLE "${LIBDIR}/python/bin/python${PYTHON_VERSION}")
    set(PYTHON_LIBRARY ${LIBDIR}/python/lib/libpython${PYTHON_VERSION}.a)
    set(PYTHON_INCLUDE_DIR "${LIBDIR}/python/include/python${PYTHON_VERSION}m")
    set(PYTHON_EXECUTABLE "${LIBDIR}/python/bin/python${PYTHON_VERSION}m")
    set(PYTHON_LIBRARY ${LIBDIR}/python/lib/libpython${PYTHON_VERSION}m.a)
    set(PYTHON_LIBPATH "${LIBDIR}/python/lib/python${PYTHON_VERSION}")
    # set(PYTHON_LINKFLAGS "-u _PyMac_Error") # won't build with this enabled
  else()
    # module must be compiled against Python framework
    set(_py_framework "/Library/Frameworks/Python.framework/Versions/${PYTHON_VERSION}")

    set(PYTHON_INCLUDE_DIR "${_py_framework}/include/python${PYTHON_VERSION}")
    set(PYTHON_EXECUTABLE "${_py_framework}/bin/python${PYTHON_VERSION}")
    set(PYTHON_LIBPATH "${_py_framework}/lib/python${PYTHON_VERSION}/config-${PYTHON_VERSION}")
    set(PYTHON_INCLUDE_DIR "${_py_framework}/include/python${PYTHON_VERSION}m")
    set(PYTHON_EXECUTABLE "${_py_framework}/bin/python${PYTHON_VERSION}m")
    set(PYTHON_LIBPATH "${_py_framework}/lib/python${PYTHON_VERSION}/config-${PYTHON_VERSION}m")
    # set(PYTHON_LIBRARY python${PYTHON_VERSION})
    # set(PYTHON_LINKFLAGS "-u _PyMac_Error -framework Python") # won't build with this enabled

@@ -206,13 +206,6 @@ set(PLATFORM_LINKFLAGS

list(APPEND PLATFORM_LINKLIBS c++)

if(WITH_OPENIMAGEDENOISE)
  if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
    # OpenImageDenoise uses BNNS from the Accelerate framework.
    string(APPEND PLATFORM_LINKFLAGS " -framework Accelerate")
  endif()
endif()

if(WITH_JACK)
  string(APPEND PLATFORM_LINKFLAGS " -F/Library/Frameworks -weak_framework jackmp")
endif()
@@ -304,12 +297,7 @@ if(WITH_OPENIMAGEIO)
endif()

if(WITH_OPENCOLORIO)
  find_package(OpenColorIO 2.0.0)

  if(NOT OPENCOLORIO_FOUND)
    set(WITH_OPENCOLORIO OFF)
    message(STATUS "OpenColorIO not found")
  endif()
  find_package(OpenColorIO)
endif()

if(WITH_OPENVDB)
@@ -321,11 +309,8 @@ if(WITH_OPENVDB)
endif()

if(WITH_NANOVDB)
  find_package(NanoVDB)
endif()

if(WITH_CPU_SIMD AND SUPPORT_NEON_BUILD)
  find_package(sse2neon)
  set(NANOVDB ${LIBDIR}/nanovdb)
  set(NANOVDB_INCLUDE_DIR ${NANOVDB}/include)
endif()

if(WITH_LLVM)
@@ -333,13 +318,6 @@ if(WITH_LLVM)
  if(NOT LLVM_FOUND)
    message(FATAL_ERROR "LLVM not found.")
  endif()
  if(WITH_CLANG)
    find_package(Clang)
    if(NOT CLANG_FOUND)
      message(FATAL_ERROR "Clang not found.")
    endif()
  endif()

endif()

if(WITH_CYCLES_OSL)
@@ -352,7 +330,7 @@ if(WITH_CYCLES_OSL)
  list(APPEND OSL_LIBRARIES ${OSL_LIB_COMP} -force_load ${OSL_LIB_EXEC} ${OSL_LIB_QUERY})
  find_path(OSL_INCLUDE_DIR OSL/oslclosure.h PATHS ${CYCLES_OSL}/include)
  find_program(OSL_COMPILER NAMES oslc PATHS ${CYCLES_OSL}/bin)
  find_path(OSL_SHADER_DIR NAMES stdosl.h PATHS ${CYCLES_OSL}/share/OSL/shaders)
  find_path(OSL_SHADER_DIR NAMES stdosl.h PATHS ${CYCLES_OSL}/shaders)

  if(OSL_INCLUDE_DIR AND OSL_LIBRARIES AND OSL_COMPILER AND OSL_SHADER_DIR)
    set(OSL_FOUND TRUE)
@@ -362,6 +340,12 @@ if(WITH_CYCLES_OSL)
  endif()
endif()

if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
  set(WITH_CYCLES_EMBREE OFF)
  set(WITH_OPENIMAGEDENOISE OFF)
  set(WITH_CPU_SSE OFF)
endif()

if(WITH_CYCLES_EMBREE)
  find_package(Embree 3.8.0 REQUIRED)
  # Increase stack size for Embree, only works for executables.
@@ -410,7 +394,7 @@ if(WITH_OPENMP)
    set(OPENMP_FOUND ON)
    set(OpenMP_C_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'")
    set(OpenMP_CXX_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'")
    set(OpenMP_LINKER_FLAGS "-L'${LIBDIR}/openmp/lib' -lomp")
    string(APPEND CMAKE_EXE_LINKER_FLAGS " -L'${LIBDIR}/openmp/lib' -lomp")

    # Copy libomp.dylib to allow executables like datatoc and tests to work.
    # `@executable_path/../Resources/lib/` is a default dylib search path.
@@ -20,7 +20,6 @@

# Xcode and system configuration for Apple.

# Detect processor architecture.
if(NOT CMAKE_OSX_ARCHITECTURES)
  execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCHITECTURE OUTPUT_STRIP_TRAILING_WHITESPACE)
  message(STATUS "Detected native architecture ${ARCHITECTURE}.")
@@ -29,93 +28,56 @@ if(NOT CMAKE_OSX_ARCHITECTURES)
    FORCE)
endif()

# Detect developer directory. Depending on configuration this may be either
# an Xcode or Command Line Tools installation.
if(NOT DEFINED OSX_SYSTEM)
  execute_process(
    COMMAND xcodebuild -version -sdk macosx SDKVersion
    OUTPUT_VARIABLE OSX_SYSTEM
    OUTPUT_STRIP_TRAILING_WHITESPACE)
endif()

# workaround for incorrect cmake xcode lookup for developer previews - XCODE_VERSION does not
# take xcode-select path into account but would always look into /Applications/Xcode.app
# while dev versions are named Xcode<version>-DP<preview_number>
execute_process(
  COMMAND xcode-select --print-path
  OUTPUT_VARIABLE XCODE_DEVELOPER_DIR OUTPUT_STRIP_TRAILING_WHITESPACE)
  OUTPUT_VARIABLE XCODE_CHECK OUTPUT_STRIP_TRAILING_WHITESPACE)
string(REPLACE "/Contents/Developer" "" XCODE_BUNDLE ${XCODE_CHECK}) # truncate to bundlepath in any case

# Detect Xcode version. It is provided by the Xcode generator but not
# Unix Makefiles or Ninja.
if(NOT ${CMAKE_GENERATOR} MATCHES "Xcode")
  # Unix makefile generator does not fill XCODE_VERSION var, so we get it with a command.
  # Note that `xcodebuild -version` gives output in two lines: first line will include
  # Xcode version, second one will include build number. We are only interested in the
  # first line. Here is an example of the output:
  # former one. Here is an example of the output:
  #   Xcode 11.4
  #   Build version 11E146
  # The expected XCODE_VERSION in this case is 11.4.
  execute_process(
    COMMAND xcodebuild -version
    OUTPUT_VARIABLE _xcode_vers_build_nr
    RESULT_VARIABLE _xcode_vers_result
    ERROR_QUIET)

  if(_xcode_vers_result EQUAL 0)
    # Convert output to a single line by replacing newlines with spaces.
    # This is needed because regex replace can not operate through the newline character
    # and applies substitutions for each individual lines.
    string(REPLACE "\n" " " _xcode_vers_build_nr_single_line "${_xcode_vers_build_nr}")
    string(REGEX REPLACE "(.*)Xcode ([0-9\\.]+).*" "\\2" XCODE_VERSION "${_xcode_vers_build_nr_single_line}")
    unset(_xcode_vers_build_nr_single_line)
  endif()
  execute_process(COMMAND xcodebuild -version OUTPUT_VARIABLE XCODE_VERS_BUILD_NR)

  unset(_xcode_vers_build_nr)
  unset(_xcode_vers_result)
  # Convert output to a single line by replacling newlines with spaces.
  # This is needed because regex replace can not operate through the newline character
  # and applies substitutions for each individual lines.
  string(REPLACE "\n" " " XCODE_VERS_BUILD_NR_SINGLE_LINE "${XCODE_VERS_BUILD_NR}")

  string(REGEX REPLACE "(.*)Xcode ([0-9\\.]+).*" "\\2" XCODE_VERSION "${XCODE_VERS_BUILD_NR_SINGLE_LINE}")

  unset(XCODE_VERS_BUILD_NR)
  unset(XCODE_VERS_BUILD_NR_SINGLE_LINE)
endif()

if(XCODE_VERSION)
  # Construct SDKs path ourselves, because xcode-select path could be ambiguous.
  # Both /Applications/Xcode.app/Contents/Developer or /Applications/Xcode.app would be allowed.
  set(XCODE_SDK_DIR ${XCODE_DEVELOPER_DIR}/Platforms/MacOSX.platform//Developer/SDKs)

  # Detect SDK version to use
  if(NOT DEFINED OSX_SYSTEM)
    execute_process(
      COMMAND xcodebuild -version -sdk macosx SDKVersion
      OUTPUT_VARIABLE OSX_SYSTEM
      OUTPUT_STRIP_TRAILING_WHITESPACE)
  endif()

  message(STATUS "Detected OS X ${OSX_SYSTEM} and Xcode ${XCODE_VERSION} at ${XCODE_DEVELOPER_DIR}")
  message(STATUS "SDKs Directory: " ${XCODE_SDK_DIR})
else()
  # If no Xcode version found, try detecting command line tools.
  execute_process(
    COMMAND pkgutil --pkg-info=com.apple.pkg.CLTools_Executables
    OUTPUT_VARIABLE _cltools_pkg_info
    OUTPUT_STRIP_TRAILING_WHITESPACE
    RESULT_VARIABLE _cltools_pkg_info_result
    ERROR_QUIET)

  if(_cltools_pkg_info_result EQUAL 0)
    # Extract version.
    string(REGEX REPLACE ".*version: ([0-9]+)\\.([0-9]+).*" "\\1.\\2" XCODE_VERSION "${_cltools_pkg_info}")
    # SDK directory.
    set(XCODE_SDK_DIR "${XCODE_DEVELOPER_DIR}/SDKs")

    # Detect SDK version to use.
    if(NOT DEFINED OSX_SYSTEM)
      execute_process(
        COMMAND xcrun --show-sdk-version
        OUTPUT_VARIABLE OSX_SYSTEM
        OUTPUT_STRIP_TRAILING_WHITESPACE)
    endif()

    message(STATUS "Detected OS X ${OSX_SYSTEM} and Command Line Tools ${XCODE_VERSION} at ${XCODE_DEVELOPER_DIR}")
    message(STATUS "SDKs Directory: " ${XCODE_SDK_DIR})
  else()
    message(FATAL_ERROR "No Xcode or Command Line Tools detected")
  endif()

  unset( _cltools_pkg_info)
  unset(__cltools_pkg_info_result)
endif()
message(STATUS "Detected OS X ${OSX_SYSTEM} and Xcode ${XCODE_VERSION} at ${XCODE_BUNDLE}")

# Require a relatively recent Xcode version.
if(${XCODE_VERSION} VERSION_LESS 10.0)
  message(FATAL_ERROR "Only Xcode version 10.0 and newer is supported")
endif()

# note: xcode-select path could be ambiguous,
# cause /Applications/Xcode.app/Contents/Developer or /Applications/Xcode.app would be allowed
# so i use a selfcomposed bundlepath here
set(OSX_SYSROOT_PREFIX ${XCODE_BUNDLE}/Contents/Developer/Platforms/MacOSX.platform)
message(STATUS "OSX_SYSROOT_PREFIX: " ${OSX_SYSROOT_PREFIX})

# Collect list of OSX system versions which will be used to detect path to corresponding SDK.
# Start with macOS SDK version reported by xcodebuild and include possible extra ones.
#
@@ -139,9 +101,10 @@ endif()
# Loop through all possible versions and pick the first one which resolves to a valid SDK path.
set(OSX_SDK_PATH)
set(OSX_SDK_FOUND FALSE)
set(OSX_SDK_PREFIX ${OSX_SYSROOT_PREFIX}/Developer/SDKs)
set(OSX_SDKROOT)
foreach(OSX_SDK_VERSION ${OSX_SDK_TEST_VERSIONS})
  set(CURRENT_OSX_SDK_PATH "${XCODE_SDK_DIR}/MacOSX${OSX_SDK_VERSION}.sdk")
  set(CURRENT_OSX_SDK_PATH "${OSX_SDK_PREFIX}/MacOSX${OSX_SDK_VERSION}.sdk")
  if(EXISTS ${CURRENT_OSX_SDK_PATH})
    set(OSX_SDK_PATH "${CURRENT_OSX_SDK_PATH}")
    set(OSX_SDKROOT macosx${OSX_SDK_VERSION})
@@ -149,6 +112,7 @@ foreach(OSX_SDK_VERSION ${OSX_SDK_TEST_VERSIONS})
    break()
  endif()
endforeach()
unset(OSX_SDK_PREFIX)
unset(OSX_SDK_TEST_VERSIONS)

if(NOT OSX_SDK_FOUND)
@@ -70,7 +70,6 @@ if(EXISTS ${LIBDIR})
  set(BOOST_LIBRARYDIR ${LIBDIR}/boost/lib)
  set(Boost_NO_SYSTEM_PATHS ON)
  set(OPENEXR_ROOT_DIR ${LIBDIR}/openexr)
  set(CLANG_ROOT_DIR ${LIBDIR}/llvm)
endif()

if(WITH_STATIC_LIBS)
@@ -285,10 +284,6 @@ if(WITH_NANOVDB)
  endif()
endif()

if(WITH_CPU_SIMD AND SUPPORT_NEON_BUILD)
  find_package_wrapper(sse2neon)
endif()

if(WITH_ALEMBIC)
  find_package_wrapper(Alembic)

@@ -358,7 +353,7 @@ endif()
if(WITH_PUGIXML)
  find_package_wrapper(PugiXML)

  if(NOT PUGIXML_FOUND)
  if (NOT PUGIXML_FOUND)
    set(WITH_PUGIXML OFF)
    message(STATUS "PugiXML not found, disabling WITH_PUGIXML")
  endif()
@@ -390,7 +385,7 @@ if(WITH_OPENIMAGEIO)
endif()

if(WITH_OPENCOLORIO)
  find_package_wrapper(OpenColorIO 2.0.0)
  find_package_wrapper(OpenColorIO)

  set(OPENCOLORIO_LIBRARIES ${OPENCOLORIO_LIBRARIES})
  set(OPENCOLORIO_LIBPATH) # TODO, remove and reference the absolute path everywhere
@@ -421,9 +416,7 @@ if(WITH_LLVM)
  endif()

  find_package_wrapper(LLVM)
  if(WITH_CLANG)
    find_package_wrapper(Clang)
  endif()

  # Symbol conflicts with same UTF library used by OpenCollada
  if(EXISTS ${LIBDIR})
    if(WITH_OPENCOLLADA AND (${LLVM_VERSION} VERSION_LESS "4.0.0"))
@@ -433,13 +426,7 @@ if(WITH_LLVM)

  if(NOT LLVM_FOUND)
    set(WITH_LLVM OFF)
    set(WITH_CLANG OFF)
    message(STATUS "LLVM not found")
  else()
    if(NOT CLANG_FOUND)
      set(WITH_CLANG OFF)
      message(STATUS "Clang not found")
    endif()
  endif()
endif()

@@ -483,14 +470,6 @@ if(WITH_POTRACE)
  endif()
endif()

if(WITH_HARU)
  find_package_wrapper(Haru)
  if(NOT HARU_FOUND)
    message(WARNING "Haru not found, disabling WITH_HARU")
    set(WITH_HARU OFF)
  endif()
endif()

if(EXISTS ${LIBDIR})
  without_system_libs_end()
endif()
@@ -49,7 +49,7 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
    if(NOT EXISTS "${CLANG_OPENMP_DLL}")
      message(FATAL_ERROR "Clang OpenMP library (${CLANG_OPENMP_DLL}) not found.")
    endif()
    set(OpenMP_LINKER_FLAGS "\"${CLANG_OPENMP_LIB}\"")
    string(APPEND CMAKE_EXE_LINKER_FLAGS " \"${CLANG_OPENMP_LIB}\"")
  endif()
  if(WITH_WINDOWS_STRIPPED_PDB)
    message(WARNING "stripped pdb not supported with clang, disabling..")
@@ -119,7 +119,6 @@ string(APPEND CMAKE_MODULE_LINKER_FLAGS " /SAFESEH:NO /ignore:4099")
list(APPEND PLATFORM_LINKLIBS
  ws2_32 vfw32 winmm kernel32 user32 gdi32 comdlg32 Comctl32 version
  advapi32 shfolder shell32 ole32 oleaut32 uuid psapi Dbghelp Shlwapi
  pathcch
)

if(WITH_INPUT_IME)
@@ -422,7 +421,7 @@ if(WITH_JACK)
endif()

if(WITH_PYTHON)
  set(PYTHON_VERSION 3.9) # CACHE STRING)
  set(PYTHON_VERSION 3.7) # CACHE STRING)

  string(REPLACE "." "" _PYTHON_VERSION_NO_DOTS ${PYTHON_VERSION})
  set(PYTHON_LIBRARY ${LIBDIR}/python/${_PYTHON_VERSION_NO_DOTS}/libs/python${_PYTHON_VERSION_NO_DOTS}.lib)
@@ -455,18 +454,10 @@ if(WITH_BOOST)
    set(BOOST ${LIBDIR}/boost)
    set(BOOST_INCLUDE_DIR ${BOOST}/include)
    set(BOOST_LIBPATH ${BOOST}/lib)
    set(BOOST_VERSION_HEADER ${BOOST_INCLUDE_DIR}/boost/version.hpp)
    if(EXISTS ${BOOST_VERSION_HEADER})
      file(STRINGS "${BOOST_VERSION_HEADER}" BOOST_LIB_VERSION REGEX "#define BOOST_LIB_VERSION ")
      if(BOOST_LIB_VERSION MATCHES "#define BOOST_LIB_VERSION \"([0-9_]+)\"")
        set(BOOST_VERSION "${CMAKE_MATCH_1}")
      endif()
    if(CMAKE_CL_64)
      set(BOOST_POSTFIX "vc141-mt-x64-1_70.lib")
      set(BOOST_DEBUG_POSTFIX "vc141-mt-gd-x64-1_70.lib")
    endif()
    if(NOT BOOST_VERSION)
      message(FATAL_ERROR "Unable to determine Boost version")
    endif()
    set(BOOST_POSTFIX "vc141-mt-x64-${BOOST_VERSION}.lib")
    set(BOOST_DEBUG_POSTFIX "vc141-mt-gd-x64-${BOOST_VERSION}.lib")
    set(BOOST_LIBRARIES
      optimized ${BOOST_LIBPATH}/libboost_date_time-${BOOST_POSTFIX}
      optimized ${BOOST_LIBPATH}/libboost_filesystem-${BOOST_POSTFIX}
@@ -509,7 +500,7 @@ if(WITH_OPENIMAGEIO)
  set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG})

  set(OPENIMAGEIO_DEFINITIONS "-DUSE_TBB=0")
  set(OPENCOLORIO_DEFINITIONS "-DDOpenColorIO_SKIP_IMPORTS")
  set(OPENCOLORIO_DEFINITIONS "-DOCIO_STATIC_BUILD")
  set(OPENIMAGEIO_IDIFF "${OPENIMAGEIO}/bin/idiff.exe")
  add_definitions(-DOIIO_STATIC_DEFINE)
  add_definitions(-DOIIO_NO_SSE=1)
@@ -547,13 +538,11 @@ if(WITH_OPENCOLORIO)
  set(OPENCOLORIO_LIBPATH ${OPENCOLORIO}/lib)
  set(OPENCOLORIO_LIBRARIES
    optimized ${OPENCOLORIO_LIBPATH}/OpenColorIO.lib
    optimized ${OPENCOLORIO_LIBPATH}/tinyxml.lib
    optimized ${OPENCOLORIO_LIBPATH}/libyaml-cpp.lib
    optimized ${OPENCOLORIO_LIBPATH}/libexpatMD.lib
    optimized ${OPENCOLORIO_LIBPATH}/pystring.lib
    debug ${OPENCOLORIO_LIBPATH}/OpencolorIO_d.lib
    debug ${OPENCOLORIO_LIBPATH}/tinyxml_d.lib
    debug ${OPENCOLORIO_LIBPATH}/libyaml-cpp_d.lib
    debug ${OPENCOLORIO_LIBPATH}/libexpatdMD.lib
    debug ${OPENCOLORIO_LIBPATH}/pystring_d.lib
  )
  set(OPENCOLORIO_DEFINITIONS)
endif()
@@ -672,10 +661,6 @@ endif()
if(WITH_CYCLES_OSL)
  set(CYCLES_OSL ${LIBDIR}/osl CACHE PATH "Path to OpenShadingLanguage installation")
  set(OSL_SHADER_DIR ${CYCLES_OSL}/shaders)
  # Shaders have moved around a bit between OSL versions, check multiple locations
  if(NOT EXISTS "${OSL_SHADER_DIR}")
    set(OSL_SHADER_DIR ${CYCLES_OSL}/share/OSL/shaders)
  endif()
  find_library(OSL_LIB_EXEC NAMES oslexec PATHS ${CYCLES_OSL}/lib)
  find_library(OSL_LIB_COMP NAMES oslcomp PATHS ${CYCLES_OSL}/lib)
  find_library(OSL_LIB_QUERY NAMES oslquery PATHS ${CYCLES_OSL}/lib)
@@ -796,14 +781,7 @@ if(WITH_XR_OPENXR)
  set(XR_OPENXR_SDK ${LIBDIR}/xr_openxr_sdk)
  set(XR_OPENXR_SDK_LIBPATH ${LIBDIR}/xr_openxr_sdk/lib)
  set(XR_OPENXR_SDK_INCLUDE_DIR ${XR_OPENXR_SDK}/include)
  # This is the old name of this library, it is checked to
  # support the transition between the old and new lib versions
  # this can be removed after the next lib update.
  if(EXISTS ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
    set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
  else()
    set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loaderd.lib)
  endif()
  set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
else()
  message(WARNING "OpenXR-SDK was not found, disabling WITH_XR_OPENXR")
  set(WITH_XR_OPENXR OFF)
@@ -220,12 +220,14 @@ def cmake_advanced_info():


def cmake_cache_var(var):
    with open(os.path.join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8') as cache_file:
        lines = [
            l_strip for l in cache_file
            if (l_strip := l.strip())
            if not l_strip.startswith(("//", "#"))
        ]
    cache_file = open(join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8')
    lines = [
        l_strip for l in cache_file
        for l_strip in (l.strip(),)
        if l_strip
        if not l_strip.startswith(("//", "#"))
    ]
    cache_file.close()

    for l in lines:
        if l.split(":")[0] == var:
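The rewritten comprehension leans on an assignment expression (Python 3.8+) to strip and filter in a single pass; a small self-contained sketch of the same pattern, using made-up input rather than a real CMakeCache.txt:

    raw = ["// comment", "  CMAKE_BUILD_TYPE:STRING=Release  ", ""]
    # Strip each line once, keep it only if non-empty and not a comment.
    lines = [
        l_strip for l in raw
        if (l_strip := l.strip())
        if not l_strip.startswith(("//", "#"))
    ]
    # lines == ["CMAKE_BUILD_TYPE:STRING=Release"]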
@@ -1,64 +0,0 @@
"""
Dependency graph: Object.to_curve()
+++++++++++++++++++++++++++++++++++

Function to get a curve from text and curve objects. It is typically used by exporters, render
engines, and tools that need to access the curve representing the object.

The function takes the evaluated dependency graph as a required parameter and optionally a boolean
apply_modifiers which defaults to false. If apply_modifiers is true and the object is a curve object,
the spline deform modifiers are applied on the control points. Note that constructive modifiers and
modifiers that are not spline-enabled will not be applied. So modifiers like Array will not be applied
and deform modifiers that have Apply On Spline disabled will not be applied.

If the object is a text object. The text will be converted into a 3D curve and returned. Modifiers are
never applied on text objects and apply_modifiers will be ignored. If the object is neither a curve nor
a text object, an error will be reported.

.. note:: The resulting curve is owned by the object. It can be freed by calling `object.to_curve_clear()`.
.. note::
   The resulting curve must be treated as temporary, and can not be referenced from objects in the main
   database.
"""
import bpy


class OBJECT_OT_object_to_curve(bpy.types.Operator):
    """Convert selected object to curve and show number of splines"""
    bl_label = "DEG Object to Curve"
    bl_idname = "object.object_to_curve"

    def execute(self, context):
        # Access input original object.
        obj = context.object
        if obj is None:
            self.report({'INFO'}, "No active object to convert to curve")
            return {'CANCELLED'}
        if obj.type not in {'CURVE', 'FONT'}:
            self.report({'INFO'}, "Object can not be converted to curve")
            return {'CANCELLED'}
        depsgraph = context.evaluated_depsgraph_get()
        # Invoke to_curve() without applying modifiers.
        curve_without_modifiers = obj.to_curve(depsgraph)
        self.report({'INFO'}, f"{len(curve_without_modifiers.splines)} splines in a new curve without modifiers.")
        # Remove temporary curve.
        obj.to_curve_clear()
        # Invoke to_curve() with applying modifiers.
        curve_with_modifiers = obj.to_curve(depsgraph, apply_modifiers = True)
        self.report({'INFO'}, f"{len(curve_with_modifiers.splines)} splines in new curve with modifiers.")
        # Remove temporary curve.
        obj.to_curve_clear()
        return {'FINISHED'}


def register():
    bpy.utils.register_class(OBJECT_OT_object_to_curve)


def unregister():
    bpy.utils.unregister_class(OBJECT_OT_object_to_curve)


if __name__ == "__main__":
    register()
@@ -1,2 +1,2 @@
Sphinx==3.5.1
sphinx_rtd_theme==0.5.1
Sphinx==3.1.1
sphinx_rtd_theme==0.5.0
@@ -186,7 +186,7 @@ For example, if you want to access the texture of a brush via Python to adjust i
#. From the Sidebar expand the Brush Settings panel's *Texture* subpanel and add a new texture.
   *Notice the texture data-block menu itself doesn't have very useful links (you can check the tooltips).*
#. The contrast setting isn't exposed in the Sidebar, so view the texture in the
   :ref:`Properties Editor <blender_manual:bpy.types.Texture.contrast>`.
   :ref:`Properties Editor <blender_manual:bpy.types.Texture.contrast`
#. Open the context menu of the contrast field and select *Online Python Reference*.
   This takes you to ``bpy.types.Texture.contrast``. Now you can see that ``contrast`` is a property of texture.
#. To find out how to access the texture from the brush check on the references at the bottom of the page.
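Putting those steps together in the Python Console, a minimal sketch (it assumes the active sculpt brush already has a texture assigned; the paint-settings path would differ in other modes):

    import bpy

    # Hypothetical illustration: tweak the contrast of the active sculpt brush's texture.
    brush = bpy.context.tool_settings.sculpt.brush
    if brush and brush.texture:
        brush.texture.contrast = 1.25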
@@ -35,7 +35,7 @@ but not to fully cover each topic.

A quick list of helpful things to know before starting:

- Enable :ref:`Developer Extra <blender_manual:prefs-interface-dev-extras>`
- Enable :ref:`Developer Extra <blender_manual:prefs-interface-dev-extras`
  and :ref:`Python Tooltips <blender_manual:prefs-interface-tooltips-python>`.
- The :ref:`Python Console <blender_manual:bpy.types.SpaceConsole>`
  is great for testing one-liners; it has autocompletion so you can inspect the API quickly.
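For example, with Python Tooltips enabled, the data path shown in a tooltip can be tried directly in the console; a one-liner of the kind meant here (assuming an active object exists):

    >>> bpy.context.object.location.z += 1.0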
@@ -75,12 +75,12 @@ def rna_info_BuildRNAInfo_cache():
    rna_info_BuildRNAInfo_cache.ret = None
# --- end rna_info cache

# import rpdb2; rpdb2.start_embedded_debugger('test')
import os
import sys
import inspect
import shutil
import logging
import warnings

from textwrap import indent

@@ -227,7 +227,6 @@ else:
        "blf",
        "bl_math",
        "imbuf",
        "imbuf.types",
        "bmesh",
        "bmesh.ops",
        "bmesh.types",
@@ -251,9 +250,6 @@ else:
        "gpu.types",
        "gpu.matrix",
        "gpu.select",
        "gpu.shader",
        "gpu.state",
        "gpu.texture",
        "gpu_extras",
        "idprop.types",
        "mathutils",
@@ -1208,7 +1204,7 @@ def pycontext2sphinx(basepath):
    # for member in sorted(unique):
    #     print(' "%s": ("", False),' % member)
    if len(context_type_map) > len(unique):
        warnings.warn(
        raise Exception(
            "Some types are not used: %s" %
            str([member for member in context_type_map if member not in unique]))
    else:
@@ -1718,6 +1714,7 @@ except ModuleNotFoundError:

    fw("if html_theme == 'sphinx_rtd_theme':\n")
    fw("    html_theme_options = {\n")
    fw("        'canonical_url': 'https://docs.blender.org/api/current/',\n")
    # fw("        'analytics_id': '',\n")
    # fw("        'collapse_navigation': True,\n")
    fw("        'sticky_navigation': False,\n")
@@ -1729,7 +1726,6 @@ except ModuleNotFoundError:
    # not helpful since the source is generated, adds to upload size.
    fw("html_copy_source = False\n")
    fw("html_show_sphinx = False\n")
    fw("html_baseurl = 'https://docs.blender.org/api/current/'\n")
    fw("html_use_opensearch = 'https://docs.blender.org/api/current'\n")
    fw("html_split_index = True\n")
    fw("html_static_path = ['static']\n")
@@ -1979,14 +1975,11 @@ def write_rst_importable_modules(basepath):
        "aud": "Audio System",
        "blf": "Font Drawing",
        "imbuf": "Image Buffer",
        "imbuf.types": "Image Buffer Types",
        "gpu": "GPU Shader Module",
        "gpu.types": "GPU Types",
        "gpu.matrix": "GPU Matrix Utilities",
        "gpu.select": "GPU Select Utilities",
        "gpu.shader": "GPU Shader Utilities",
        "gpu.state": "GPU State Utilities",
        "gpu.texture": "GPU Texture Utilities",
        "gpu.matrix": "GPU Matrix",
        "gpu.select": "GPU Select",
        "gpu.shader": "GPU Shader",
        "bmesh": "BMesh Module",
        "bmesh.ops": "BMesh Operators",
        "bmesh.types": "BMesh Types",
156
doc/python_api/sphinx_doc_gen.sh
Executable file
@@ -0,0 +1,156 @@
|
||||
#!/bin/sh
|
||||
# run from the blender source dir
|
||||
# bash doc/python_api/sphinx_doc_gen.sh
|
||||
# ssh upload means you need an account on the server
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Upload vars
|
||||
|
||||
# disable for testing
|
||||
DO_UPLOAD=true
|
||||
DO_EXE_BLENDER=true
|
||||
DO_OUT_HTML=true
|
||||
DO_OUT_HTML_ZIP=true
|
||||
DO_OUT_PDF=false
|
||||
|
||||
if [ -z $BLENDER_BIN ] ; then
|
||||
BLENDER_BIN="./blender.bin"
|
||||
fi
|
||||
|
||||
if [ "$1" == "" ] ; then
|
||||
echo "Expected a single argument for the username on blender.org, skipping upload step!"
|
||||
DO_UPLOAD=false
|
||||
else
|
||||
SSH_USER=$1
|
||||
SSH_HOST=$SSH_USER"@blender.org"
|
||||
SSH_UPLOAD="/data/www/vhosts/www.blender.org/api" # blender_python_api_VERSION, added after
|
||||
fi
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Blender Version & Info
|
||||
|
||||
# 'Blender 2.53 (sub 1) Build' --> '2_53_1' as a shell script.
|
||||
# "_".join(str(v) for v in bpy.app.version)
|
||||
# custom blender vars
|
||||
blender_srcdir=$(dirname -- $0)/../..
|
||||
blender_version_header="$blender_srcdir/source/blender/blenkernel/BKE_blender_version.h"
|
||||
blender_version=$(grep "BLENDER_VERSION\s" "$blender_version_header" | awk '{print $3}')
|
||||
blender_version_cycle=$(grep "BLENDER_VERSION_CYCLE\s" "$blender_version_header" | awk '{print $3}')
|
||||
unset blender_version_header
|
||||
|
||||
BLENDER_VERSION=$(expr $blender_version / 100)_$(expr $blender_version % 100)
|
||||
|
||||
SSH_UPLOAD_FULL=$SSH_UPLOAD/"blender_python_api_"$BLENDER_VERSION
|
||||
|
||||
SPHINXBASE=doc/python_api
|
||||
|
||||
SPHINX_WORKDIR="$(mktemp --directory --suffix=.sphinx)"
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Generate reStructuredText (blender/python only)
|
||||
|
||||
if $DO_EXE_BLENDER ; then
|
||||
# Don't delete existing docs, now partial updates are used for quick builds.
|
||||
#
|
||||
# Disable ASAN error halt since it results in nonzero exit code on any minor issue.
|
||||
ASAN_OPTIONS=halt_on_error=0:${ASAN_OPTIONS} \
|
||||
$BLENDER_BIN \
|
||||
--background \
|
||||
-noaudio \
|
||||
--factory-startup \
|
||||
--python-exit-code 1 \
|
||||
--python $SPHINXBASE/sphinx_doc_gen.py \
|
||||
-- \
|
||||
--output=$SPHINX_WORKDIR
|
||||
|
||||
|
||||
if (($? != 0)) ; then
|
||||
echo "Generating documentation failed, aborting"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Generate HTML (sphinx)
|
||||
|
||||
if $DO_OUT_HTML ; then
|
||||
sphinx-build -b html -j auto $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
|
||||
|
||||
# XXX, saves space on upload and zip, should move HTML outside
|
||||
# and zip up there, for now this is OK
|
||||
rm -rf sphinx-out/.doctrees
|
||||
|
||||
# in case we have a zip already
|
||||
rm -f blender_python_reference_$BLENDER_VERSION.zip
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# ZIP the HTML dir for upload
|
||||
|
||||
if $DO_OUT_HTML_ZIP ; then
|
||||
# lame, temp rename dir
|
||||
mv sphinx-out blender_python_reference_$BLENDER_VERSION
|
||||
zip -r -9 blender_python_reference_$BLENDER_VERSION.zip blender_python_reference_$BLENDER_VERSION
|
||||
mv blender_python_reference_$BLENDER_VERSION sphinx-out
|
||||
fi
|
||||
|
||||
cd -
|
||||
fi
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Generate PDF (sphinx/latex)
|
||||
|
||||
if $DO_OUT_PDF ; then
|
||||
sphinx-build -n -b latex -j auto $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
|
||||
make -C $SPHINX_WORKDIR/sphinx-out
|
||||
mv $SPHINX_WORKDIR/sphinx-out/contents.pdf \
|
||||
$SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf
|
||||
fi
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Upload to blender servers, comment this section for testing
|
||||
|
||||
if $DO_UPLOAD ; then
|
||||
|
||||
ssh $SSH_USER@blender.org 'rm -rf '$SSH_UPLOAD_FULL'/*'
|
||||
rsync --progress -ave "ssh -p 22" $SPHINX_WORKDIR/sphinx-out/* $SSH_HOST:$SSH_UPLOAD_FULL/
|
||||
|
||||
## symlink the dir to a static URL
|
||||
#ssh $SSH_USER@blender.org 'rm '$SSH_UPLOAD'/250PythonDoc && ln -s '$SSH_UPLOAD_FULL' '$SSH_UPLOAD'/250PythonDoc'
|
||||
if [ "$blender_version_cycle" = "release" ] ; then
|
||||
ssh $SSH_USER@blender.org 'rm '$SSH_UPLOAD'/blender_python_api_current && ln -s '$SSH_UPLOAD_FULL' '$SSH_UPLOAD'/blender_python_api_current'
|
||||
fi
|
||||
|
||||
# better redirect
|
||||
ssh $SSH_USER@blender.org 'echo "<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\" content=\"0;url=../blender_python_api_'$BLENDER_VERSION'/\"></head><body>Redirecting...</body></html>" > '$SSH_UPLOAD'/250PythonDoc/index.html'
|
||||
|
||||
# redirect for release only so wiki can point here
|
||||
if [ "$blender_version_cycle" = "release" ] ; then
|
||||
ssh $SSH_USER@blender.org 'echo "<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\" content=\"0;url=../blender_python_api_'$BLENDER_VERSION'/\"></head><body>Redirecting...</body></html>" > '$SSH_UPLOAD'/blender_python_api/index.html'
|
||||
fi
|
||||
|
||||
if $DO_OUT_PDF ; then
|
||||
# rename so local PDF has matching name.
|
||||
rsync --progress -ave "ssh -p 22" \
|
||||
$SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf \
|
||||
$SSH_HOST:$SSH_UPLOAD_FULL/blender_python_reference_$BLENDER_VERSION.pdf
|
||||
fi
|
||||
|
||||
if $DO_OUT_HTML_ZIP ; then
|
||||
rsync --progress -ave "ssh -p 22" \
|
||||
$SPHINX_WORKDIR/blender_python_reference_$BLENDER_VERSION.zip \
|
||||
$SSH_HOST:$SSH_UPLOAD_FULL/blender_python_reference_$BLENDER_VERSION.zip
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Print some useful text
|
||||
|
||||
echo ""
|
||||
echo "Finished! view the docs from: "
|
||||
if $DO_OUT_HTML ; then echo " html:" $SPHINX_WORKDIR/sphinx-out/index.html ; fi
|
||||
if $DO_OUT_PDF ; then echo " pdf:" $SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf ; fi
|
216
doc/python_api/sphinx_doc_update.py
Executable file
@@ -0,0 +1,216 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
"""
|
||||
This is a helper script to generate Blender Python API documentation (using Sphinx), and update server data using rsync.
|
||||
|
||||
You'll need to specify your user login and password, obviously.
|
||||
|
||||
Example usage:
|
||||
|
||||
./sphinx_doc_update.py --jobs 16 --mirror ../../../docs/remote_api_backup/ --source ../.. --blender ../../../build_cmake/bin/blender --user foobar --password barfoo
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import zipfile
|
||||
|
||||
|
||||
DEFAULT_RSYNC_SERVER = "docs.blender.org"
|
||||
DEFAULT_RSYNC_ROOT = "/api/"
|
||||
DEFAULT_SYMLINK_ROOT = "/data/www/vhosts/docs.blender.org/api"
|
||||
|
||||
|
||||
def argparse_create():
|
||||
import argparse
|
||||
global __doc__
|
||||
|
||||
# When --help or no args are given, print this help
|
||||
usage_text = __doc__
|
||||
|
||||
parser = argparse.ArgumentParser(description=usage_text,
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter)
|
||||
|
||||
parser.add_argument(
|
||||
"--mirror", dest="mirror_dir",
|
||||
metavar='PATH', required=True,
|
||||
help="Path to local rsync mirror of api doc server")
|
||||
parser.add_argument(
|
||||
"--source", dest="source_dir",
|
||||
metavar='PATH', required=True,
|
||||
help="Path to Blender git repository")
|
||||
parser.add_argument(
|
||||
"--blender", dest="blender",
|
||||
metavar='PATH', required=True,
|
||||
help="Path to Blender executable")
|
||||
parser.add_argument(
|
||||
"--rsync-server", dest="rsync_server", default=DEFAULT_RSYNC_SERVER,
|
||||
metavar='RSYNCSERVER', type=str, required=False,
|
||||
help=("rsync server address"))
|
||||
parser.add_argument(
|
||||
"--rsync-root", dest="rsync_root", default=DEFAULT_RSYNC_ROOT,
|
||||
metavar='RSYNCROOT', type=str, required=False,
|
||||
help=("Root path of API doc on rsync server"))
|
||||
parser.add_argument(
|
||||
"--user", dest="user",
|
||||
metavar='USER', type=str, required=True,
|
||||
help=("User to login on rsync server"))
|
||||
parser.add_argument(
|
||||
"--password", dest="password",
|
||||
metavar='PASSWORD', type=str, required=True,
|
||||
help=("Password to login on rsync server"))
|
||||
parser.add_argument(
|
||||
"--jobs", dest="jobs_nr",
|
||||
metavar='NR', type=int, required=False, default=1,
|
||||
help="Number of sphinx building jobs to launch in parallel")
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
def main():
|
||||
# ----------
|
||||
# Parse Args
|
||||
|
||||
args = argparse_create().parse_args()
|
||||
|
||||
rsync_base = "rsync://%s@%s:%s" % (args.user, args.rsync_server, args.rsync_root)
|
||||
|
||||
blenver = api_blenver = api_blenver_zip = ""
|
||||
api_name = ""
|
||||
branch = ""
|
||||
is_release = is_beta = False
|
||||
|
||||
# I) Update local mirror using rsync.
|
||||
rsync_mirror_cmd = ("rsync", "--delete-after", "-avzz", rsync_base, args.mirror_dir)
|
||||
subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmp_dir:
|
||||
# II) Generate doc source in temp dir.
|
||||
doc_gen_cmd = (
|
||||
args.blender, "--background", "-noaudio", "--factory-startup", "--python-exit-code", "1",
|
||||
"--python", "%s/doc/python_api/sphinx_doc_gen.py" % args.source_dir, "--",
|
||||
"--output", tmp_dir
|
||||
)
|
||||
subprocess.run(doc_gen_cmd)
|
||||
|
||||
# III) Get Blender version info.
|
||||
getver_file = os.path.join(tmp_dir, "blendver.txt")
|
||||
getver_script = (r"""import sys, bpy
|
||||
with open(sys.argv[-1], 'w') as f:
|
||||
is_release = bpy.app.version_cycle in {'rc', 'release'}
|
||||
is_beta = bpy.app.version_cycle in {'beta'}
|
||||
branch = bpy.app.build_branch.split()[0].decode()
|
||||
f.write('%d\n' % is_release)
|
||||
f.write('%d\n' % is_beta)
|
||||
f.write('%s\n' % branch)
|
||||
f.write('%d.%d\n' % (bpy.app.version[0], bpy.app.version[1]))
|
||||
f.write('%d.%d\n' % (bpy.app.version[0], bpy.app.version[1])
|
||||
if (is_release or is_beta) else '%s\n' % branch)
|
||||
f.write('%d_%d' % (bpy.app.version[0], bpy.app.version[1]))
|
||||
""")
|
||||
get_ver_cmd = (args.blender, "--background", "-noaudio", "--factory-startup", "--python-exit-code", "1",
|
||||
"--python-expr", getver_script, "--", getver_file)
|
||||
subprocess.run(get_ver_cmd)
|
||||
with open(getver_file) as f:
|
||||
is_release, is_beta, branch, blenver, api_blenver, api_blenver_zip = f.read().split("\n")
|
||||
is_release = bool(int(is_release))
|
||||
is_beta = bool(int(is_beta))
|
||||
os.remove(getver_file)
|
||||
|
||||
# IV) Build doc.
|
||||
curr_dir = os.getcwd()
|
||||
os.chdir(tmp_dir)
|
||||
sphinx_cmd = ("sphinx-build", "-j", str(args.jobs_nr), "-b", "html", "sphinx-in", "sphinx-out")
|
||||
subprocess.run(sphinx_cmd)
|
||||
shutil.rmtree(os.path.join("sphinx-out", ".doctrees"))
|
||||
os.chdir(curr_dir)
|
||||
|
||||
# V) Cleanup existing matching dir in server mirror (if any), and copy new doc.
|
||||
api_name = api_blenver
|
||||
api_dir = os.path.join(args.mirror_dir, api_name)
|
||||
if os.path.exists(api_dir):
|
||||
if os.path.islink(api_dir):
|
||||
os.remove(api_dir)
|
||||
else:
|
||||
shutil.rmtree(api_dir)
|
||||
os.rename(os.path.join(tmp_dir, "sphinx-out"), api_dir)
|
||||
|
||||
# VI) Create zip archive.
|
||||
zip_name = "blender_python_reference_%s" % api_blenver_zip # We can't use 'release' postfix here...
|
||||
zip_path = os.path.join(args.mirror_dir, zip_name)
|
||||
with zipfile.ZipFile(zip_path, 'w') as zf:
|
||||
for dirname, _, filenames in os.walk(api_dir):
|
||||
for filename in filenames:
|
||||
filepath = os.path.join(dirname, filename)
|
||||
zip_filepath = os.path.join(zip_name, os.path.relpath(filepath, api_dir))
|
||||
zf.write(filepath, arcname=zip_filepath)
|
||||
os.rename(zip_path, os.path.join(api_dir, "%s.zip" % zip_name))
|
||||
|
||||
# VII) Create symlinks and html redirects.
|
||||
if is_release:
|
||||
symlink = os.path.join(args.mirror_dir, "current")
|
||||
if os.path.exists(symlink):
|
||||
if os.path.islink(symlink):
|
||||
os.remove(symlink)
|
||||
else:
|
||||
shutil.rmtree(symlink)
|
||||
os.symlink("./%s" % api_name, symlink)
|
||||
with open(os.path.join(args.mirror_dir, "250PythonDoc/index.html"), 'w') as f:
|
||||
f.write("<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\""
|
||||
"content=\"0;url=../%s/\"></head><body>Redirecting...</body></html>" % api_name)
|
||||
elif is_beta:
|
||||
# We do not have any particular symlink for that stage.
|
||||
pass
|
||||
elif branch == "master":
|
||||
# Also create a symlink from version number to actual master api doc.
|
||||
symlink = os.path.join(args.mirror_dir, blenver)
|
||||
if os.path.exists(symlink):
|
||||
if os.path.islink(symlink):
|
||||
os.remove(symlink)
|
||||
else:
|
||||
shutil.rmtree(symlink)
|
||||
os.symlink("./%s" % api_name, symlink)
|
||||
with open(os.path.join(args.mirror_dir, "blender_python_api/index.html"), 'w') as f:
|
||||
f.write("<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\""
|
||||
"content=\"0;url=../%s/\"></head><body>Redirecting...</body></html>" % api_name)
|
||||
|
||||
# VIII) Upload (first do a dry-run so user can ensure everything is OK).
|
||||
print("Doc generated in local mirror %s, please check it before uploading "
|
||||
"(hit [Enter] to continue, [Ctrl-C] to exit):" % api_dir)
|
||||
sys.stdin.read(1)
|
||||
|
||||
rsync_mirror_cmd = ("rsync", "--dry-run", "--delete-after", "-avzz", args.mirror_dir, rsync_base)
|
||||
subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))
|
||||
|
||||
print("Rsync upload simulated, please check every thing is OK (hit [Enter] to continue, [Ctrl-C] to exit):")
|
||||
sys.stdin.read(1)
|
||||
|
||||
rsync_mirror_cmd = ("rsync", "--delete-after", "-avzz", args.mirror_dir, rsync_base)
|
||||
subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
10
extern/mantaflow/CMakeLists.txt
vendored
@@ -75,8 +75,6 @@ endif()
|
||||
|
||||
if(WITH_OPENVDB)
|
||||
add_definitions(-DOPENVDB=1)
|
||||
# OpenVDB headers use deprecated TBB headers, silence warning.
|
||||
add_definitions(-DTBB_SUPPRESS_DEPRECATED_MESSAGES=1)
|
||||
endif()
|
||||
|
||||
if(WITH_OPENVDB_BLOSC)
|
||||
@@ -133,14 +131,6 @@ if(WITH_OPENVDB)
|
||||
list(APPEND LIB
|
||||
${OPENVDB_LIBRARIES}
|
||||
)
|
||||
if(WIN32)
|
||||
# OpenVDB emits lots of these, they should be suppressed through other
|
||||
# means but MSVC 16.8/16.9 has broken this functionality, so C4251 is
|
||||
# suppressed here explicitly. See
|
||||
# https://developercommunity.visualstudio.com/content/problem/1167590/bug.html
|
||||
# for details.
|
||||
string(APPEND CMAKE_CXX_FLAGS " /wd4251")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(SRC
|
||||
|
5
extern/mantaflow/helper/util/vectorbase.h
vendored
5
extern/mantaflow/helper/util/vectorbase.h
vendored
@@ -664,11 +664,6 @@ template<class T> inline Vec3i toVec3iRound(T v)
|
||||
return Vec3i((int)round(v[0]), (int)round(v[1]), (int)round(v[2]));
|
||||
}
|
||||
|
||||
template<class T> inline Vec3i toVec3iFloor(T v)
|
||||
{
|
||||
return Vec3i((int)floor(v[0]), (int)floor(v[1]), (int)floor(v[2]));
|
||||
}
|
||||
|
||||
//! convert to int Vector if values are close enough to an int
|
||||
template<class T> inline Vec3i toVec3iChecked(T v)
|
||||
{
|
||||
|
10
extern/mantaflow/preprocessed/fileio/iovdb.cpp
vendored
10
extern/mantaflow/preprocessed/fileio/iovdb.cpp
vendored
@@ -209,11 +209,11 @@ typename GridType::Ptr exportVDB(Grid<T> *from, float clip, openvdb::FloatGrid::
|
||||
openvdb::Coord(from->getSizeX() - 1, from->getSizeY() - 1, from->getSizeZ() - 1));
|
||||
openvdb::tools::Dense<ValueT, openvdb::tools::MemoryLayout::LayoutXYZ> dense(bbox, data);
|
||||
|
||||
// Use clip value, or (when not exporting in sparse mode) clear it in order to copy all values
|
||||
// of dense grid
|
||||
ValueT tmpClip = (from->saveSparse()) ? ValueT(clip) : ValueT(0);
|
||||
// Trick: Set clip value to very small / negative value in order to copy all values of dense
|
||||
// grids
|
||||
float tmpClip = (from->saveSparse()) ? clip : -std::numeric_limits<Real>::max();
|
||||
// Copy from dense to sparse grid structure considering clip value
|
||||
openvdb::tools::copyFromDense(dense, *to, tmpClip);
|
||||
openvdb::tools::copyFromDense(dense, *to, ValueT(tmpClip));
|
||||
|
||||
// If present, use clip grid to trim down current vdb grid even more
|
||||
if (from->saveSparse() && clipGrid && !clipGrid->empty()) {
|
||||
@@ -245,10 +245,10 @@ void exportVDB(ParticleDataImpl<MantaType> *from,
|
||||
std::vector<VDBType> vdbValues;
|
||||
std::string name = from->getName();
|
||||
|
||||
BasicParticleSystem *pp = dynamic_cast<BasicParticleSystem *>(from->getParticleSys());
|
||||
FOR_PARTS(*from)
|
||||
{
|
||||
// Optionally, skip exporting particles that have been marked as deleted
|
||||
BasicParticleSystem *pp = dynamic_cast<BasicParticleSystem *>(from->getParticleSys());
|
||||
if (skipDeletedParts && !pp->isActive(idx)) {
|
||||
continue;
|
||||
}
|
||||
|
2
extern/mantaflow/preprocessed/gitinfo.h
vendored
@@ -1,3 +1,3 @@
|
||||
|
||||
|
||||
#define MANTA_GIT_VERSION "commit 39b7a415721ecbf6643612a24e8eadd221aeb934"
|
||||
#define MANTA_GIT_VERSION "commit 1c86d86496e7f7473c36248d12ef07bf4d9d2840"
|
||||
|
15
extern/mantaflow/preprocessed/grid.h
vendored
15
extern/mantaflow/preprocessed/grid.h
vendored
@@ -204,12 +204,14 @@ class GridBase : public PbClass {
|
||||
inline void checkIndex(int i, int j, int k) const;
|
||||
//! Check if indices are within bounds, otherwise error (should only be called when debugging)
|
||||
inline void checkIndex(IndexInt idx) const;
|
||||
//! Check if vector int is within given boundaries
|
||||
inline bool isInBounds(const Vec3i &p, int bnd = 0) const;
|
||||
//! Check if vector real is within given boundaries
|
||||
//! Check if index is within given boundaries
|
||||
inline bool isInBounds(const Vec3i &p, int bnd) const;
|
||||
//! Check if index is within given boundaries
|
||||
inline bool isInBounds(const Vec3i &p) const;
|
||||
//! Check if index is within given boundaries
|
||||
inline bool isInBounds(const Vec3 &p, int bnd = 0) const
|
||||
{
|
||||
return isInBounds(toVec3iFloor(p), bnd);
|
||||
return isInBounds(toVec3i(p), bnd);
|
||||
}
|
||||
//! Check if linear index is in the range of the array
|
||||
inline bool isInBounds(IndexInt idx) const;
|
||||
@@ -1783,6 +1785,11 @@ inline void GridBase::checkIndex(IndexInt idx) const
|
||||
}
|
||||
}
|
||||
|
||||
bool GridBase::isInBounds(const Vec3i &p) const
|
||||
{
|
||||
return (p.x >= 0 && p.y >= 0 && p.z >= 0 && p.x < mSize.x && p.y < mSize.y && p.z < mSize.z);
|
||||
}
|
||||
|
||||
bool GridBase::isInBounds(const Vec3i &p, int bnd) const
|
||||
{
|
||||
bool ret = (p.x >= bnd && p.y >= bnd && p.x < mSize.x - bnd && p.y < mSize.y - bnd);
|
||||
|
10
extern/mantaflow/preprocessed/plugin/flip.cpp
vendored
10
extern/mantaflow/preprocessed/plugin/flip.cpp
vendored
@@ -429,7 +429,7 @@ void markFluidCells(const BasicParticleSystem &parts,
|
||||
for (IndexInt idx = 0; idx < parts.size(); idx++) {
|
||||
if (!parts.isActive(idx) || (ptype && ((*ptype)[idx] & exclude)))
|
||||
continue;
|
||||
Vec3i p = toVec3iFloor(parts.getPos(idx));
|
||||
Vec3i p = toVec3i(parts.getPos(idx));
|
||||
if (flags.isInBounds(p) && flags.isEmpty(p))
|
||||
flags(p) = (flags(p) | FlagGrid::TypeFluid) & ~FlagGrid::TypeEmpty;
|
||||
}
|
||||
@@ -544,7 +544,7 @@ void adjustNumber(BasicParticleSystem &parts,
|
||||
// count particles in cells, and delete excess particles
|
||||
for (IndexInt idx = 0; idx < (int)parts.size(); idx++) {
|
||||
if (parts.isActive(idx)) {
|
||||
Vec3i p = toVec3iFloor(parts.getPos(idx));
|
||||
Vec3i p = toVec3i(parts.getPos(idx));
|
||||
if (!tmp.isInBounds(p)) {
|
||||
parts.kill(idx); // out of domain, remove
|
||||
continue;
|
||||
@@ -711,7 +711,7 @@ void gridParticleIndex(const BasicParticleSystem &parts,
|
||||
for (IndexInt idx = 0; idx < (IndexInt)parts.size(); idx++) {
|
||||
if (parts.isActive(idx)) {
|
||||
// check index for validity...
|
||||
Vec3i p = toVec3iFloor(parts.getPos(idx));
|
||||
Vec3i p = toVec3i(parts.getPos(idx));
|
||||
if (!index.isInBounds(p)) {
|
||||
inactive++;
|
||||
continue;
|
||||
@@ -740,7 +740,7 @@ void gridParticleIndex(const BasicParticleSystem &parts,
|
||||
for (IndexInt idx = 0; idx < (IndexInt)parts.size(); idx++) {
|
||||
if (!parts.isActive(idx))
|
||||
continue;
|
||||
Vec3i p = toVec3iFloor(parts.getPos(idx));
|
||||
Vec3i p = toVec3i(parts.getPos(idx));
|
||||
if (!index.isInBounds(p)) {
|
||||
continue;
|
||||
}
|
||||
@@ -1636,7 +1636,7 @@ struct knPushOutofObs : public KernelBase {
|
||||
{
|
||||
if (!parts.isActive(idx) || (ptype && ((*ptype)[idx] & exclude)))
|
||||
return;
|
||||
Vec3i p = toVec3iFloor(parts.getPos(idx));
|
||||
Vec3i p = toVec3i(parts.getPos(idx));
|
||||
|
||||
if (!flags.isInBounds(p))
|
||||
return;
|
||||
|
@@ -2278,7 +2278,7 @@ T convolveGrid(Grid<T> &originGrid, GaussianKernelCreator &gkSigma, Vec3 pos, in
|
||||
step = Vec3(0.0, 0.0, 1.0);
|
||||
T pxResult(0);
|
||||
for (int i = 0; i < gkSigma.mDim; ++i) {
|
||||
Vec3i curpos = toVec3iFloor(pos - step * (i - gkSigma.mDim / 2));
|
||||
Vec3i curpos = toVec3i(pos - step * (i - gkSigma.mDim / 2));
|
||||
if (originGrid.isInBounds(curpos))
|
||||
pxResult += gkSigma.get1DKernelValue(i) * originGrid.get(curpos);
|
||||
else { // TODO , improve...
|
||||
@@ -2423,7 +2423,7 @@ struct KnBlurMACGridGauss : public KernelBase {
|
||||
|
||||
Vec3 pxResult(0.0f);
|
||||
for (int di = 0; di < gkSigma.mDim; ++di) {
|
||||
Vec3i curpos = toVec3iFloor(pos - step * (di - gkSigma.mDim / 2));
|
||||
Vec3i curpos = toVec3i(pos - step * (di - gkSigma.mDim / 2));
|
||||
if (!originGrid.isInBounds(curpos)) {
|
||||
if (curpos.x < 0)
|
||||
curpos.x = 0;
|
||||
|
@@ -1214,8 +1214,8 @@ struct knFlipUpdateSecondaryParticlesLinear : public KernelBase {
|
||||
|
||||
// anti tunneling for small obstacles
|
||||
for (int ct = 1; ct < antitunneling; ct++) {
|
||||
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos +
|
||||
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
|
||||
Vec3i tempPos = toVec3i(pts_sec[idx].pos +
|
||||
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
|
||||
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
|
||||
pts_sec.kill(idx);
|
||||
return;
|
||||
@@ -1234,8 +1234,8 @@ struct knFlipUpdateSecondaryParticlesLinear : public KernelBase {
|
||||
|
||||
// anti tunneling for small obstacles
|
||||
for (int ct = 1; ct < antitunneling; ct++) {
|
||||
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos +
|
||||
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
|
||||
Vec3i tempPos = toVec3i(pts_sec[idx].pos +
|
||||
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
|
||||
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
|
||||
pts_sec.kill(idx);
|
||||
return;
|
||||
@@ -1252,7 +1252,7 @@ struct knFlipUpdateSecondaryParticlesLinear : public KernelBase {
|
||||
const Vec3 vj = v.getInterpolated(pts_sec[idx].pos);
|
||||
// anti tunneling for small obstacles
|
||||
for (int ct = 1; ct < antitunneling; ct++) {
|
||||
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt * vj);
|
||||
Vec3i tempPos = toVec3i(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt * vj);
|
||||
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
|
||||
pts_sec.kill(idx);
|
||||
return;
|
||||
@@ -1474,8 +1474,8 @@ struct knFlipUpdateSecondaryParticlesCubic : public KernelBase {
|
||||
|
||||
// anti tunneling for small obstacles
|
||||
for (int ct = 1; ct < antitunneling; ct++) {
|
||||
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos +
|
||||
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
|
||||
Vec3i tempPos = toVec3i(pts_sec[idx].pos +
|
||||
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
|
||||
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
|
||||
pts_sec.kill(idx);
|
||||
return;
|
||||
@@ -1515,8 +1515,8 @@ struct knFlipUpdateSecondaryParticlesCubic : public KernelBase {
|
||||
|
||||
// anti tunneling for small obstacles
|
||||
for (int ct = 1; ct < antitunneling; ct++) {
|
||||
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos +
|
||||
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
|
||||
Vec3i tempPos = toVec3i(pts_sec[idx].pos +
|
||||
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
|
||||
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
|
||||
pts_sec.kill(idx);
|
||||
return;
|
||||
@@ -1554,8 +1554,8 @@ struct knFlipUpdateSecondaryParticlesCubic : public KernelBase {
|
||||
|
||||
// anti tunneling for small obstacles
|
||||
for (int ct = 1; ct < antitunneling; ct++) {
|
||||
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt *
|
||||
(sumNumerator / sumDenominator));
|
||||
Vec3i tempPos = toVec3i(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt *
|
||||
(sumNumerator / sumDenominator));
|
||||
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
|
||||
pts_sec.kill(idx);
|
||||
return;
|
||||
@@ -1863,7 +1863,7 @@ struct knFlipDeleteParticlesInObstacle : public KernelBase {
|
||||
return;
|
||||
|
||||
const Vec3 &xi = pts[idx].pos;
|
||||
const Vec3i xidx = toVec3iFloor(xi);
|
||||
const Vec3i xidx = toVec3i(xi);
|
||||
// remove particles that completely left the bounds
|
||||
if (!flags.isInBounds(xidx)) {
|
||||
pts.kill(idx);
|
||||
|
@@ -2145,7 +2145,8 @@ void PbRegister_particleSurfaceTurbulence()
|
||||
void debugCheckParts(const BasicParticleSystem &parts, const FlagGrid &flags)
|
||||
{
|
||||
for (int idx = 0; idx < parts.size(); idx++) {
|
||||
if (!flags.isInBounds(parts.getPos(idx))) {
|
||||
Vec3i p = toVec3i(parts.getPos(idx));
|
||||
if (!flags.isInBounds(p)) {
|
||||
debMsg("bad position??? " << idx << " " << parts.getPos(idx), 1);
|
||||
exit(1);
|
||||
}
|
||||
|
2
extern/quadriflow/CMakeLists.txt
vendored
@@ -102,7 +102,7 @@ set(SRC
|
||||
)
|
||||
|
||||
set(LIB
|
||||
${BOOST_LIBRARIES}
|
||||
${BOOST_LIBRARIES}
|
||||
)
|
||||
|
||||
blender_add_lib(extern_quadriflow "${SRC}" "${INC}" "${INC_SYS}" "${LIB}")
|
||||
|
@@ -64,7 +64,7 @@ if(WITH_CYCLES_NATIVE_ONLY)
|
||||
endif()
|
||||
set(CYCLES_KERNEL_FLAGS "${MSVC_NATIVE_ARCH_FLAGS}")
|
||||
endif()
|
||||
elseif(NOT WITH_CPU_SIMD OR (SUPPORT_NEON_BUILD AND SSE2NEON_FOUND))
|
||||
elseif(NOT WITH_CPU_SSE)
|
||||
set(CXX_HAS_SSE FALSE)
|
||||
set(CXX_HAS_AVX FALSE)
|
||||
set(CXX_HAS_AVX2 FALSE)
|
||||
|
@@ -103,7 +103,7 @@ static bool compile_cuda(CompilationSettings &settings)
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Transfer options to a classic C array. */
|
||||
/* Tranfer options to a classic C array. */
|
||||
vector<const char *> opts(options.size());
|
||||
for (size_t i = 0; i < options.size(); i++) {
|
||||
opts[i] = options[i].c_str();
|
||||
|
@@ -564,19 +564,19 @@ static void xml_read_transform(xml_node node, Transform &tfm)
  }

  if (node.attribute("translate")) {
    float3 translate = zero_float3();
    float3 translate = make_float3(0.0f, 0.0f, 0.0f);
    xml_read_float3(&translate, node, "translate");
    tfm = tfm * transform_translate(translate);
  }

  if (node.attribute("rotate")) {
    float4 rotate = zero_float4();
    float4 rotate = make_float4(0.0f, 0.0f, 0.0f, 0.0f);
    xml_read_float4(&rotate, node, "rotate");
    tfm = tfm * transform_rotate(DEG2RADF(rotate.x), make_float3(rotate.y, rotate.z, rotate.w));
  }

  if (node.attribute("scale")) {
    float3 scale = zero_float3();
    float3 scale = make_float3(0.0f, 0.0f, 0.0f);
    xml_read_float3(&scale, node, "scale");
    tfm = tfm * transform_scale(scale);
  }
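The `zero_float3()` / `make_float3(0.0f, 0.0f, 0.0f)` pairs in this hunk (and in many of the hunks below) are two spellings of the same value: one side uses small named constructors, the other writes the components out. A self-contained sketch of what such helpers typically look like (illustrative only, not the actual Cycles math headers):

#include <cstdio>

// Minimal stand-ins for Cycles-style float3/float4, for illustration only.
struct float3 { float x, y, z; };
struct float4 { float x, y, z, w; };

inline float3 make_float3(float x, float y, float z) { return {x, y, z}; }
inline float4 make_float4(float x, float y, float z, float w) { return {x, y, z, w}; }

// Named zero/one constructors are thin wrappers over make_*; the generated
// code is identical, the call sites just read a little shorter.
inline float3 zero_float3() { return make_float3(0.0f, 0.0f, 0.0f); }
inline float3 one_float3() { return make_float3(1.0f, 1.0f, 1.0f); }
inline float4 zero_float4() { return make_float4(0.0f, 0.0f, 0.0f, 0.0f); }

int main()
{
  float3 translate = zero_float3();  // same value as make_float3(0.0f, 0.0f, 0.0f)
  float4 rotate = zero_float4();
  std::printf("%g %g %g | %g\n", translate.x, translate.y, translate.z, rotate.w);
  return 0;
}

Under definitions like these the two spellings seen throughout the diff are interchangeable.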
@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

bl_info = {
    "name": "Cycles Render Engine",
@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations


def _is_using_buggy_driver():
@@ -302,7 +301,7 @@ def list_render_passes(scene, srl):
        yield ("Denoising Clean", "RGB", 'COLOR')

    # Custom AOV passes.
    for aov in srl.aovs:
    for aov in crl.aovs:
        if aov.type == 'VALUE':
            yield (aov.name, "X", 'VALUE')
        else:
@@ -310,5 +309,22 @@ def list_render_passes(scene, srl):


def register_passes(engine, scene, view_layer):
    # Detect duplicate render pass names, first one wins.
    listed = set()
    for name, channelids, channeltype in list_render_passes(scene, view_layer):
        engine.register_pass(scene, view_layer, name, len(channelids), channelids, channeltype)
        if name not in listed:
            engine.register_pass(scene, view_layer, name, len(channelids), channelids, channeltype)
            listed.add(name)


def detect_conflicting_passes(scene, view_layer):
    # Detect conflicting render pass names for UI.
    counter = {}
    for name, _, _ in list_render_passes(scene, view_layer):
        counter[name] = counter.get(name, 0) + 1

    for aov in view_layer.cycles.aovs:
        if counter[aov.name] > 1:
            aov.conflict = "Conflicts with another render pass with the same name"
        else:
            aov.conflict = ""
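The reworked `register_passes` above is a first-one-wins de-duplication (a pass name is registered only the first time it appears), while `detect_conflicting_passes` counts names so duplicates can be flagged on each AOV. The same two steps, sketched in C++ with made-up pass names purely for illustration:

#include <cstdio>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

int main()
{
  // Hypothetical pass list; in the add-on this would come from list_render_passes().
  const std::vector<std::string> passes = {"Combined", "AOV", "Depth", "AOV"};

  // First one wins: register a name only the first time it appears.
  std::unordered_set<std::string> listed;
  for (const std::string &name : passes) {
    if (listed.insert(name).second) {
      std::printf("register pass: %s\n", name.c_str());
    }
  }

  // Separately, count occurrences so duplicates can be reported as conflicts.
  std::unordered_map<std::string, int> counter;
  for (const std::string &name : passes) {
    counter[name]++;
  }
  for (const auto &entry : counter) {
    if (entry.second > 1) {
      std::printf("conflict: %s is used by %d passes\n", entry.first.c_str(), entry.second);
    }
  }
  return 0;
}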
@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

import bpy
from bpy.types import Operator
@@ -45,6 +44,36 @@ class CYCLES_OT_use_shading_nodes(Operator):
        return {'FINISHED'}


class CYCLES_OT_add_aov(bpy.types.Operator):
    """Add an AOV pass"""
    bl_idname = "cycles.add_aov"
    bl_label = "Add AOV"

    def execute(self, context):
        view_layer = context.view_layer
        cycles_view_layer = view_layer.cycles

        cycles_view_layer.aovs.add()

        view_layer.update_render_passes()
        return {'FINISHED'}


class CYCLES_OT_remove_aov(bpy.types.Operator):
    """Remove an AOV pass"""
    bl_idname = "cycles.remove_aov"
    bl_label = "Remove AOV"

    def execute(self, context):
        view_layer = context.view_layer
        cycles_view_layer = view_layer.cycles

        cycles_view_layer.aovs.remove(cycles_view_layer.active_aov)

        view_layer.update_render_passes()
        return {'FINISHED'}


class CYCLES_OT_denoise_animation(Operator):
    "Denoise rendered animation sequence using current scene and view " \
    "layer settings. Requires denoising data passes and output to " \
@@ -168,6 +197,8 @@ class CYCLES_OT_merge_images(Operator):

classes = (
    CYCLES_OT_use_shading_nodes,
    CYCLES_OT_add_aov,
    CYCLES_OT_remove_aov,
    CYCLES_OT_denoise_animation,
    CYCLES_OT_merge_images
)
@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

import bpy
import _cycles
@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

from bl_operators.presets import AddPresetBase
from bpy.types import Operator
@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

import bpy
from bpy.props import (
@@ -179,6 +178,11 @@ enum_view3d_shading_render_pass = (
    ('MIST', "Mist", "Show the Mist render pass", 32),
)

enum_aov_types = (
    ('VALUE', "Value", "Write a Value pass", 0),
    ('COLOR', "Color", "Write a Color pass", 1),
)


def enum_openimagedenoise_denoiser(self, context):
    import _cycles
@@ -225,6 +229,7 @@ def update_render_passes(self, context):
    scene = context.scene
    view_layer = context.view_layer
    view_layer.update_render_passes()
    engine.detect_conflicting_passes(scene, view_layer)


class CyclesRenderSettings(bpy.types.PropertyGroup):
@@ -646,12 +651,6 @@ class CyclesRenderSettings(bpy.types.PropertyGroup):
        min=0, max=(1 << 24),
        default=1,
    )
    preview_denoising_input_passes: EnumProperty(
        name="Viewport Input Passes",
        description="Passes used by the denoiser to distinguish noise from shader and geometry detail",
        items=enum_denoising_input_passes,
        default='RGB_ALBEDO',
    )

    debug_reset_timeout: FloatProperty(
        name="Reset timeout",
@@ -848,7 +847,7 @@ class CyclesRenderSettings(bpy.types.PropertyGroup):
            ('MEGA', "Mega", ""),
            ('SPLIT', "Split", ""),
        ),
        update=CyclesRenderSettings._devices_update_callback
        update=_devices_update_callback
    )

    debug_opencl_device_type: EnumProperty(
@@ -862,9 +861,11 @@ class CyclesRenderSettings(bpy.types.PropertyGroup):
            ('GPU', "GPU", ""),
            ('ACCELERATOR', "Accelerator", ""),
        ),
        update=CyclesRenderSettings._devices_update_callback
        update=_devices_update_callback
    )

    del _devices_update_callback

    debug_use_opencl_debug: BoolProperty(name="Debug OpenCL", default=False)

    debug_opencl_mem_limit: IntProperty(
@@ -1310,6 +1311,27 @@ class CyclesCurveRenderSettings(bpy.types.PropertyGroup):
        del bpy.types.Scene.cycles_curves


class CyclesAOVPass(bpy.types.PropertyGroup):
    name: StringProperty(
        name="Name",
        description="Name of the pass, to use in the AOV Output shader node",
        update=update_render_passes,
        default="AOV"
    )
    type: EnumProperty(
        name="Type",
        description="Pass data type",
        update=update_render_passes,
        items=enum_aov_types,
        default='COLOR'
    )
    conflict: StringProperty(
        name="Conflict",
        description="If there is a conflict with another render passes, message explaining why",
        default=""
    )


class CyclesRenderLayerSettings(bpy.types.PropertyGroup):

    pass_debug_bvh_traversed_nodes: BoolProperty(
@@ -1440,6 +1462,7 @@ class CyclesRenderLayerSettings(bpy.types.PropertyGroup):
        items=enum_denoising_input_passes,
        default='RGB_ALBEDO',
    )

    denoising_openimagedenoise_input_passes: EnumProperty(
        name="Input Passes",
        description="Passes used by the denoiser to distinguish noise from shader and geometry detail",
@@ -1447,6 +1470,15 @@ class CyclesRenderLayerSettings(bpy.types.PropertyGroup):
        default='RGB_ALBEDO_NORMAL',
    )

    aovs: CollectionProperty(
        type=CyclesAOVPass,
        description="Custom render passes that can be output by shader nodes",
    )
    active_aov: IntProperty(
        default=0,
        min=0
    )

    @classmethod
    def register(cls):
        bpy.types.ViewLayer.cycles = PointerProperty(
@@ -1485,7 +1517,7 @@ class CyclesPreferences(bpy.types.AddonPreferences):
    compute_device_type: EnumProperty(
        name="Compute Device Type",
        description="Device to use for computation (rendering with Cycles)",
        items=CyclesPreferences.get_device_types,
        items=get_device_types,
    )

    devices: bpy.props.CollectionProperty(type=CyclesDeviceSettings)
@@ -1633,6 +1665,7 @@ def register():
    bpy.utils.register_class(CyclesCurveRenderSettings)
    bpy.utils.register_class(CyclesDeviceSettings)
    bpy.utils.register_class(CyclesPreferences)
    bpy.utils.register_class(CyclesAOVPass)
    bpy.utils.register_class(CyclesRenderLayerSettings)
    bpy.utils.register_class(CyclesView3DShadingSettings)

@@ -1654,5 +1687,6 @@ def unregister():
    bpy.utils.unregister_class(CyclesCurveRenderSettings)
    bpy.utils.unregister_class(CyclesDeviceSettings)
    bpy.utils.unregister_class(CyclesPreferences)
    bpy.utils.unregister_class(CyclesAOVPass)
    bpy.utils.unregister_class(CyclesRenderLayerSettings)
    bpy.utils.unregister_class(CyclesView3DShadingSettings)
@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

import bpy
from bpy_extras.node_utils import find_node_input
@@ -24,7 +23,7 @@ from bl_ui.utils import PresetPanel
from bpy.types import Panel

from bl_ui.properties_grease_pencil_common import GreasePencilSimplifyPanel
from bl_ui.properties_view_layer import ViewLayerCryptomattePanel, ViewLayerAOVPanel
from bl_ui.properties_view_layer import ViewLayerCryptomattePanel


class CYCLES_PT_sampling_presets(PresetPanel, Panel):
@@ -275,8 +274,6 @@ class CYCLES_RENDER_PT_sampling_denoising(CyclesButtonsPanel, Panel):

        sub.prop(cscene, "denoiser", text="")

        layout.separator()

        heading = layout.column(align=False, heading="Viewport")
        row = heading.row(align=True)
        row.prop(cscene, "use_preview_denoising", text="")
@@ -287,9 +284,6 @@ class CYCLES_RENDER_PT_sampling_denoising(CyclesButtonsPanel, Panel):
        sub = heading.row(align=True)
        sub.active = cscene.use_preview_denoising
        sub.prop(cscene, "preview_denoising_start_sample", text="Start Sample")
        sub = heading.row(align=True)
        sub.active = cscene.use_preview_denoising
        sub.prop(cscene, "preview_denoising_input_passes", text="Input Passes")


class CYCLES_RENDER_PT_sampling_advanced(CyclesButtonsPanel, Panel):
@@ -892,7 +886,7 @@ class CYCLES_RENDER_PT_passes_light(CyclesButtonsPanel, Panel):
        col.prop(view_layer, "use_pass_ambient_occlusion", text="Ambient Occlusion")


class CYCLES_RENDER_PT_passes_crypto(CyclesButtonsPanel, ViewLayerCryptomattePanel, Panel):
class CYCLES_RENDER_PT_passes_crypto(CyclesButtonsPanel, ViewLayerCryptomattePanel):
    bl_label = "Cryptomatte"
    bl_context = "view_layer"
    bl_parent_id = "CYCLES_RENDER_PT_passes"
@@ -921,11 +915,49 @@ class CYCLES_RENDER_PT_passes_debug(CyclesButtonsPanel, Panel):
        layout.prop(cycles_view_layer, "pass_debug_ray_bounces")


class CYCLES_RENDER_PT_passes_aov(CyclesButtonsPanel, ViewLayerAOVPanel):
class CYCLES_RENDER_UL_aov(bpy.types.UIList):
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
        row = layout.row()
        split = row.split(factor=0.65)
        icon = 'ERROR' if item.conflict else 'NONE'
        split.row().prop(item, "name", text="", icon=icon, emboss=False)
        split.row().prop(item, "type", text="", emboss=False)


class CYCLES_RENDER_PT_passes_aov(CyclesButtonsPanel, Panel):
    bl_label = "Shader AOV"
    bl_context = "view_layer"
    bl_parent_id = "CYCLES_RENDER_PT_passes"

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False

        cycles_view_layer = context.view_layer.cycles

        row = layout.row()
        col = row.column()
        col.template_list(
            "CYCLES_RENDER_UL_aov",
            "aovs",
            cycles_view_layer,
            "aovs",
            cycles_view_layer,
            "active_aov",
            rows=2,
        )

        col = row.column()
        sub = col.column(align=True)
        sub.operator("cycles.add_aov", icon='ADD', text="")
        sub.operator("cycles.remove_aov", icon='REMOVE', text="")

        if cycles_view_layer.active_aov < len(cycles_view_layer.aovs):
            active_aov = cycles_view_layer.aovs[cycles_view_layer.active_aov]
            if active_aov.conflict:
                layout.label(text=active_aov.conflict, icon='ERROR')


class CYCLES_RENDER_PT_denoising(CyclesButtonsPanel, Panel):
    bl_label = "Denoising"
@@ -2268,6 +2300,7 @@ classes = (
    CYCLES_RENDER_PT_passes_light,
    CYCLES_RENDER_PT_passes_crypto,
    CYCLES_RENDER_PT_passes_debug,
    CYCLES_RENDER_UL_aov,
    CYCLES_RENDER_PT_passes_aov,
    CYCLES_RENDER_PT_filter,
    CYCLES_RENDER_PT_override,
@@ -15,7 +15,6 @@
#

# <pep8 compliant>
from __future__ import annotations

import bpy
import math
@@ -109,7 +108,7 @@ def do_versions(self):
        library_versions.setdefault(library.version, []).append(library)

    # Do versioning per library, since they might have different versions.
    max_need_versioning = (2, 93, 7)
    max_need_versioning = (2, 92, 4)
    for version, libraries in library_versions.items():
        if version > max_need_versioning:
            continue
@@ -205,18 +204,6 @@ def do_versions(self):
                    view_layer.pass_cryptomatte_depth = cview_layer.get("pass_crypto_depth", 6)
                    view_layer.use_pass_cryptomatte_accurate = cview_layer.get("pass_crypto_accurate", True)

        if version <= (2, 93, 7):
            if scene.render.engine == 'CYCLES':
                for view_layer in scene.view_layers:
                    cview_layer = view_layer.cycles
                    for caov in cview_layer.get("aovs", []):
                        aov_name = caov.get("name", "AOV")
                        if aov_name in view_layer.aovs:
                            continue
                        baov = view_layer.aovs.add()
                        baov.name = caov.get("name", "AOV")
                        baov.type = "COLOR" if caov.get("type", 1) == 1 else "VALUE"

        # Lamps
        for light in bpy.data.lights:
            if light.library not in libraries:
@@ -112,7 +112,7 @@ static void blender_camera_init(BlenderCamera *bcam, BL::RenderSettings &b_rende
  bcam->focaldistance = 10.0f;

  bcam->zoom = 1.0f;
  bcam->pixelaspect = one_float2();
  bcam->pixelaspect = make_float2(1.0f, 1.0f);
  bcam->aperture_ratio = 1.0f;

  bcam->sensor_width = 36.0f;
@@ -121,8 +121,8 @@ static bool ObtainCacheParticleData(
        CData->curve_firstkey.push_back_slow(keyno);

        float curve_length = 0.0f;
        float3 prev_co_world = zero_float3();
        float3 prev_co_object = zero_float3();
        float3 prev_co_world = make_float3(0.0f, 0.0f, 0.0f);
        float3 prev_co_object = make_float3(0.0f, 0.0f, 0.0f);
        for (int step_no = 0; step_no < ren_step; step_no++) {
          float3 co_world = prev_co_world;
          b_psys.co_hair(*b_ob, pa_no, step_no, &co_world.x);
@@ -197,7 +197,7 @@ static bool ObtainCacheParticleUV(Hair *hair,
        BL::Mesh::uv_layers_iterator l;
        b_mesh->uv_layers.begin(l);

        float2 uv = zero_float2();
        float2 uv = make_float2(0.0f, 0.0f);
        if (b_mesh->uv_layers.length())
          b_psys.uv_on_emitter(psmd, *b_pa, pa_no, uv_num, &uv.x);
        CData->curve_uv.push_back_slow(uv);
@@ -678,7 +678,7 @@ static void export_hair_curves(Scene *scene, Hair *hair, BL::Hair b_hair)
    const int first_point_index = b_curve.first_point_index();
    const int num_points = b_curve.num_points();

    float3 prev_co = zero_float3();
    float3 prev_co = make_float3(0.0f, 0.0f, 0.0f);
    float length = 0.0f;
    if (attr_intercept) {
      points_length.clear();
@@ -310,143 +310,6 @@ static void attr_create_sculpt_vertex_color(Scene *scene,
  }
}

template<typename TypeInCycles, typename GetValueAtIndex>
static void fill_generic_attribute(BL::Mesh &b_mesh,
                                   TypeInCycles *data,
                                   const AttributeElement element,
                                   const GetValueAtIndex &get_value_at_index)
{
  switch (element) {
    case ATTR_ELEMENT_CORNER: {
      for (BL::MeshLoopTriangle &t : b_mesh.loop_triangles) {
        const int index = t.index() * 3;
        BL::Array<int, 3> loops = t.loops();
        data[index] = get_value_at_index(loops[0]);
        data[index + 1] = get_value_at_index(loops[1]);
        data[index + 2] = get_value_at_index(loops[2]);
      }
      break;
    }
    case ATTR_ELEMENT_VERTEX: {
      const int num_verts = b_mesh.vertices.length();
      for (int i = 0; i < num_verts; i++) {
        data[i] = get_value_at_index(i);
      }
      break;
    }
    case ATTR_ELEMENT_FACE: {
      for (BL::MeshLoopTriangle &t : b_mesh.loop_triangles) {
        data[t.index()] = get_value_at_index(t.polygon_index());
      }
      break;
    }
    default: {
      assert(false);
      break;
    }
  }
}

static void attr_create_generic(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh, bool subdivision)
{
  if (subdivision) {
    /* TODO: Handle subdivision correctly. */
    return;
  }
  AttributeSet &attributes = mesh->attributes;

  for (BL::Attribute &b_attribute : b_mesh.attributes) {
    const ustring name{b_attribute.name().c_str()};
    if (!mesh->need_attribute(scene, name)) {
      continue;
    }
    if (attributes.find(name)) {
      continue;
    }

    const BL::Attribute::domain_enum b_domain = b_attribute.domain();
    const BL::Attribute::data_type_enum b_data_type = b_attribute.data_type();

    AttributeElement element = ATTR_ELEMENT_NONE;
    switch (b_domain) {
      case BL::Attribute::domain_CORNER:
        element = ATTR_ELEMENT_CORNER;
        break;
      case BL::Attribute::domain_POINT:
        element = ATTR_ELEMENT_VERTEX;
        break;
      case BL::Attribute::domain_POLYGON:
        element = ATTR_ELEMENT_FACE;
        break;
      default:
        break;
    }
    if (element == ATTR_ELEMENT_NONE) {
      /* Not supported. */
      continue;
    }
    switch (b_data_type) {
      case BL::Attribute::data_type_FLOAT: {
        BL::FloatAttribute b_float_attribute{b_attribute};
        Attribute *attr = attributes.add(name, TypeFloat, element);
        float *data = attr->data_float();
        fill_generic_attribute(
            b_mesh, data, element, [&](int i) { return b_float_attribute.data[i].value(); });
        break;
      }
      case BL::Attribute::data_type_BOOLEAN: {
        BL::BoolAttribute b_bool_attribute{b_attribute};
        Attribute *attr = attributes.add(name, TypeFloat, element);
        float *data = attr->data_float();
        fill_generic_attribute(
            b_mesh, data, element, [&](int i) { return (float)b_bool_attribute.data[i].value(); });
        break;
      }
      case BL::Attribute::data_type_INT: {
        BL::IntAttribute b_int_attribute{b_attribute};
        Attribute *attr = attributes.add(name, TypeFloat, element);
        float *data = attr->data_float();
        fill_generic_attribute(
            b_mesh, data, element, [&](int i) { return (float)b_int_attribute.data[i].value(); });
        break;
      }
      case BL::Attribute::data_type_FLOAT_VECTOR: {
        BL::FloatVectorAttribute b_vector_attribute{b_attribute};
        Attribute *attr = attributes.add(name, TypeVector, element);
        float3 *data = attr->data_float3();
        fill_generic_attribute(b_mesh, data, element, [&](int i) {
          BL::Array<float, 3> v = b_vector_attribute.data[i].vector();
          return make_float3(v[0], v[1], v[2]);
        });
        break;
      }
      case BL::Attribute::data_type_FLOAT_COLOR: {
        BL::FloatColorAttribute b_color_attribute{b_attribute};
        Attribute *attr = attributes.add(name, TypeRGBA, element);
        float4 *data = attr->data_float4();
        fill_generic_attribute(b_mesh, data, element, [&](int i) {
          BL::Array<float, 4> v = b_color_attribute.data[i].color();
          return make_float4(v[0], v[1], v[2], v[3]);
        });
        break;
      }
      case BL::Attribute::data_type_FLOAT2: {
        BL::Float2Attribute b_float2_attribute{b_attribute};
        Attribute *attr = attributes.add(name, TypeFloat2, element);
        float2 *data = attr->data_float2();
        fill_generic_attribute(b_mesh, data, element, [&](int i) {
          BL::Array<float, 2> v = b_float2_attribute.data[i].vector();
          return make_float2(v[0], v[1]);
        });
        break;
      }
      default:
        /* Not supported. */
        break;
    }
  }
}

/* Create vertex color attributes. */
static void attr_create_vertex_color(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh, bool subdivision)
{
@@ -726,7 +589,7 @@ static void attr_create_pointiness(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh, b
  /* STEP 2: Calculate vertex normals taking into account their possible
   * duplicates which gets "welded" together.
   */
  vector<float3> vert_normal(num_verts, zero_float3());
  vector<float3> vert_normal(num_verts, make_float3(0.0f, 0.0f, 0.0f));
  /* First we accumulate all vertex normals in the original index. */
  for (int vert_index = 0; vert_index < num_verts; ++vert_index) {
    const float3 normal = get_float3(b_mesh.vertices[vert_index].normal());
@@ -743,7 +606,7 @@ static void attr_create_pointiness(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh, b
  /* STEP 3: Calculate pointiness using single ring neighborhood. */
  vector<int> counter(num_verts, 0);
  vector<float> raw_data(num_verts, 0.0f);
  vector<float3> edge_accum(num_verts, zero_float3());
  vector<float3> edge_accum(num_verts, make_float3(0.0f, 0.0f, 0.0f));
  BL::Mesh::edges_iterator e;
  EdgeMap visited_edges;
  int edge_index = 0;
@@ -974,7 +837,6 @@ static void create_mesh(Scene *scene,
  attr_create_vertex_color(scene, mesh, b_mesh, subdivision);
  attr_create_sculpt_vertex_color(scene, mesh, b_mesh, subdivision);
  attr_create_random_per_island(scene, mesh, b_mesh, subdivision);
  attr_create_generic(scene, mesh, b_mesh, subdivision);

  if (subdivision) {
    attr_create_subd_uv_map(scene, mesh, b_mesh, subdivide_uvs);
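`fill_generic_attribute` in the large hunk above is a small dispatch-by-domain template: the caller supplies the destination array and a lambda that produces one value per source index, and the helper decides whether to iterate corners, vertices or faces. A stripped-down sketch of the same pattern over a toy mesh (hypothetical types, not the Blender RNA API):

#include <array>
#include <cassert>
#include <cstdio>
#include <vector>

enum AttributeElement { ATTR_ELEMENT_VERTEX, ATTR_ELEMENT_FACE };

// Toy mesh stand-in (not the Blender RNA API): 4 vertices, 2 triangle faces.
struct ToyMesh {
  int num_verts = 4;
  std::vector<std::array<int, 3>> faces = {{0, 1, 2}, {2, 3, 0}};
};

// Same shape as fill_generic_attribute(): the element domain picks the loop,
// the lambda supplies one value per source index, and the destination array
// is filled in place.
template<typename T, typename GetValueAtIndex>
static void fill_attribute(const ToyMesh &mesh,
                           T *data,
                           const AttributeElement element,
                           const GetValueAtIndex &get_value_at_index)
{
  switch (element) {
    case ATTR_ELEMENT_VERTEX:
      for (int i = 0; i < mesh.num_verts; i++) {
        data[i] = get_value_at_index(i);
      }
      break;
    case ATTR_ELEMENT_FACE:
      for (int f = 0; f < (int)mesh.faces.size(); f++) {
        data[f] = get_value_at_index(f);
      }
      break;
    default:
      assert(false);
      break;
  }
}

int main()
{
  ToyMesh mesh;
  std::vector<float> per_vertex(mesh.num_verts);
  // Integer/boolean sources are converted to float by the lambda, as in the diff.
  fill_attribute(mesh, per_vertex.data(), ATTR_ELEMENT_VERTEX, [](int i) { return (float)(i % 2); });
  for (float v : per_vertex) {
    std::printf("%g ", v);
  }
  std::printf("\n");
  return 0;
}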
@@ -323,8 +323,8 @@ Object *BlenderSync::sync_object(BL::Depsgraph &b_depsgraph,
    object->set_random_id(b_instance.random_id());
  }
  else {
    object->set_dupli_generated(zero_float3());
    object->set_dupli_uv(zero_float2());
    object->set_dupli_generated(make_float3(0.0f, 0.0f, 0.0f));
    object->set_dupli_uv(make_float2(0.0f, 0.0f));
    object->set_random_id(hash_uint2(hash_string(object->name.c_str()), 0));
  }
@@ -31,7 +31,7 @@ bool BlenderSync::sync_dupli_particle(BL::Object &b_ob,
                                      BL::DepsgraphObjectInstance &b_instance,
                                      Object *object)
{
  /* Test if this dupli was generated from a particle system. */
  /* test if this dupli was generated from a particle sytem */
  BL::ParticleSystem b_psys = b_instance.particle_system();
  if (!b_psys)
    return false;
@@ -147,7 +147,7 @@ void python_thread_state_restore(void **python_thread_state)

static const char *PyC_UnicodeAsByte(PyObject *py_str, PyObject **coerce)
{
  const char *result = PyUnicode_AsUTF8(py_str);
  const char *result = _PyUnicode_AsString(py_str);
  if (result) {
    /* 99% of the time this is enough but we better support non unicode
     * chars since blender doesn't limit this.
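Both spellings in this hunk return a pointer to the UTF-8 buffer cached on the Python string object; `_PyUnicode_AsString` was essentially an older private alias of `PyUnicode_AsUTF8`. A minimal embedded-CPython sketch of the call (illustrative, not Blender's wrapper):

#include <Python.h>

#include <cstdio>

int main()
{
  Py_Initialize();

  PyObject *py_str = PyUnicode_FromString("blender");
  /* Returns the UTF-8 representation cached on the string object; the buffer
   * stays valid while py_str is alive and must not be freed by the caller. */
  const char *utf8 = PyUnicode_AsUTF8(py_str);
  if (utf8) {
    std::printf("%s\n", utf8);
  }
  Py_DECREF(py_str);

  Py_Finalize();
  return 0;
}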
@@ -136,7 +136,7 @@ class BlenderSession {

  /* ** Resumable render ** */

  /* Overall number of chunks in which the sample range is to be divided. */
  /* Overall number of chunks in which the sample range is to be devided. */
  static int num_resumable_chunks;

  /* Current resumable chunk index to render. */
@@ -1404,7 +1404,7 @@ void BlenderSync::sync_world(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d,
      world_color = get_float3(b_world.color());
    }
    else {
      world_color = zero_float3();
      world_color = make_float3(0.0f, 0.0f, 0.0f);
    }

    BackgroundNode *background = graph->create_node<BackgroundNode>();
@@ -1535,7 +1535,7 @@ void BlenderSync::sync_lights(BL::Depsgraph &b_depsgraph, bool update_all)
    }
    else {
      EmissionNode *emission = graph->create_node<EmissionNode>();
      emission->set_color(one_float3());
      emission->set_color(make_float3(1.0f, 1.0f, 1.0f));
      emission->set_strength(1.0f);
      graph->add(emission);
@@ -697,15 +697,9 @@ vector<Pass> BlenderSync::sync_render_passes(BL::RenderLayer &b_rlay,
    }
  }

  BL::ViewLayer::aovs_iterator b_aov_iter;
  for (b_view_layer.aovs.begin(b_aov_iter); b_aov_iter != b_view_layer.aovs.end(); ++b_aov_iter) {
    BL::AOV b_aov(*b_aov_iter);
    if (!b_aov.is_valid()) {
      continue;
    }

    string name = b_aov.name();
    bool is_color = b_aov.type() == BL::AOV::type_COLOR;
  RNA_BEGIN (&crl, b_aov, "aovs") {
    bool is_color = (get_enum(b_aov, "type") == 1);
    string name = get_string(b_aov, "name");

    if (is_color) {
      b_engine.add_pass(name.c_str(), 4, "RGBA", b_view_layer.name().c_str());
@@ -716,6 +710,7 @@ vector<Pass> BlenderSync::sync_render_passes(BL::RenderLayer &b_rlay,
      Pass::add(PASS_AOV_VALUE, passes, name.c_str());
    }
  }
  RNA_END;

  scene->film->set_denoising_data_pass(denoising.use || denoising.store_passes);
  scene->film->set_denoising_clean_pass(scene->film->get_denoising_flags() &
@@ -947,7 +942,7 @@ SessionParams BlenderSync::get_session_params(BL::RenderEngine &b_engine,
  else if (shadingsystem == 1)
    params.shadingsystem = SHADINGSYSTEM_OSL;

  /* Color management. */
  /* color managagement */
  params.display_buffer_linear = b_engine.support_display_space_shader(b_scene);

  if (b_engine.is_preview()) {
@@ -1005,9 +1000,6 @@ DenoiseParams BlenderSync::get_denoise_params(BL::Scene &b_scene,
        cscene, "preview_denoiser", DENOISER_NUM, DENOISER_NONE);
    denoising.start_sample = get_int(cscene, "preview_denoising_start_sample");

    denoising.input_passes = (DenoiserInput)get_enum(
        cscene, "preview_denoising_input_passes", DENOISER_INPUT_NUM, (int)denoising.input_passes);

    /* Auto select fastest denoiser. */
    if (denoising.type == DENOISER_NONE) {
      if (!Device::available_devices(DEVICE_MASK_OPTIX).empty()) {
@@ -43,8 +43,8 @@ void point_density_texture_space(BL::Depsgraph &b_depsgraph,
{
  BL::Object b_ob(b_point_density_node.object());
  if (!b_ob) {
    loc = zero_float3();
    size = zero_float3();
    loc = make_float3(0.0f, 0.0f, 0.0f);
    size = make_float3(0.0f, 0.0f, 0.0f);
    return;
  }
  float3 min, max;
@@ -69,7 +69,7 @@ BVHLayout BVHParams::best_bvh_layout(BVHLayout requested_layout, BVHLayoutMask s
    allowed_layouts_mask = supported_layouts;
  }
  /* We get widest from allowed ones and convert mask to actual layout. */
  const BVHLayoutMask widest_allowed_layout_mask = __bsr((uint32_t)allowed_layouts_mask);
  const BVHLayoutMask widest_allowed_layout_mask = __bsr(allowed_layouts_mask);
  return (BVHLayout)(1 << widest_allowed_layout_mask);
}
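`__bsr` here is a bit-scan-reverse, i.e. the index of the highest set bit, so `1 << __bsr(mask)` keeps only the widest layout bit from the allowed mask; the added `uint32_t` cast only pins down the operand width. A small sketch of that selection with a portable fallback and hypothetical layout bits (not the real BVHLayout values):

#include <cstdint>
#include <cstdio>

// Portable bit-scan-reverse: index of the highest set bit (mask must be non-zero).
static uint32_t bsr32(uint32_t mask)
{
#if defined(__GNUC__) || defined(__clang__)
  return 31u - (uint32_t)__builtin_clz(mask);
#else
  uint32_t index = 0;
  while (mask >>= 1) {
    index++;
  }
  return index;
#endif
}

int main()
{
  // Hypothetical layout bits, one per supported BVH layout.
  const uint32_t LAYOUT_BVH2 = 1u << 0;
  const uint32_t LAYOUT_BVH4 = 1u << 1;
  const uint32_t LAYOUT_BVH8 = 1u << 2;

  const uint32_t allowed = LAYOUT_BVH2 | LAYOUT_BVH4 | LAYOUT_BVH8;
  // Keep only the widest allowed layout: the highest set bit wins.
  const uint32_t widest = 1u << bsr32(allowed);
  std::printf("allowed mask 0x%x -> widest layout bit 0x%x\n", allowed, widest);
  return 0;
}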
@@ -107,9 +107,9 @@ BVHObjectBinning::BVHObjectBinning(const BVHRange &job,

  /* map geometry to bins, unrolled once */
  {
    int64_t i;
    ssize_t i;

    for (i = 0; i < int64_t(size()) - 1; i += 2) {
    for (i = 0; i < ssize_t(size()) - 1; i += 2) {
      prefetch_L2(&prims[start() + i + 8]);

      /* map even and odd primitive to bin */
@@ -146,7 +146,7 @@ BVHObjectBinning::BVHObjectBinning(const BVHRange &job,
    }

    /* for uneven number of primitives */
    if (i < int64_t(size())) {
    if (i < ssize_t(size())) {
      /* map primitive to bin */
      const BVHReference &prim0 = prims[start() + i];
      BoundBox bounds0 = get_prim_bounds(prim0);
@@ -237,7 +237,7 @@ void BVHObjectBinning::split(BVHReference *prims,
  BoundBox lcent_bounds = BoundBox::empty;
  BoundBox rcent_bounds = BoundBox::empty;

  int64_t l = 0, r = N - 1;
  ssize_t l = 0, r = N - 1;

  while (l <= r) {
    prefetch_L2(&prims[start() + l + 8]);
@@ -360,7 +360,7 @@ void BVHBuild::add_references(BVHRange &root)

  /* happens mostly on empty meshes */
  if (!bounds.valid())
    bounds.grow(zero_float3());
    bounds.grow(make_float3(0.0f, 0.0f, 0.0f));

  root = BVHRange(bounds, center, 0, references.size());
}
@@ -703,7 +703,7 @@ BVHNode *BVHBuild::build_node(const BVHRange &range,
    unalignedSplitSAH = params.sah_node_cost * unaligned_split.bounds.half_area() +
                        params.sah_primitive_cost * unaligned_split.nodeSAH;
    /* TOOD(sergey): Check we can create leaf already. */
    /* Check whether unaligned split is better than the regular one. */
    /* Check whether unaligned split is better than the regulat one. */
    if (unalignedSplitSAH < splitSAH) {
      do_unalinged_split = true;
    }
@@ -842,7 +842,7 @@ BVHNode *BVHBuild::create_leaf_node(const BVHRange &range, const vector<BVHRefer
  vector<BVHReference, LeafReferenceStackAllocator> object_references;

  uint visibility[PRIMITIVE_NUM_TOTAL] = {0};
  /* NOTE: Keep initialization in sync with actual number of primitives. */
  /* NOTE: Keep initializtion in sync with actual number of primitives. */
  BoundBox bounds[PRIMITIVE_NUM_TOTAL] = {
      BoundBox::empty, BoundBox::empty, BoundBox::empty, BoundBox::empty};
  int ob_num = 0;
@@ -851,7 +851,7 @@ BVHNode *BVHBuild::create_leaf_node(const BVHRange &range, const vector<BVHRefer
  for (int i = 0; i < range.size(); i++) {
    const BVHReference &ref = references[range.start() + i];
    if (ref.prim_index() != -1) {
      uint32_t type_index = bitscan((uint32_t)(ref.prim_type() & PRIMITIVE_ALL));
      int type_index = bitscan(ref.prim_type() & PRIMITIVE_ALL);
      p_ref[type_index].push_back(ref);
      p_type[type_index].push_back(ref.prim_type());
      p_index[type_index].push_back(ref.prim_index());
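`bitscan` in the last hunk is the forward counterpart: the primitive type is stored as a flag bit, and the index of its lowest set bit selects the per-type bucket the reference is pushed into; the change only tightens the integer types involved. A sketch of that bucketing with hypothetical flag values (not Cycles' PRIMITIVE_* constants):

#include <cstdint>
#include <cstdio>
#include <vector>

// Portable bit-scan-forward: index of the lowest set bit (mask must be non-zero).
static uint32_t bitscan32(uint32_t mask)
{
#if defined(__GNUC__) || defined(__clang__)
  return (uint32_t)__builtin_ctz(mask);
#else
  uint32_t index = 0;
  while ((mask & 1u) == 0u) {
    mask >>= 1;
    index++;
  }
  return index;
#endif
}

int main()
{
  // Hypothetical primitive-type flag bits (one bit per type).
  enum : uint32_t { PRIM_TRIANGLE = 1u << 0, PRIM_MOTION_TRIANGLE = 1u << 1, PRIM_CURVE = 1u << 2 };

  const uint32_t prims[] = {PRIM_TRIANGLE, PRIM_CURVE, PRIM_TRIANGLE, PRIM_MOTION_TRIANGLE};
  std::vector<int> buckets[3];  // one bucket per type index

  // Bucket each primitive by the index of its type bit, as the leaf builder does.
  for (int i = 0; i < 4; i++) {
    const uint32_t type_index = bitscan32(prims[i]);
    buckets[type_index].push_back(i);
  }
  for (int b = 0; b < 3; b++) {
    std::printf("type %d: %zu prims\n", b, buckets[b].size());
  }
  return 0;
}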
@@ -31,6 +31,8 @@
#ifdef WITH_EMBREE

# include <embree3/rtcore_geometry.h>
# include <pmmintrin.h>
# include <xmmintrin.h>

# include "bvh/bvh_embree.h"

@@ -304,7 +306,8 @@ BVHEmbree::BVHEmbree(const BVHParams &params_,
      rtc_device(NULL),
      build_quality(RTC_BUILD_QUALITY_REFIT)
{
  SIMD_SET_FLUSH_TO_ZERO;
  _MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_ON);
  _MM_SET_DENORMALS_ZERO_MODE(_MM_DENORMALS_ZERO_ON);
}

BVHEmbree::~BVHEmbree()
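The `SIMD_SET_FLUSH_TO_ZERO` macro and the explicit `_MM_SET_*` pair shown above configure the same thing: the SSE control register is set so that denormal results are flushed to zero and denormal inputs are treated as zero, which Embree recommends for performance. A standalone sketch using plain SSE intrinsics (not the Cycles macro):

// x86 only: requires the SSE (xmmintrin.h) and SSE3 (pmmintrin.h) headers.
#include <pmmintrin.h>
#include <xmmintrin.h>

#include <cstdio>

int main()
{
  // Flush denormal results to zero (FTZ) and treat denormal inputs as zero (DAZ).
  _MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_ON);
  _MM_SET_DENORMALS_ZERO_MODE(_MM_DENORMALS_ZERO_ON);

  // With FTZ/DAZ enabled, scaling a tiny subnormal yields exactly 0.0f
  // instead of an even smaller subnormal.
  volatile float tiny = 1e-40f;  // subnormal for 32-bit float
  volatile float result = tiny * 0.5f;
  std::printf("FTZ: %x, DAZ: %x, result: %g\n",
              _MM_GET_FLUSH_ZERO_MODE(), _MM_GET_DENORMALS_ZERO_MODE(), (double)result);
  return 0;
}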
@@ -383,7 +383,7 @@ if(WITH_CYCLES_OPENSUBDIV)
        optimized ${OPENSUBDIV_ROOT_DIR}/lib/osdGPU.lib
        debug ${OPENSUBDIV_ROOT_DIR}/lib/osdCPU_d.lib
        debug ${OPENSUBDIV_ROOT_DIR}/lib/osdGPU_d.lib
      )
    )
  endif()
endif()
endif()
Some files were not shown because too many files have changed in this diff.