Compare commits


9 Commits

SHA1 Message Date
8b11d36cda More progress converting the point distribute node to operations
I might not continue with this approach; it seems like building a
temporary vector of geometry components will be much simpler.
2021-02-03 12:58:11 -06:00
d726aaec13 Merge branch 'master' into temp-geometry-nodes-instances-api 2021-02-03 11:59:12 -06:00
c407647469 WIP: Rewrite point distribute node to correctly support instances
This does not compile, and isn't close to finished.
2021-02-02 23:27:58 -06:00
c9383993f8 Add a second idea for the instances API 2021-02-02 23:22:35 -06:00
89d5710830 Merge branch 'master' into temp-geometry-nodes-instances-api 2021-02-02 22:16:42 -06:00
7746c562a4 Merge branch 'master' into temp-geometry-nodes-instances-api 2021-02-02 07:47:09 -06:00
525d36813c Geometry Nodes: Support instances in the point instance node 2021-02-01 17:18:32 -06:00
5494ad43fa Geometry Nodes: Output instanced geometry from the object info node 2021-02-01 17:17:39 -06:00
8268e733f6 Geometry Nodes: First pass on instance geometry set API
I think a fair amount of this will change, but this API uses a callback on
each component of a geometry set and its instances recursively.

Example uses will come in a following commit.
2021-02-01 17:17:15 -06:00
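As a rough illustration of the callback-based API described in that last commit (a hypothetical sketch only — GeometrySet, GeometryComponent and InstancesComponent are existing Blender types, but the helper name and the instance accessor below are illustrative, not the actual patch):

    /* Illustrative sketch: run a callback on every component of a geometry set,
     * recursing into the geometry sets referenced by its instances. */
    #include "BKE_geometry_set.hh"  /* GeometrySet, GeometryComponent, InstancesComponent. */
    #include "BLI_function_ref.hh"  /* blender::FunctionRef. */

    static void foreach_component_recursive(
        const GeometrySet &geometry_set,
        blender::FunctionRef<void(const GeometryComponent &)> callback)
    {
      for (const GeometryComponent *component : geometry_set.get_components_for_read()) {
        callback(*component);
      }
      const InstancesComponent *instances =
          geometry_set.get_component_for_read<InstancesComponent>();
      if (instances != nullptr) {
        /* Hypothetical accessor; the actual instance storage may differ. */
        for (const GeometrySet &instance_geometry : instances->instanced_geometry_sets()) {
          foreach_component_recursive(instance_geometry, callback);
        }
      }
    }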
1807 changed files with 53068 additions and 85495 deletions

View File

@@ -39,8 +39,9 @@ Checks: >
-modernize-use-nodiscard, -modernize-use-nodiscard,
-modernize-loop-convert, -modernize-loop-convert,
-modernize-pass-by-value, -modernize-pass-by-value,
-modernize-use-default-member-init,
-modernize-raw-string-literal,
-modernize-avoid-bind,
-modernize-use-transparent-functors,
WarningsAsErrors: '*' WarningsAsErrors: '*'
CheckOptions:
- key: modernize-use-default-member-init.UseAssignment
value: 1

View File

@@ -63,9 +63,6 @@ list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/build_files/cmake/platform")
# avoid having empty buildtype # avoid having empty buildtype
if(NOT DEFINED CMAKE_BUILD_TYPE_INIT) if(NOT DEFINED CMAKE_BUILD_TYPE_INIT)
set(CMAKE_BUILD_TYPE_INIT "Release") set(CMAKE_BUILD_TYPE_INIT "Release")
# Internal logic caches this variable, avoid showing it by default
# since it's easy to accidentally set instead of the build type.
mark_as_advanced(CMAKE_BUILD_TYPE_INIT)
endif() endif()
# Omit superfluous "Up-to-date" messages. # Omit superfluous "Up-to-date" messages.
@@ -167,6 +164,10 @@ if(APPLE)
endif() endif()
option(WITH_BUILDINFO "Include extra build details (only disable for development & faster builds)" ON) option(WITH_BUILDINFO "Include extra build details (only disable for development & faster builds)" ON)
if(${CMAKE_VERSION} VERSION_LESS 2.8.8)
# add_library OBJECT arg unsupported
set(WITH_BUILDINFO OFF)
endif()
set(BUILDINFO_OVERRIDE_DATE "" CACHE STRING "Use instead of the current date for reproducible builds (empty string disables this option)") set(BUILDINFO_OVERRIDE_DATE "" CACHE STRING "Use instead of the current date for reproducible builds (empty string disables this option)")
set(BUILDINFO_OVERRIDE_TIME "" CACHE STRING "Use instead of the current time for reproducible builds (empty string disables this option)") set(BUILDINFO_OVERRIDE_TIME "" CACHE STRING "Use instead of the current time for reproducible builds (empty string disables this option)")
set(CPACK_OVERRIDE_PACKAGENAME "" CACHE STRING "Use instead of the standard packagename (empty string disables this option)") set(CPACK_OVERRIDE_PACKAGENAME "" CACHE STRING "Use instead of the standard packagename (empty string disables this option)")
@@ -370,8 +371,8 @@ if(WITH_PYTHON_INSTALL)
endif() endif()
endif() endif()
option(WITH_CPU_SIMD "Enable SIMD instruction if they're detected on the host machine" ON) option(WITH_CPU_SSE "Enable SIMD instruction if they're detected on the host machine" ON)
mark_as_advanced(WITH_CPU_SIMD) mark_as_advanced(WITH_CPU_SSE)
# Cycles # Cycles
option(WITH_CYCLES "Enable Cycles Render Engine" ON) option(WITH_CYCLES "Enable Cycles Render Engine" ON)
@@ -416,7 +417,6 @@ else()
option(LLVM_STATIC "Link with LLVM static libraries" OFF) option(LLVM_STATIC "Link with LLVM static libraries" OFF)
endif() endif()
mark_as_advanced(LLVM_STATIC) mark_as_advanced(LLVM_STATIC)
option(WITH_CLANG "Use Clang" OFF)
# disable for now, but plan to support on all platforms eventually # disable for now, but plan to support on all platforms eventually
option(WITH_MEM_JEMALLOC "Enable malloc replacement (http://www.canonware.com/jemalloc)" ON) option(WITH_MEM_JEMALLOC "Enable malloc replacement (http://www.canonware.com/jemalloc)" ON)
@@ -614,7 +614,6 @@ endif()
if(UNIX) if(UNIX)
# See WITH_WINDOWS_SCCACHE for Windows. # See WITH_WINDOWS_SCCACHE for Windows.
option(WITH_COMPILER_CCACHE "Use ccache to improve rebuild times (Works with Ninja, Makefiles and Xcode)" OFF) option(WITH_COMPILER_CCACHE "Use ccache to improve rebuild times (Works with Ninja, Makefiles and Xcode)" OFF)
mark_as_advanced(WITH_COMPILER_CCACHE)
endif() endif()
# The following only works with the Ninja generator in CMake >= 3.0. # The following only works with the Ninja generator in CMake >= 3.0.
@@ -729,9 +728,8 @@ set_and_warn_dependency(WITH_TBB WITH_MOD_FLUID OFF)
# NanoVDB requires OpenVDB to convert the data structure # NanoVDB requires OpenVDB to convert the data structure
set_and_warn_dependency(WITH_OPENVDB WITH_NANOVDB OFF) set_and_warn_dependency(WITH_OPENVDB WITH_NANOVDB OFF)
# OpenVDB and OpenColorIO uses 'half' type from OpenEXR # OpenVDB uses 'half' type from OpenEXR & fails to link without OpenEXR enabled.
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENVDB OFF) set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENVDB OFF)
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENCOLORIO OFF)
# Haru needs `TIFFFaxBlackCodes` & `TIFFFaxWhiteCodes` symbols from TIFF. # Haru needs `TIFFFaxBlackCodes` & `TIFFFaxWhiteCodes` symbols from TIFF.
set_and_warn_dependency(WITH_IMAGE_TIFF WITH_HARU OFF) set_and_warn_dependency(WITH_IMAGE_TIFF WITH_HARU OFF)
@@ -743,7 +741,6 @@ if(WITH_CYCLES)
# auto enable llvm for cycles_osl # auto enable llvm for cycles_osl
if(WITH_CYCLES_OSL) if(WITH_CYCLES_OSL)
set(WITH_LLVM ON CACHE BOOL "" FORCE) set(WITH_LLVM ON CACHE BOOL "" FORCE)
set(WITH_CLANG ON CACHE BOOL "" FORCE)
endif() endif()
else() else()
set(WITH_CYCLES_OSL OFF) set(WITH_CYCLES_OSL OFF)
@@ -777,6 +774,14 @@ if(WITH_GHOST_SDL OR WITH_HEADLESS)
set(WITH_XR_OPENXR OFF) set(WITH_XR_OPENXR OFF)
endif() endif()
if(WITH_CPU_SSE)
TEST_SSE_SUPPORT(COMPILER_SSE_FLAG COMPILER_SSE2_FLAG)
else()
message(STATUS "SSE and SSE2 optimizations are DISABLED!")
set(COMPILER_SSE_FLAG)
set(COMPILER_SSE2_FLAG)
endif()
if(WITH_BUILDINFO) if(WITH_BUILDINFO)
find_package(Git) find_package(Git)
if(NOT GIT_FOUND) if(NOT GIT_FOUND)
@@ -825,8 +830,8 @@ if(WITH_PYTHON)
# Do this before main 'platform_*' checks, # Do this before main 'platform_*' checks,
# because UNIX will search for the old Python paths which may not exist. # because UNIX will search for the old Python paths which may not exist.
# giving errors about missing paths before this case is met. # giving errors about missing paths before this case is met.
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.9") if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.7")
message(FATAL_ERROR "At least Python 3.9 is required to build") message(FATAL_ERROR "At least Python 3.7 is required to build")
endif() endif()
file(GLOB RESULT "${CMAKE_SOURCE_DIR}/release/scripts/addons") file(GLOB RESULT "${CMAKE_SOURCE_DIR}/release/scripts/addons")
@@ -901,18 +906,6 @@ if(NOT CMAKE_BUILD_TYPE MATCHES "Release")
endif() endif()
endif() endif()
# Test SIMD support, before platform includes to determine if sse2neon is needed.
if(WITH_CPU_SIMD)
set(COMPILER_SSE_FLAG)
set(COMPILER_SSE2_FLAG)
# Test Neon first since macOS Arm can compile and run x86-64 SSE binaries.
TEST_NEON_SUPPORT()
if(NOT SUPPORT_NEON_BUILD)
TEST_SSE_SUPPORT(COMPILER_SSE_FLAG COMPILER_SSE2_FLAG)
endif()
endif()
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
# Main Platform Checks # Main Platform Checks
# #
@@ -968,50 +961,23 @@ if(WITH_INTERNATIONAL)
endif() endif()
endif() endif()
# Enable SIMD support if detected by TEST_SSE_SUPPORT() or TEST_NEON_SUPPORT(). # See TEST_SSE_SUPPORT() for how this is defined.
#
# This is done globally, so that all modules can use it if available, and # Do it globally, SSE2 is required for quite some time now.
# because these are used in headers used by many modules. # Doing it now allows to use SSE/SSE2 in inline headers.
if(WITH_CPU_SIMD) if(SUPPORT_SSE_BUILD)
if(SUPPORT_NEON_BUILD)
# Neon
if(SSE2NEON_FOUND)
blender_include_dirs_sys("${SSE2NEON_INCLUDE_DIRS}")
add_definitions(-DWITH_SSE2NEON)
endif()
else()
# SSE
if(SUPPORT_SSE_BUILD)
string(PREPEND PLATFORM_CFLAGS "${COMPILER_SSE_FLAG} ") string(PREPEND PLATFORM_CFLAGS "${COMPILER_SSE_FLAG} ")
add_definitions(-D__SSE__ -D__MMX__) add_definitions(-D__SSE__ -D__MMX__)
endif() endif()
if(SUPPORT_SSE2_BUILD) if(SUPPORT_SSE2_BUILD)
string(APPEND PLATFORM_CFLAGS " ${COMPILER_SSE2_FLAG}") string(APPEND PLATFORM_CFLAGS " ${COMPILER_SSE2_FLAG}")
add_definitions(-D__SSE2__) add_definitions(-D__SSE2__)
if(NOT SUPPORT_SSE_BUILD) # don't double up if(NOT SUPPORT_SSE_BUILD) # don't double up
add_definitions(-D__MMX__) add_definitions(-D__MMX__)
endif() endif()
endif()
endif()
# Print instructions used
if(SUPPORT_NEON_BUILD)
if(SSE2NEON_FOUND)
message(STATUS "Neon SIMD instructions enabled")
else()
message(STATUS "Neon SIMD instructions detected but unused, requires sse2neon")
endif()
elseif(SUPPORT_SSE2_BUILD)
message(STATUS "SSE2 SIMD instructions enabled")
elseif(SUPPORT_SSE_BUILD)
message(STATUS "SSE SIMD instructions enabled")
else()
message(STATUS "No SIMD instructions detected")
endif()
else()
message(STATUS "SIMD instructions disabled")
endif() endif()
# set the endian define # set the endian define
if(MSVC) if(MSVC)
# for some reason this fails on msvc # for some reason this fails on msvc
@@ -1056,9 +1022,6 @@ if(WITH_OPENVDB)
list(APPEND OPENVDB_DEFINITIONS -DOPENVDB_3_ABI_COMPATIBLE) list(APPEND OPENVDB_DEFINITIONS -DOPENVDB_3_ABI_COMPATIBLE)
endif() endif()
# OpenVDB headers use deprecated TBB headers, silence warning.
list(APPEND OPENVDB_DEFINITIONS -DTBB_SUPPRESS_DEPRECATED_MESSAGES=1)
list(APPEND OPENVDB_INCLUDE_DIRS list(APPEND OPENVDB_INCLUDE_DIRS
${BOOST_INCLUDE_DIR} ${BOOST_INCLUDE_DIR}
${TBB_INCLUDE_DIRS} ${TBB_INCLUDE_DIRS}
@@ -1221,7 +1184,6 @@ if(WITH_OPENMP)
if(NOT WITH_OPENMP_STATIC) if(NOT WITH_OPENMP_STATIC)
string(APPEND CMAKE_C_FLAGS " ${OpenMP_C_FLAGS}") string(APPEND CMAKE_C_FLAGS " ${OpenMP_C_FLAGS}")
string(APPEND CMAKE_CXX_FLAGS " ${OpenMP_CXX_FLAGS}") string(APPEND CMAKE_CXX_FLAGS " ${OpenMP_CXX_FLAGS}")
string(APPEND CMAKE_EXE_LINKER_FLAGS " ${OpenMP_LINKER_FLAGS}")
else() else()
# Typically avoid adding flags as defines but we can't # Typically avoid adding flags as defines but we can't
# pass OpenMP flags to the linker for static builds, meaning # pass OpenMP flags to the linker for static builds, meaning
@@ -1475,7 +1437,6 @@ if(CMAKE_COMPILER_IS_GNUCC)
# gcc 4.2 gives annoying warnings on every file with this # gcc 4.2 gives annoying warnings on every file with this
if(NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "4.3") if(NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "4.3")
ADD_CHECK_C_COMPILER_FLAG(C_WARNINGS C_WARN_UNINITIALIZED -Wuninitialized) ADD_CHECK_C_COMPILER_FLAG(C_WARNINGS C_WARN_UNINITIALIZED -Wuninitialized)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_WARNINGS CXX_WARN_UNINITIALIZED -Wuninitialized)
endif() endif()
# versions before gcc4.6 give many BLI_math warnings # versions before gcc4.6 give many BLI_math warnings
@@ -1540,13 +1501,11 @@ if(CMAKE_COMPILER_IS_GNUCC)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_FORMAT -Wno-format) ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_FORMAT -Wno-format)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_SWITCH -Wno-switch) ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_SWITCH -Wno-switch)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_UNUSED_VARIABLE -Wno-unused-variable) ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_UNUSED_VARIABLE -Wno-unused-variable)
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_UNUSED_VARIABLE -Wno-uninitialized)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_CLASS_MEMACCESS -Wno-class-memaccess) ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_CLASS_MEMACCESS -Wno-class-memaccess)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_COMMENT -Wno-comment) ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_COMMENT -Wno-comment)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_TYPEDEFS -Wno-unused-local-typedefs) ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_TYPEDEFS -Wno-unused-local-typedefs)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_VARIABLE -Wno-unused-variable) ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_VARIABLE -Wno-unused-variable)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_VARIABLE -Wno-uninitialized)
if(CMAKE_COMPILER_IS_GNUCC AND (NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "7.0")) if(CMAKE_COMPILER_IS_GNUCC AND (NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "7.0"))
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_IMPLICIT_FALLTHROUGH -Wno-implicit-fallthrough) ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_IMPLICIT_FALLTHROUGH -Wno-implicit-fallthrough)
@@ -1890,7 +1849,6 @@ if(FIRST_RUN)
info_cfg_text("Build Options:") info_cfg_text("Build Options:")
info_cfg_option(WITH_ALEMBIC) info_cfg_option(WITH_ALEMBIC)
info_cfg_option(WITH_BULLET) info_cfg_option(WITH_BULLET)
info_cfg_option(WITH_CLANG)
info_cfg_option(WITH_CYCLES) info_cfg_option(WITH_CYCLES)
info_cfg_option(WITH_FFTW3) info_cfg_option(WITH_FFTW3)
info_cfg_option(WITH_FREESTYLE) info_cfg_option(WITH_FREESTYLE)

View File

@@ -26,31 +26,26 @@
define HELP_TEXT define HELP_TEXT
Blender Convenience Targets Convenience Targets
Provided for building Blender, (multiple at once can be used). Provided for building Blender, (multiple at once can be used).
* debug: Build a debug binary. * debug: Build a debug binary.
* full: Enable all supported dependencies & options. * full: Enable all supported dependencies & options.
* lite: Disable non essential features for a smaller binary and faster build. * lite: Disable non essential features for a smaller binary and faster build.
* release: Complete build with all options enabled including CUDA and Optix, matching the releases on blender.org * release Complete build with all options enabled including CUDA and Optix, matching the releases on blender.org
* headless: Build without an interface (renderfarm or server automation). * headless: Build without an interface (renderfarm or server automation).
* cycles: Build Cycles standalone only, without Blender. * cycles: Build Cycles standalone only, without Blender.
* bpy: Build as a python module which can be loaded from python directly. * bpy: Build as a python module which can be loaded from python directly.
* deps: Build library dependencies (intended only for platform maintainers).
* developer: Enable faster builds, error checking and tests, recommended for developers. * developer: Enable faster builds, error checking and tests, recommended for developers.
* config: Run cmake configuration tool to set build options.
* ninja: Use ninja build tool for faster builds. * ninja: Use ninja build tool for faster builds.
* ccache: Use ccache for faster rebuilds. * ccache: Use ccache for faster rebuilds.
Note: passing the argument 'BUILD_DIR=path' when calling make will override the default build dir. Note: passing the argument 'BUILD_DIR=path' when calling make will override the default build dir.
Note: passing the argument 'BUILD_CMAKE_ARGS=args' lets you add cmake arguments. Note: passing the argument 'BUILD_CMAKE_ARGS=args' lets you add cmake arguments.
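For instance (an illustrative invocation, not taken from the Makefile itself), both arguments can be combined with convenience targets in a single call:

    make debug ninja BUILD_DIR=../blender-debug-build BUILD_CMAKE_ARGS="-DWITH_CYCLES=OFF"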
Other Convenience Targets
Provided for other building operations.
* config: Run cmake configuration tool to set build options.
* deps: Build library dependencies (intended only for platform maintainers).
The existence of locally built dependencies overrides the pre-built dependencies from subversion.
These must be manually removed from '../lib/' to go back to using the pre-compiled libraries.
Project Files Project Files
Generate project files for development environments. Generate project files for development environments.
@@ -90,15 +85,12 @@ Static Source Code Checking
* check_descriptions: Check for duplicate/invalid descriptions. * check_descriptions: Check for duplicate/invalid descriptions.
Spell Checkers Spell Checkers
This runs the spell checker from the developer tools repository.
* check_spelling_c: Check for spelling errors (C/C++ only), * check_spelling_c: Check for spelling errors (C/C++ only),
* check_spelling_osl: Check for spelling errors (OSL only). * check_spelling_osl: Check for spelling errors (OSL only).
* check_spelling_py: Check for spelling errors (Python only). * check_spelling_py: Check for spelling errors (Python only).
Note: an additional word-list is maintained at: 'source/tools/check_source/check_spelling_c_config.py' Note that spell checkers can take a 'CHECK_SPELLING_CACHE' filepath argument,
Note: that spell checkers can take a 'CHECK_SPELLING_CACHE' filepath argument,
so re-running does not need to re-check unchanged files. so re-running does not need to re-check unchanged files.
Example: Example:
@@ -131,7 +123,7 @@ Utilities
* update: * update:
updates git and all submodules updates git and all submodules
* format: * format
Format source code using clang (uses PATHS if passed in). For example:: Format source code using clang (uses PATHS if passed in). For example::
make format PATHS="source/blender/blenlib source/blender/blenkernel" make format PATHS="source/blender/blenlib source/blender/blenkernel"

View File

@@ -31,7 +31,6 @@
# #
# MAC OS X USAGE: # MAC OS X USAGE:
# Install with homebrew: brew install autoconf automake bison cmake libtool pkg-config yasm # Install with homebrew: brew install autoconf automake bison cmake libtool pkg-config yasm
# Additional requirements for macOS arm64: brew install flex
# Run "make deps" from main Blender directory # Run "make deps" from main Blender directory
# #
# LINUX USAGE: # LINUX USAGE:
@@ -71,7 +70,9 @@ include(cmake/cuew.cmake)
include(cmake/opensubdiv.cmake) include(cmake/opensubdiv.cmake)
include(cmake/sdl.cmake) include(cmake/sdl.cmake)
include(cmake/opencollada.cmake) include(cmake/opencollada.cmake)
include(cmake/opencolorio.cmake)
include(cmake/llvm.cmake) include(cmake/llvm.cmake)
include(cmake/clang.cmake)
if(APPLE) if(APPLE)
include(cmake/openmp.cmake) include(cmake/openmp.cmake)
endif() endif()
@@ -86,40 +87,36 @@ include(cmake/tbb.cmake)
include(cmake/openvdb.cmake) include(cmake/openvdb.cmake)
include(cmake/nanovdb.cmake) include(cmake/nanovdb.cmake)
include(cmake/python.cmake) include(cmake/python.cmake)
option(USE_PIP_NUMPY "Install NumPy using pip wheel instead of building from source" OFF)
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
set(USE_PIP_NUMPY ON)
else()
include(cmake/numpy.cmake)
endif()
include(cmake/python_site_packages.cmake) include(cmake/python_site_packages.cmake)
include(cmake/package_python.cmake) include(cmake/package_python.cmake)
include(cmake/numpy.cmake)
include(cmake/usd.cmake) include(cmake/usd.cmake)
include(cmake/potrace.cmake) include(cmake/potrace.cmake)
include(cmake/haru.cmake) include(cmake/haru.cmake)
# Boost needs to be included after python.cmake due to the PYTHON_BINARY variable being needed. # Boost needs to be included after python.cmake due to the PYTHON_BINARY variable being needed.
include(cmake/boost.cmake) include(cmake/boost.cmake)
include(cmake/pugixml.cmake) include(cmake/pugixml.cmake)
include(cmake/ispc.cmake) if((NOT APPLE) OR ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
include(cmake/openimagedenoise.cmake) include(cmake/ispc.cmake)
include(cmake/embree.cmake) include(cmake/openimagedenoise.cmake)
include(cmake/embree.cmake)
endif()
if(NOT APPLE) if(NOT APPLE)
include(cmake/xr_openxr.cmake) include(cmake/xr_openxr.cmake)
endif() endif()
# OpenColorIO and dependencies.
include(cmake/expat.cmake)
include(cmake/yamlcpp.cmake)
include(cmake/opencolorio.cmake)
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
include(cmake/sse2neon.cmake)
endif()
if(WITH_WEBP) if(WITH_WEBP)
include(cmake/webp.cmake) include(cmake/webp.cmake)
endif() endif()
if(WIN32)
# OCIO deps
include(cmake/tinyxml.cmake)
include(cmake/yamlcpp.cmake)
# LCMS is an OCIO dep, but only if you build the apps, leaving it here for convenience
# include(cmake/lcms.cmake)
endif()
if(NOT WIN32 OR ENABLE_MINGW64) if(NOT WIN32 OR ENABLE_MINGW64)
include(cmake/gmp.cmake) include(cmake/gmp.cmake)
include(cmake/openjpeg.cmake) include(cmake/openjpeg.cmake)

View File

@@ -19,6 +19,16 @@
set(ALEMBIC_EXTRA_ARGS set(ALEMBIC_EXTRA_ARGS
-DBUILDSTATIC=ON -DBUILDSTATIC=ON
-DLINKSTATIC=ON -DLINKSTATIC=ON
-DALEMBIC_LIB_USES_BOOST=ON
-DBoost_COMPILER:STRING=${BOOST_COMPILER_STRING}
-DBoost_USE_MULTITHREADED=ON
-DUSE_STATIC_BOOST=On
-DBoost_USE_STATIC_LIBS=ON
-DBoost_USE_STATIC_RUNTIME=OFF
-DBoost_DEBUG=ON
-DBOOST_ROOT=${LIBDIR}/boost
-DBoost_NO_SYSTEM_PATHS=ON
-DBoost_NO_BOOST_CMAKE=ON
-DILMBASE_ROOT=${LIBDIR}/openexr -DILMBASE_ROOT=${LIBDIR}/openexr
-DALEMBIC_ILMBASE_INCLUDE_DIRECTORY=${LIBDIR}/openexr/include/OpenEXR -DALEMBIC_ILMBASE_INCLUDE_DIRECTORY=${LIBDIR}/openexr/include/OpenEXR
-DALEMBIC_ILMBASE_HALF_LIB=${LIBDIR}/openexr/lib/${LIBPREFIX}Half${OPENEXR_VERSION_POSTFIX}${LIBEXT} -DALEMBIC_ILMBASE_HALF_LIB=${LIBDIR}/openexr/lib/${LIBPREFIX}Half${OPENEXR_VERSION_POSTFIX}${LIBEXT}
@@ -71,6 +81,7 @@ endif()
add_dependencies( add_dependencies(
external_alembic external_alembic
external_boost
external_zlib external_zlib
external_openexr external_openexr
) )

View File

@@ -23,7 +23,7 @@ if(WIN32)
set(BOOST_COMPILER_STRING -vc141) set(BOOST_COMPILER_STRING -vc141)
set(BOOST_CONFIGURE_COMMAND bootstrap.bat) set(BOOST_CONFIGURE_COMMAND bootstrap.bat)
set(BOOST_BUILD_COMMAND b2) set(BOOST_BUILD_COMMAND bjam)
set(BOOST_BUILD_OPTIONS runtime-link=shared ) set(BOOST_BUILD_OPTIONS runtime-link=shared )
set(BOOST_HARVEST_CMD ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/boost/lib/ ${HARVEST_TARGET}/boost/lib/ ) set(BOOST_HARVEST_CMD ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/boost/lib/ ${HARVEST_TARGET}/boost/lib/ )
if(BUILD_MODE STREQUAL Release) if(BUILD_MODE STREQUAL Release)

View File

@@ -33,16 +33,6 @@ if(UNIX)
yasm yasm
) )
if(NOT APPLE)
set(_required_software
${_required_software}
# Needed for Mesa.
meson
ninja
)
endif()
foreach(_software ${_required_software}) foreach(_software ${_required_software})
find_program(_software_find NAMES ${_software}) find_program(_software_find NAMES ${_software})
if(NOT _software_find) if(NOT _software_find)
@@ -70,7 +60,7 @@ if(UNIX)
" ${_software_missing}\n" " ${_software_missing}\n"
"\n" "\n"
"On Debian and Ubuntu:\n" "On Debian and Ubuntu:\n"
" apt install autoconf automake libtool yasm tcl ninja-build meson python3-mako\n" " apt install autoconf automake libtool yasm tcl\n"
"\n" "\n"
"On macOS (with homebrew):\n" "On macOS (with homebrew):\n"
" brew install autoconf automake bison libtool pkg-config yasm\n" " brew install autoconf automake bison libtool pkg-config yasm\n"

View File

@@ -0,0 +1,104 @@
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENSE BLOCK *****
set(CLANG_EXTRA_ARGS
-DLLVM_DIR="${LIBDIR}/llvm/lib/cmake/llvm/"
-DLLVM_USE_CRT_RELEASE=MD
-DLLVM_USE_CRT_DEBUG=MDd
-DLLVM_CONFIG=${LIBDIR}/llvm/bin/llvm-config
)
set(BUILD_CLANG_TOOLS OFF)
if(WIN32)
set(CLANG_GENERATOR "Ninja")
else()
set(CLANG_GENERATOR "Unix Makefiles")
endif()
if(APPLE)
set(BUILD_CLANG_TOOLS ON)
set(CLANG_EXTRA_ARGS ${CLANG_EXTRA_ARGS}
-DLIBXML2_LIBRARY=${LIBDIR}/xml2/lib/libxml2.a
)
endif()
if(BUILD_CLANG_TOOLS)
# ExternalProject_Add does not allow multiple tarballs to be
# downloaded. Work around this by having an empty build action
# for the extra tools, and referring the clang build to the location
# of the clang-tools-extra source.
ExternalProject_Add(external_clang_tools
URL ${CLANG_TOOLS_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${CLANG_TOOLS_HASH}
INSTALL_DIR ${LIBDIR}/clang_tools
PREFIX ${BUILD_DIR}/clang_tools
CONFIGURE_COMMAND echo "."
BUILD_COMMAND echo "."
INSTALL_COMMAND echo "."
)
list(APPEND CLANG_EXTRA_ARGS
-DLLVM_EXTERNAL_CLANG_TOOLS_EXTRA_SOURCE_DIR=${BUILD_DIR}/clang_tools/src/external_clang_tools/
)
endif()
ExternalProject_Add(external_clang
URL ${CLANG_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${CLANG_HASH}
PREFIX ${BUILD_DIR}/clang
CMAKE_GENERATOR ${CLANG_GENERATOR}
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/clang ${DEFAULT_CMAKE_FLAGS} ${CLANG_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/clang
)
if(MSVC)
if(BUILD_MODE STREQUAL Release)
set(CLANG_HARVEST_COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/clang/ ${HARVEST_TARGET}/llvm/)
else()
set(CLANG_HARVEST_COMMAND
${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/clang/lib/ ${HARVEST_TARGET}/llvm/debug/lib/
)
endif()
ExternalProject_Add_Step(external_clang after_install
COMMAND ${CLANG_HARVEST_COMMAND}
DEPENDEES mkdir update patch download configure build install
)
endif()
add_dependencies(
external_clang
ll
)
if(BUILD_CLANG_TOOLS)
# `external_clang_tools` is for downloading the source, not compiling it.
add_dependencies(
external_clang
external_clang_tools
)
endif()
# We currently do not build libxml2 on Windows.
if(NOT WIN32)
add_dependencies(
external_clang
external_xml2
)
endif()

View File

@@ -29,7 +29,6 @@ set(EMBREE_EXTRA_ARGS
-DEMBREE_MAX_ISA=AVX2 -DEMBREE_MAX_ISA=AVX2
-DEMBREE_TASKING_SYSTEM=TBB -DEMBREE_TASKING_SYSTEM=TBB
-DEMBREE_TBB_ROOT=${LIBDIR}/tbb -DEMBREE_TBB_ROOT=${LIBDIR}/tbb
-DTBB_ROOT=${LIBDIR}/tbb
-DTBB_STATIC_LIB=${TBB_STATIC_LIBRARY} -DTBB_STATIC_LIB=${TBB_STATIC_LIBRARY}
) )
@@ -47,17 +46,7 @@ else()
set(EMBREE_BUILD_DIR) set(EMBREE_BUILD_DIR)
endif() endif()
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")) ExternalProject_Add(external_embree
ExternalProject_Add(external_embree
GIT_REPOSITORY ${EMBREE_ARM_GIT}
GIT_TAG "blender-arm"
DOWNLOAD_DIR ${DOWNLOAD_DIR}
PREFIX ${BUILD_DIR}/embree
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/embree
)
else()
ExternalProject_Add(external_embree
URL ${EMBREE_URI} URL ${EMBREE_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR} DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${EMBREE_HASH} URL_HASH MD5=${EMBREE_HASH}
@@ -65,8 +54,7 @@ else()
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/embree/src/external_embree < ${PATCH_DIR}/embree.diff PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/embree/src/external_embree < ${PATCH_DIR}/embree.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS} CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/embree INSTALL_DIR ${LIBDIR}/embree
) )
endif()
add_dependencies( add_dependencies(
external_embree external_embree

View File

@@ -97,21 +97,18 @@ harvest(jemalloc/lib jemalloc/lib "*.a")
harvest(jpg/include jpeg/include "*.h") harvest(jpg/include jpeg/include "*.h")
harvest(jpg/lib jpeg/lib "libjpeg.a") harvest(jpg/lib jpeg/lib "libjpeg.a")
harvest(lame/lib ffmpeg/lib "*.a") harvest(lame/lib ffmpeg/lib "*.a")
harvest(llvm/bin llvm/bin "clang-format") harvest(clang/bin llvm/bin "clang-format")
if(BUILD_CLANG_TOOLS) if(BUILD_CLANG_TOOLS)
harvest(llvm/bin llvm/bin "clang-tidy") harvest(clang/bin llvm/bin "clang-tidy")
harvest(llvm/share/clang llvm/share "run-clang-tidy.py") harvest(clang/share/clang llvm/share "run-clang-tidy.py")
endif() endif()
harvest(clang/include llvm/include "*")
harvest(llvm/include llvm/include "*") harvest(llvm/include llvm/include "*")
harvest(llvm/bin llvm/bin "llvm-config") harvest(llvm/bin llvm/bin "llvm-config")
harvest(llvm/lib llvm/lib "libLLVM*.a") harvest(llvm/lib llvm/lib "libLLVM*.a")
harvest(llvm/lib llvm/lib "libclang*.a")
if(APPLE) if(APPLE)
harvest(openmp/lib openmp/lib "*") harvest(openmp/lib openmp/lib "*")
harvest(openmp/include openmp/include "*.h") harvest(openmp/include openmp/include "*.h")
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
harvest(sse2neon sse2neon "*.h")
endif()
endif() endif()
harvest(ogg/lib ffmpeg/lib "*.a") harvest(ogg/lib ffmpeg/lib "*.a")
harvest(openal/include openal/include "*.h") harvest(openal/include openal/include "*.h")
@@ -142,10 +139,12 @@ harvest(openimageio/bin openimageio/bin "maketx")
harvest(openimageio/bin openimageio/bin "oiiotool") harvest(openimageio/bin openimageio/bin "oiiotool")
harvest(openimageio/include openimageio/include "*") harvest(openimageio/include openimageio/include "*")
harvest(openimageio/lib openimageio/lib "*.a") harvest(openimageio/lib openimageio/lib "*.a")
harvest(openimagedenoise/include openimagedenoise/include "*") if((NOT APPLE) OR ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
harvest(openimagedenoise/lib openimagedenoise/lib "*.a") harvest(openimagedenoise/include openimagedenoise/include "*")
harvest(embree/include embree/include "*.h") harvest(openimagedenoise/lib openimagedenoise/lib "*.a")
harvest(embree/lib embree/lib "*.a") harvest(embree/include embree/include "*.h")
harvest(embree/lib embree/lib "*.a")
endif()
harvest(openjpeg/include/openjpeg-2.3 openjpeg/include "*.h") harvest(openjpeg/include/openjpeg-2.3 openjpeg/include "*.h")
harvest(openjpeg/lib openjpeg/lib "*.a") harvest(openjpeg/lib openjpeg/lib "*.a")
harvest(opensubdiv/include opensubdiv/include "*.h") harvest(opensubdiv/include opensubdiv/include "*.h")
@@ -158,12 +157,12 @@ harvest(xr_openxr_sdk/lib xr_openxr_sdk/lib "*.a")
harvest(osl/bin osl/bin "oslc") harvest(osl/bin osl/bin "oslc")
harvest(osl/include osl/include "*.h") harvest(osl/include osl/include "*.h")
harvest(osl/lib osl/lib "*.a") harvest(osl/lib osl/lib "*.a")
harvest(osl/share/OSL/shaders osl/share/OSL/shaders "*.h") harvest(osl/shaders osl/shaders "*.h")
harvest(png/include png/include "*.h") harvest(png/include png/include "*.h")
harvest(png/lib png/lib "*.a") harvest(png/lib png/lib "*.a")
harvest(pugixml/include pugixml/include "*.hpp") harvest(pugixml/include pugixml/include "*.hpp")
harvest(pugixml/lib pugixml/lib "*.a") harvest(pugixml/lib pugixml/lib "*.a")
harvest(python/bin python/bin "python${PYTHON_SHORT_VERSION}") harvest(python/bin python/bin "python${PYTHON_SHORT_VERSION}m")
harvest(python/include python/include "*h") harvest(python/include python/include "*h")
harvest(python/lib python/lib "*") harvest(python/lib python/lib "*")
harvest(sdl/include/SDL2 sdl/include "*.h") harvest(sdl/include/SDL2 sdl/include "*.h")
@@ -193,7 +192,7 @@ harvest(haru/lib haru/lib "*.a")
if(UNIX AND NOT APPLE) if(UNIX AND NOT APPLE)
harvest(libglu/lib mesa/lib "*.so*") harvest(libglu/lib mesa/lib "*.so*")
harvest(mesa/lib64 mesa/lib "*.so*") harvest(mesa/lib mesa/lib "*.so*")
endif() endif()
endif() endif()

View File

@@ -21,42 +21,37 @@ if(WIN32)
-DFLEX_EXECUTABLE=${LIBDIR}/flexbison/win_flex.exe -DFLEX_EXECUTABLE=${LIBDIR}/flexbison/win_flex.exe
-DBISON_EXECUTABLE=${LIBDIR}/flexbison/win_bison.exe -DBISON_EXECUTABLE=${LIBDIR}/flexbison/win_bison.exe
-DM4_EXECUTABLE=${DOWNLOAD_DIR}/mingw/mingw64/msys/1.0/bin/m4.exe -DM4_EXECUTABLE=${DOWNLOAD_DIR}/mingw/mingw64/msys/1.0/bin/m4.exe
-DARM_ENABLED=Off
) )
elseif(APPLE) elseif(APPLE)
# Use bison and flex installed via Homebrew. # Use bison installed via Homebrew.
# The ones that come with Xcode toolset are too old. # The one which comes which Xcode toolset is too old.
if("${CMAKE_HOST_SYSTEM_PROCESSOR}" STREQUAL "arm64") if("${CMAKE_HOST_SYSTEM_PROCESSOR}" STREQUAL "arm64")
set(ISPC_EXTRA_ARGS_APPLE set(HOMEBREW_LOCATION "/opt/homebrew")
-DBISON_EXECUTABLE=/opt/homebrew/opt/bison/bin/bison
-DFLEX_EXECUTABLE=/opt/homebrew/opt/flex/bin/flex
-DARM_ENABLED=On
)
else() else()
set(ISPC_EXTRA_ARGS_APPLE set(HOMEBREW_LOCATION "/usr/local")
-DBISON_EXECUTABLE=/usr/local/opt/bison/bin/bison
-DARM_ENABLED=Off
)
endif() endif()
set(ISPC_EXTRA_ARGS_APPLE
-DBISON_EXECUTABLE=${HOMEBREW_LOCATION}/opt/bison/bin/bison
)
elseif(UNIX) elseif(UNIX)
set(ISPC_EXTRA_ARGS_UNIX set(ISPC_EXTRA_ARGS_UNIX
-DCMAKE_C_COMPILER=${LIBDIR}/llvm/bin/clang -DCMAKE_C_COMPILER=${LIBDIR}/clang/bin/clang
-DCMAKE_CXX_COMPILER=${LIBDIR}/llvm/bin/clang++ -DCMAKE_CXX_COMPILER=${LIBDIR}/clang/bin/clang++
-DARM_ENABLED=Off
) )
endif() endif()
set(ISPC_EXTRA_ARGS set(ISPC_EXTRA_ARGS
-DARM_ENABLED=Off
-DISPC_NO_DUMPS=On -DISPC_NO_DUMPS=On
-DISPC_INCLUDE_EXAMPLES=Off -DISPC_INCLUDE_EXAMPLES=Off
-DISPC_INCLUDE_TESTS=Off -DISPC_INCLUDE_TESTS=Off
-DLLVM_ROOT=${LIBDIR}/llvm/lib/cmake/llvm -DLLVM_ROOT=${LIBDIR}/llvm/lib/cmake/llvm
-DLLVM_LIBRARY_DIR=${LIBDIR}/llvm/lib -DLLVM_LIBRARY_DIR=${LIBDIR}/llvm/lib
-DCLANG_EXECUTABLE=${LIBDIR}/llvm/bin/clang -DCLANG_EXECUTABLE=${LIBDIR}/clang/bin/clang
-DCLANGPP_EXECUTABLE=${LIBDIR}/llvm/bin/clang++ -DCLANGPP_EXECUTABLE=${LIBDIR}/clang/bin/clang++
-DISPC_INCLUDE_TESTS=Off -DISPC_INCLUDE_TESTS=Off
-DCLANG_LIBRARY_DIR=${LIBDIR}/llvm/lib -DCLANG_LIBRARY_DIR=${LIBDIR}/clang/lib
-DCLANG_INCLUDE_DIRS=${LIBDIR}/llvm/include -DCLANG_INCLUDE_DIRS=${LIBDIR}/clang/include
${ISPC_EXTRA_ARGS_WIN} ${ISPC_EXTRA_ARGS_WIN}
${ISPC_EXTRA_ARGS_APPLE} ${ISPC_EXTRA_ARGS_APPLE}
${ISPC_EXTRA_ARGS_UNIX} ${ISPC_EXTRA_ARGS_UNIX}
@@ -75,6 +70,7 @@ ExternalProject_Add(external_ispc
add_dependencies( add_dependencies(
external_ispc external_ispc
ll ll
external_clang
) )
if(WIN32) if(WIN32)

View File

@@ -16,20 +16,16 @@
# #
# ***** END GPL LICENSE BLOCK ***** # ***** END GPL LICENSE BLOCK *****
set(EXPAT_EXTRA_ARGS set(LCMS_EXTRA_ARGS
-DEXPAT_BUILD_DOCS=OFF
-DEXPAT_BUILD_EXAMPLES=OFF
-DEXPAT_BUILD_TESTS=OFF
-DEXPAT_BUILD_TOOLS=OFF
-DEXPAT_SHARED_LIBS=OFF
) )
ExternalProject_Add(external_expat ExternalProject_Add(external_lcms
URL ${EXPAT_URI} URL ${LCMS_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR} DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${EXPAT_HASH} URL_HASH MD5=${LCMS_HASH}
PREFIX ${BUILD_DIR}/expat PREFIX ${BUILD_DIR}/lcms
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/expat ${DEFAULT_CMAKE_FLAGS} ${EXPAT_EXTRA_ARGS} # Patch taken from ocio.
INSTALL_DIR ${LIBDIR}/expat PATCH_COMMAND ${CMAKE_COMMAND} -E copy ${PATCH_DIR}/cmakelists_lcms.txt ${BUILD_DIR}/lcms/src/external_lcms/CMakeLists.txt
SOURCE_SUBDIR expat CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/lcms ${DEFAULT_CMAKE_FLAGS} ${LCMS_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/lcms
) )

View File

@@ -17,20 +17,11 @@
# ***** END GPL LICENSE BLOCK ***** # ***** END GPL LICENSE BLOCK *****
if(APPLE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64") if(APPLE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
set(LLVM_TARGETS AArch64$<SEMICOLON>ARM) set(LLVM_TARGETS AArch64)
else() else()
set(LLVM_TARGETS X86) set(LLVM_TARGETS X86)
endif() endif()
if(APPLE)
set(LLVM_XML2_ARGS
-DLIBXML2_LIBRARY=${LIBDIR}/xml2/lib/libxml2.a
)
set(LLVM_BUILD_CLANG_TOOLS_EXTRA ^^clang-tools-extra)
set(BUILD_CLANG_TOOLS ON)
endif()
set(LLVM_EXTRA_ARGS set(LLVM_EXTRA_ARGS
-DLLVM_USE_CRT_RELEASE=MD -DLLVM_USE_CRT_RELEASE=MD
-DLLVM_USE_CRT_DEBUG=MDd -DLLVM_USE_CRT_DEBUG=MDd
@@ -40,8 +31,6 @@ set(LLVM_EXTRA_ARGS
-DLLVM_ENABLE_TERMINFO=OFF -DLLVM_ENABLE_TERMINFO=OFF
-DLLVM_BUILD_LLVM_C_DYLIB=OFF -DLLVM_BUILD_LLVM_C_DYLIB=OFF
-DLLVM_ENABLE_UNWIND_TABLES=OFF -DLLVM_ENABLE_UNWIND_TABLES=OFF
-DLLVM_ENABLE_PROJECTS=clang${LLVM_BUILD_CLANG_TOOLS_EXTRA}
${LLVM_XML2_ARGS}
) )
if(WIN32) if(WIN32)
@@ -56,9 +45,7 @@ ExternalProject_Add(ll
DOWNLOAD_DIR ${DOWNLOAD_DIR} DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${LLVM_HASH} URL_HASH MD5=${LLVM_HASH}
CMAKE_GENERATOR ${LLVM_GENERATOR} CMAKE_GENERATOR ${LLVM_GENERATOR}
LIST_SEPARATOR ^^
PREFIX ${BUILD_DIR}/ll PREFIX ${BUILD_DIR}/ll
SOURCE_SUBDIR llvm
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/ll/src/ll < ${PATCH_DIR}/llvm.diff PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/ll/src/ll < ${PATCH_DIR}/llvm.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/llvm ${DEFAULT_CMAKE_FLAGS} ${LLVM_EXTRA_ARGS} CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/llvm ${DEFAULT_CMAKE_FLAGS} ${LLVM_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/llvm INSTALL_DIR ${LIBDIR}/llvm
@@ -78,11 +65,3 @@ if(MSVC)
DEPENDEES mkdir update patch download configure build install DEPENDEES mkdir update patch download configure build install
) )
endif() endif()
# We currently do not build libxml2 on Windows.
if(APPLE)
add_dependencies(
ll
external_xml2
)
endif()

View File

@@ -20,36 +20,19 @@ set(MESA_CFLAGS "-static-libgcc")
set(MESA_CXXFLAGS "-static-libgcc -static-libstdc++ -Bstatic -lstdc++ -Bdynamic -l:libstdc++.a") set(MESA_CXXFLAGS "-static-libgcc -static-libstdc++ -Bstatic -lstdc++ -Bdynamic -l:libstdc++.a")
set(MESA_LDFLAGS "-L${LIBDIR}/zlib/lib -pthread -static-libgcc -static-libstdc++ -Bstatic -lstdc++ -Bdynamic -l:libstdc++.a -l:libz_pic.a") set(MESA_LDFLAGS "-L${LIBDIR}/zlib/lib -pthread -static-libgcc -static-libstdc++ -Bstatic -lstdc++ -Bdynamic -l:libstdc++.a -l:libz_pic.a")
# The 'native-file', used for overrides with the meson build system.
# meson does not provide a way to do this using command line arguments.
#
# Note that we can't output to "${BUILD_DIR}/mesa/src/external_mesa" as
# it doesn't exist when CMake first executes.
file(WRITE ${BUILD_DIR}/mesa/tmp/native-file.ini "\
[binaries]
llvm-config = '${LIBDIR}/llvm/bin/llvm-config'"
)
set(MESA_EXTRA_FLAGS set(MESA_EXTRA_FLAGS
-Dbuildtype=release CFLAGS=${MESA_CFLAGS}
-Dc_args=${MESA_CFLAGS} CXXFLAGS=${MESA_CXXFLAGS}
-Dcpp_args=${MESA_CXXFLAGS} LDFLAGS=${MESA_LDFLAGS}
-Dc_link_args=${MESA_LDFLAGS} --enable-glx=gallium-xlib
-Dcpp_link_args=${MESA_LDFLAGS} --with-gallium-drivers=swrast
-Dglx=gallium-xlib --disable-dri
-Dgallium-drivers=swrast --disable-gbm
-Ddri-drivers= --disable-egl
-Dvulkan-drivers= --disable-gles1
-Dgbm=disabled --disable-gles2
-Degl=disabled --disable-llvm-shared-libs
-Dgles1=disabled --with-llvm-prefix=${LIBDIR}/llvm
-Dgles2=disabled
-Dshared-llvm=disabled
# Without this, the build fails when: `wayland-scanner` is not found.
# At some point we will likely want to support Wayland.
# Disable for now since it's not officially supported.
-Dplatforms=x11
--native-file ${BUILD_DIR}/mesa/tmp/native-file.ini
) )
ExternalProject_Add(external_mesa ExternalProject_Add(external_mesa
@@ -59,9 +42,9 @@ ExternalProject_Add(external_mesa
PREFIX ${BUILD_DIR}/mesa PREFIX ${BUILD_DIR}/mesa
CONFIGURE_COMMAND ${CONFIGURE_ENV} && CONFIGURE_COMMAND ${CONFIGURE_ENV} &&
cd ${BUILD_DIR}/mesa/src/external_mesa/ && cd ${BUILD_DIR}/mesa/src/external_mesa/ &&
meson ${BUILD_DIR}/mesa/src/external_mesa-build --prefix=${LIBDIR}/mesa ${MESA_EXTRA_FLAGS} ${CONFIGURE_COMMAND_NO_TARGET} --prefix=${LIBDIR}/mesa ${MESA_EXTRA_FLAGS}
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa-build && ninja -j${MAKE_THREADS} BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa/ && make -j${MAKE_THREADS}
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa-build && ninja install INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa/ && make install
INSTALL_DIR ${LIBDIR}/mesa INSTALL_DIR ${LIBDIR}/mesa
) )

View File

@@ -22,17 +22,8 @@ ExternalProject_Add(external_nasm
URL_HASH SHA256=${NASM_HASH} URL_HASH SHA256=${NASM_HASH}
PREFIX ${BUILD_DIR}/nasm PREFIX ${BUILD_DIR}/nasm
PATCH_COMMAND ${PATCH_CMD} --verbose -p 1 -N -d ${BUILD_DIR}/nasm/src/external_nasm < ${PATCH_DIR}/nasm.diff PATCH_COMMAND ${PATCH_CMD} --verbose -p 1 -N -d ${BUILD_DIR}/nasm/src/external_nasm < ${PATCH_DIR}/nasm.diff
CONFIGURE_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && ./autogen.sh && ${CONFIGURE_COMMAND} --prefix=${LIBDIR}/nasm CONFIGURE_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && ${CONFIGURE_COMMAND} --prefix=${LIBDIR}/nasm
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make -j${MAKE_THREADS} && make manpages BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make -j${MAKE_THREADS}
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make install INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make install
INSTALL_DIR ${LIBDIR}/nasm INSTALL_DIR ${LIBDIR}/nasm
) )
if(UNIX)
# `touch nasm.1 ndisasm.1` helps to create the manual pages files, even when
# local `asciidoc` and `xmlto` packages are not installed.
ExternalProject_Add_Step(external_nasm after_configure
COMMAND ${CMAKE_COMMAND} -E touch ${BUILD_DIR}/nasm/src/external_nasm/nasm.1 ${BUILD_DIR}/nasm/src/external_nasm/ndisasm.1
DEPENDEES configure
)
endif()

View File

@@ -47,5 +47,4 @@ ExternalProject_Add(external_numpy
add_dependencies( add_dependencies(
external_numpy external_numpy
external_python external_python
external_python_site_packages
) )

View File

@@ -18,22 +18,16 @@
set(OPENCOLORIO_EXTRA_ARGS set(OPENCOLORIO_EXTRA_ARGS
-DOCIO_BUILD_APPS=OFF -DOCIO_BUILD_APPS=OFF
-DOCIO_BUILD_PYTHON=OFF -DOCIO_BUILD_PYGLUE=OFF
-DOCIO_BUILD_NUKE=OFF -DOCIO_BUILD_NUKE=OFF
-DOCIO_BUILD_JAVA=OFF -DOCIO_USE_BOOST_PTR=OFF
-DBUILD_SHARED_LIBS=OFF -DOCIO_BUILD_STATIC=ON
-DOCIO_BUILD_SHARED=OFF
-DOCIO_BUILD_TRUELIGHT=OFF
-DOCIO_BUILD_DOCS=OFF -DOCIO_BUILD_DOCS=OFF
-DOCIO_BUILD_TESTS=OFF -DOCIO_BUILD_PYGLUE=OFF
-DOCIO_BUILD_GPU_TESTS=OFF -DOCIO_BUILD_JNIGLUE=OFF
-DOCIO_USE_SSE=ON -DOCIO_STATIC_JNIGLUE=OFF
# Manually build ext packages except for pystring, which does not have
# a CMake or autotools build system that we can easily use.
-DOCIO_INSTALL_EXT_PACKAGES=MISSING
-DHalf_ROOT=${LIBDIR}/openexr
-DHalf_STATIC_LIBRARY=ON
-Dexpat_ROOT=${LIBDIR}/expat
-Dyaml-cpp_ROOT=${LIBDIR}/yamlcpp
) )
if(APPLE AND NOT("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64")) if(APPLE AND NOT("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
@@ -44,11 +38,30 @@ if(APPLE AND NOT("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
endif() endif()
if(WIN32) if(WIN32)
set(OCIO_PATCH opencolorio_win.diff)
set(OPENCOLORIO_EXTRA_ARGS set(OPENCOLORIO_EXTRA_ARGS
${OPENCOLORIO_EXTRA_ARGS} ${OPENCOLORIO_EXTRA_ARGS}
-DOCIO_BUILD_TESTS=OFF
-DOCIO_USE_SSE=ON
-DOCIO_INLINES_HIDDEN=OFF -DOCIO_INLINES_HIDDEN=OFF
-DOCIO_PYGLUE_LINK=OFF
-DOCIO_PYGLUE_RESPECT_ABI=OFF
-DOCIO_PYGLUE_SONAME=OFF
-DOCIO_PYGLUE_LIB_PREFIX=OFF
-DUSE_EXTERNAL_TINYXML=ON
-DTINYXML_INCLUDE_DIR=${LIBDIR}/tinyxml/include
-DTINYXML_LIBRARY=${LIBDIR}/tinyxml/lib/tinyxml${libext}
-DUSE_EXTERNAL_YAML=ON
-DYAML_CPP_FOUND=ON
-DYAML_CPP_VERSION=${YAMLCPP_VERSION}
-DUSE_EXTERNAL_LCMS=ON
-DINC_1=${LIBDIR}/tinyxml/include
-DINC_2=${LIBDIR}/yamlcpp/include
# Lie because ocio cmake is demanding boost even though it is not needed.
-DYAML_CPP_VERSION=0.5.0
) )
else() else()
set(OCIO_PATCH opencolorio.diff)
set(OPENCOLORIO_EXTRA_ARGS set(OPENCOLORIO_EXTRA_ARGS
${OPENCOLORIO_EXTRA_ARGS} ${OPENCOLORIO_EXTRA_ARGS}
) )
@@ -59,43 +72,48 @@ ExternalProject_Add(external_opencolorio
DOWNLOAD_DIR ${DOWNLOAD_DIR} DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${OPENCOLORIO_HASH} URL_HASH MD5=${OPENCOLORIO_HASH}
PREFIX ${BUILD_DIR}/opencolorio PREFIX ${BUILD_DIR}/opencolorio
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/opencolorio/src/external_opencolorio < ${PATCH_DIR}/opencolorio.diff PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/opencolorio/src/external_opencolorio < ${PATCH_DIR}/${OCIO_PATCH}
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/opencolorio ${DEFAULT_CMAKE_FLAGS} ${OPENCOLORIO_EXTRA_ARGS} CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/opencolorio ${DEFAULT_CMAKE_FLAGS} ${OPENCOLORIO_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/opencolorio INSTALL_DIR ${LIBDIR}/opencolorio
) )
if(NOT WIN32)
add_custom_command(
OUTPUT ${LIBDIR}/opencolorio/lib/libtinyxml.a
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libtinyxml.a ${LIBDIR}/opencolorio/lib/libtinyxml.a
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libyaml-cpp.a ${LIBDIR}/opencolorio/lib/libyaml-cpp.a
)
add_custom_target(external_opencolorio_extra ALL DEPENDS external_opencolorio ${LIBDIR}/opencolorio/lib/libtinyxml.a)
endif()
add_dependencies( add_dependencies(
external_opencolorio external_opencolorio
external_yamlcpp external_boost
external_expat
external_openexr
) )
if(WIN32) if(WIN32)
add_dependencies(
external_opencolorio
external_tinyxml
external_yamlcpp
)
if(BUILD_MODE STREQUAL Release) if(BUILD_MODE STREQUAL Release)
ExternalProject_Add_Step(external_opencolorio after_install ExternalProject_Add_Step(external_opencolorio after_install
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/include ${HARVEST_TARGET}/opencolorio/include COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/include ${HARVEST_TARGET}/opencolorio/include
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/lib ${HARVEST_TARGET}/opencolorio/lib COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/lib/static ${HARVEST_TARGET}/opencolorio/lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/yamlcpp/lib/libyaml-cppmd.lib ${HARVEST_TARGET}/opencolorio/lib/libyaml-cpp.lib COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/yamlcpp/lib/libyaml-cppmd.lib ${HARVEST_TARGET}/opencolorio/lib/libyaml-cpp.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/expat/lib/libexpatMD.lib ${HARVEST_TARGET}/opencolorio/lib/libexpatMD.lib COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tinyxml/lib/tinyxml.lib ${HARVEST_TARGET}/opencolorio/lib/tinyxml.lib
COMMAND ${CMAKE_COMMAND} -E copy ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/pystring.lib ${HARVEST_TARGET}/opencolorio/lib/pystring.lib
DEPENDEES install DEPENDEES install
) )
endif() endif()
if(BUILD_MODE STREQUAL Debug) if(BUILD_MODE STREQUAL Debug)
ExternalProject_Add_Step(external_opencolorio after_install ExternalProject_Add_Step(external_opencolorio after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/opencolorio/lib/Opencolorio.lib ${HARVEST_TARGET}/opencolorio/lib/OpencolorIO_d.lib COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/opencolorio/lib/static/Opencolorio.lib ${HARVEST_TARGET}/opencolorio/lib/OpencolorIO_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/yamlcpp/lib/libyaml-cppmdd.lib ${HARVEST_TARGET}/opencolorio/lib/libyaml-cpp_d.lib COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/yamlcpp/lib/libyaml-cppmdd.lib ${HARVEST_TARGET}/opencolorio/lib/libyaml-cpp_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/expat/lib/libexpatdMD.lib ${HARVEST_TARGET}/opencolorio/lib/libexpatdMD.lib COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tinyxml/lib/tinyxml.lib ${HARVEST_TARGET}/opencolorio/lib/tinyxml_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/pystring.lib ${HARVEST_TARGET}/opencolorio/lib/pystring_d.lib
DEPENDEES install DEPENDEES install
) )
endif() endif()
else()
ExternalProject_Add_Step(external_opencolorio after_install
COMMAND cp ${LIBDIR}/yamlcpp/lib/libyaml-cpp.a ${LIBDIR}/opencolorio/lib/
COMMAND cp ${LIBDIR}/expat/lib/libexpat.a ${LIBDIR}/opencolorio/lib/
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libpystring.a ${LIBDIR}/opencolorio/lib/
DEPENDEES install
)
endif() endif()

View File

@@ -45,7 +45,6 @@ ExternalProject_Add(external_openimagedenoise
DOWNLOAD_DIR ${DOWNLOAD_DIR} DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${OIDN_HASH} URL_HASH MD5=${OIDN_HASH}
PREFIX ${BUILD_DIR}/openimagedenoise PREFIX ${BUILD_DIR}/openimagedenoise
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/openimagedenoise/src/external_openimagedenoise < ${PATCH_DIR}/oidn.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/openimagedenoise ${DEFAULT_CMAKE_FLAGS} ${OIDN_EXTRA_ARGS} CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/openimagedenoise ${DEFAULT_CMAKE_FLAGS} ${OIDN_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/openimagedenoise INSTALL_DIR ${LIBDIR}/openimagedenoise
) )

View File

@@ -30,5 +30,5 @@ ExternalProject_Add(external_openmp
add_dependencies( add_dependencies(
external_openmp external_openmp
ll external_clang
) )

View File

@@ -54,8 +54,6 @@ set(OPENVDB_EXTRA_ARGS
-DOPENVDB_CORE_STATIC=${OPENVDB_STATIC} -DOPENVDB_CORE_STATIC=${OPENVDB_STATIC}
-DOPENVDB_BUILD_BINARIES=Off -DOPENVDB_BUILD_BINARIES=Off
-DCMAKE_DEBUG_POSTFIX=_d -DCMAKE_DEBUG_POSTFIX=_d
-DILMBASE_USE_STATIC_LIBS=On
-DOPENEXR_USE_STATIC_LIBS=On
) )
if(WIN32) if(WIN32)

View File

@@ -19,7 +19,12 @@
if(WIN32) if(WIN32)
set(OSL_CMAKE_CXX_STANDARD_LIBRARIES "kernel32${LIBEXT} user32${LIBEXT} gdi32${LIBEXT} winspool${LIBEXT} shell32${LIBEXT} ole32${LIBEXT} oleaut32${LIBEXT} uuid${LIBEXT} comdlg32${LIBEXT} advapi32${LIBEXT} psapi${LIBEXT}") set(OSL_CMAKE_CXX_STANDARD_LIBRARIES "kernel32${LIBEXT} user32${LIBEXT} gdi32${LIBEXT} winspool${LIBEXT} shell32${LIBEXT} ole32${LIBEXT} oleaut32${LIBEXT} uuid${LIBEXT} comdlg32${LIBEXT} advapi32${LIBEXT} psapi${LIBEXT}")
set(OSL_FLEX_BISON -DFLEX_EXECUTABLE=${LIBDIR}/flexbison/win_flex.exe -DBISON_EXECUTABLE=${LIBDIR}/flexbison/win_bison.exe) set(OSL_FLEX_BISON -DFLEX_EXECUTABLE=${LIBDIR}/flexbison/win_flex.exe -DBISON_EXECUTABLE=${LIBDIR}/flexbison/win_bison.exe)
set(OSL_OPENIMAGEIO_LIBRARY "${LIBDIR}/openimageio/lib/${LIBPREFIX}OpenImageIO${LIBEXT};${LIBDIR}/openimageio/lib/${LIBPREFIX}OpenImageIO_Util${LIBEXT};${LIBDIR}/png/lib/libpng16${LIBEXT};${LIBDIR}/jpg/lib/${LIBPREFIX}jpeg${LIBEXT};${LIBDIR}/tiff/lib/${LIBPREFIX}tiff${LIBEXT};${LIBDIR}/openexr/lib/${LIBPREFIX}IlmImf${OPENEXR_VERSION_POSTFIX}${LIBEXT}")
if("${CMAKE_SIZEOF_VOID_P}" EQUAL "4")
set(OSL_SIMD_FLAGS -DOIIO_NOSIMD=1 -DOIIO_SIMD=0)
else()
set(OSL_SIMD_FLAGS -DOIIO_NOSIMD=1 -DOIIO_SIMD=sse2) set(OSL_SIMD_FLAGS -DOIIO_NOSIMD=1 -DOIIO_SIMD=sse2)
endif()
SET(OSL_PLATFORM_FLAGS -DLINKSTATIC=ON) SET(OSL_PLATFORM_FLAGS -DLINKSTATIC=ON)
else() else()
set(OSL_CMAKE_CXX_STANDARD_LIBRARIES) set(OSL_CMAKE_CXX_STANDARD_LIBRARIES)
@@ -29,6 +34,7 @@ else()
endif() endif()
set(OSL_ILMBASE_CUSTOM_LIBRARIES "${LIBDIR}/openexr/lib/Imath${OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/Half{OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/IlmThread${OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/Iex${OPENEXR_VERSION_POSTFIX}.lib") set(OSL_ILMBASE_CUSTOM_LIBRARIES "${LIBDIR}/openexr/lib/Imath${OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/Half{OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/IlmThread${OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/Iex${OPENEXR_VERSION_POSTFIX}.lib")
set(OSL_LLVM_LIBRARY "${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMAnalysis${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMAsmParser${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMAsmPrinter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMBitReader${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMBitWriter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMCodeGen${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMCore${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMDebugInfo${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMExecutionEngine${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMInstCombine${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMInstrumentation${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMInterpreter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMJIT${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMLinker${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMC${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMCDisassembler${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMCJIT${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMCParser${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMObject${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMRuntimeDyld${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMScalarOpts${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMSelectionDAG${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMSupport${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMTableGen${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMTarget${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMTransformUtils${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMVectorize${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86AsmParser${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86AsmPrinter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86CodeGen${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Desc${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Disassembler${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Info${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Utils${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMipa${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMipo${LIBEXT}")
set(OSL_EXTRA_ARGS set(OSL_EXTRA_ARGS
-DBoost_COMPILER:STRING=${BOOST_COMPILER_STRING} -DBoost_COMPILER:STRING=${BOOST_COMPILER_STRING}
@@ -39,8 +45,13 @@ set(OSL_EXTRA_ARGS
-DBOOST_LIBRARYDIR=${LIBDIR}/boost/lib/ -DBOOST_LIBRARYDIR=${LIBDIR}/boost/lib/
-DBoost_NO_SYSTEM_PATHS=ON -DBoost_NO_SYSTEM_PATHS=ON
-DBoost_NO_BOOST_CMAKE=ON -DBoost_NO_BOOST_CMAKE=ON
-DOpenEXR_ROOT=${LIBDIR}/openexr/ -DLLVM_DIRECTORY=${LIBDIR}/llvm
-DIlmBase_ROOT=${LIBDIR}/openexr/ -DLLVM_INCLUDES=${LIBDIR}/llvm/include
-DLLVM_LIB_DIR=${LIBDIR}/llvm/lib
-DLLVM_VERSION=3.4
-DLLVM_LIBRARY=${OSL_LLVM_LIBRARY}
-DOPENEXR_HOME=${LIBDIR}/openexr/
-DILMBASE_HOME=${LIBDIR}/openexr/
-DILMBASE_INCLUDE_DIR=${LIBDIR}/openexr/include/ -DILMBASE_INCLUDE_DIR=${LIBDIR}/openexr/include/
-DOPENEXR_HALF_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Half${OPENEXR_VERSION_POSTFIX}${LIBEXT} -DOPENEXR_HALF_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Half${OPENEXR_VERSION_POSTFIX}${LIBEXT}
-DOPENEXR_IMATH_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Imath${OPENEXR_VERSION_POSTFIX}${LIBEXT} -DOPENEXR_IMATH_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Imath${OPENEXR_VERSION_POSTFIX}${LIBEXT}
@@ -48,32 +59,35 @@ set(OSL_EXTRA_ARGS
-DOPENEXR_IEX_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Iex${OPENEXR_VERSION_POSTFIX}${LIBEXT} -DOPENEXR_IEX_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Iex${OPENEXR_VERSION_POSTFIX}${LIBEXT}
-DOPENEXR_INCLUDE_DIR=${LIBDIR}/openexr/include/ -DOPENEXR_INCLUDE_DIR=${LIBDIR}/openexr/include/
-DOPENEXR_ILMIMF_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}IlmImf${OPENEXR_VERSION_POSTFIX}${LIBEXT} -DOPENEXR_ILMIMF_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}IlmImf${OPENEXR_VERSION_POSTFIX}${LIBEXT}
-DOpenImageIO_ROOT=${LIBDIR}/openimageio/
-DOSL_BUILD_TESTS=OFF -DOSL_BUILD_TESTS=OFF
-DOSL_BUILD_MATERIALX=OFF -DOSL_BUILD_MATERIALX=OFF
-DZLIB_LIBRARY=${LIBDIR}/zlib/lib/${ZLIB_LIBRARY} -DZLIB_LIBRARY=${LIBDIR}/zlib/lib/${ZLIB_LIBRARY}
-DZLIB_INCLUDE_DIR=${LIBDIR}/zlib/include/ -DZLIB_INCLUDE_DIR=${LIBDIR}/zlib/include/
-DOPENIMAGEIOHOME=${LIBDIR}/openimageio/
-DOPENIMAGEIO_INCLUDE_DIR=${LIBDIR}/openimageio/include
-DOPENIMAGEIO_LIBRARY=${OSL_OPENIMAGEIO_LIBRARY}
${OSL_FLEX_BISON} ${OSL_FLEX_BISON}
-DCMAKE_CXX_STANDARD_LIBRARIES=${OSL_CMAKE_CXX_STANDARD_LIBRARIES} -DCMAKE_CXX_STANDARD_LIBRARIES=${OSL_CMAKE_CXX_STANDARD_LIBRARIES}
-DBUILD_SHARED_LIBS=OFF -DBUILDSTATIC=ON
${OSL_PLATFORM_FLAGS} ${OSL_PLATFORM_FLAGS}
-DOSL_BUILD_PLUGINS=OFF -DOSL_BUILD_PLUGINS=Off
-DSTOP_ON_WARNING=OFF -DSTOP_ON_WARNING=OFF
-DUSE_LLVM_BITCODE=OFF -DUSE_LLVM_BITCODE=OFF
-DLLVM_ROOT=${LIBDIR}/llvm/
-DLLVM_DIRECTORY=${LIBDIR}/llvm/
-DUSE_PARTIO=OFF -DUSE_PARTIO=OFF
-DUSE_QT=OFF -DUSE_QT=OFF
-DUSE_Qt5=OFF
-DINSTALL_DOCS=OFF -DINSTALL_DOCS=OFF
${OSL_SIMD_FLAGS} ${OSL_SIMD_FLAGS}
-Dpugixml_ROOT=${LIBDIR}/pugixml -DPARTIO_LIBRARIES=
-DUSE_PYTHON=OFF -DPUGIXML_HOME=${LIBDIR}/pugixml
) )
# Apple arm64 uses LLVM 11, LLVM 10+ requires C++14 if(APPLE)
if (APPLE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64") # Make symbol hiding consistent with OIIO which defaults to OFF,
list(APPEND OSL_EXTRA_ARGS -DCMAKE_CXX_STANDARD=14) # avoids linker warnings on macOS
set(OSL_EXTRA_ARGS
${OSL_EXTRA_ARGS}
-DHIDE_SYMBOLS=OFF
)
endif() endif()
ExternalProject_Add(external_osl ExternalProject_Add(external_osl
@@ -91,6 +105,7 @@ add_dependencies(
external_osl external_osl
external_boost external_boost
ll ll
external_clang
external_openexr external_openexr
external_zlib external_zlib
external_flexbison external_flexbison

View File

@@ -43,7 +43,7 @@ if(WIN32)
PREFIX ${BUILD_DIR}/python PREFIX ${BUILD_DIR}/python
CONFIGURE_COMMAND "" CONFIGURE_COMMAND ""
BUILD_COMMAND cd ${BUILD_DIR}/python/src/external_python/pcbuild/ && set IncludeTkinter=false && call build.bat -e -p x64 -c ${BUILD_MODE} BUILD_COMMAND cd ${BUILD_DIR}/python/src/external_python/pcbuild/ && set IncludeTkinter=false && call build.bat -e -p x64 -c ${BUILD_MODE}
INSTALL_COMMAND ${PYTHON_BINARY_INTERNAL} ${PYTHON_SRC}/PC/layout/main.py -b ${PYTHON_SRC}/PCbuild/amd64 -s ${PYTHON_SRC} -t ${PYTHON_SRC}/tmp/ --include-stable --include-pip --include-dev --include-launchers --include-venv --include-symbols ${PYTHON_EXTRA_INSTLAL_FLAGS} --copy ${LIBDIR}/python INSTALL_COMMAND ${PYTHON_BINARY_INTERNAL} ${PYTHON_SRC}/PC/layout/main.py -b ${PYTHON_SRC}/PCbuild/amd64 -s ${PYTHON_SRC} -t ${PYTHON_SRC}/tmp/ --include-underpth --include-stable --include-pip --include-dev --include-launchers --include-venv --include-symbols ${PYTHON_EXTRA_INSTLAL_FLAGS} --copy ${LIBDIR}/python
) )
else() else()
@@ -74,15 +74,16 @@ else()
endif() endif()
set(PYTHON_CONFIGURE_ENV ${CONFIGURE_ENV} && ${PYTHON_FUNC_CONFIGS}) set(PYTHON_CONFIGURE_ENV ${CONFIGURE_ENV} && ${PYTHON_FUNC_CONFIGS})
set(PYTHON_BINARY ${BUILD_DIR}/python/src/external_python/python.exe) set(PYTHON_BINARY ${BUILD_DIR}/python/src/external_python/python.exe)
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_macos.diff)
else() else()
set(PYTHON_CONFIGURE_ENV ${CONFIGURE_ENV}) set(PYTHON_CONFIGURE_ENV ${CONFIGURE_ENV})
set(PYTHON_BINARY ${BUILD_DIR}/python/src/external_python/python) set(PYTHON_BINARY ${BUILD_DIR}/python/src/external_python/python)
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_linux.diff)
endif() endif()
# Link against zlib statically (Unix). Avoid rpath issues (macOS).
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_unix.diff)
set(PYTHON_CONFIGURE_EXTRA_ARGS "--with-openssl=${LIBDIR}/ssl") set(PYTHON_CONFIGURE_EXTRA_ARGS "--with-openssl=${LIBDIR}/ssl")
set(PYTHON_CFLAGS "-I${LIBDIR}/sqlite/include -I${LIBDIR}/bzip2/include -I${LIBDIR}/lzma/include -I${LIBDIR}/zlib/include ${PLATFORM_CFLAGS}") set(PYTHON_CFLAGS "-I${LIBDIR}/sqlite/include -I${LIBDIR}/bzip2/include -I${LIBDIR}/lzma/include -I${LIBDIR}/zlib/include")
set(PYTHON_LDFLAGS "-L${LIBDIR}/ffi/lib -L${LIBDIR}/sqlite/lib -L${LIBDIR}/bzip2/lib -L${LIBDIR}/lzma/lib -L${LIBDIR}/zlib/lib ${PLATFORM_LDFLAGS}") set(PYTHON_LDFLAGS "-L${LIBDIR}/ffi/lib -L${LIBDIR}/sqlite/lib -L${LIBDIR}/bzip2/lib -L${LIBDIR}/lzma/lib -L${LIBDIR}/zlib/lib")
set(PYTHON_CONFIGURE_EXTRA_ENV set(PYTHON_CONFIGURE_EXTRA_ENV
export CFLAGS=${PYTHON_CFLAGS} && export CFLAGS=${PYTHON_CFLAGS} &&
export CPPFLAGS=${PYTHON_CFLAGS} && export CPPFLAGS=${PYTHON_CFLAGS} &&

View File

@@ -16,27 +16,14 @@
# #
# ***** END GPL LICENSE BLOCK ***** # ***** END GPL LICENSE BLOCK *****
if(WIN32 AND BUILD_MODE STREQUAL Debug)
set(SITE_PACKAGES_EXTRA --global-option build --global-option --debug)
endif()
ExternalProject_Add(external_python_site_packages ExternalProject_Add(external_python_site_packages
DOWNLOAD_COMMAND "" DOWNLOAD_COMMAND ""
CONFIGURE_COMMAND "" CONFIGURE_COMMAND ""
BUILD_COMMAND "" BUILD_COMMAND ""
PREFIX ${BUILD_DIR}/site_packages PREFIX ${BUILD_DIR}/site_packages
INSTALL_COMMAND ${PYTHON_BINARY} -m pip install ${SITE_PACKAGES_EXTRA} cython==${CYTHON_VERSION} idna==${IDNA_VERSION} chardet==${CHARDET_VERSION} urllib3==${URLLIB3_VERSION} certifi==${CERTIFI_VERSION} requests==${REQUESTS_VERSION} --no-binary :all: INSTALL_COMMAND ${PYTHON_BINARY} -m pip install idna==${IDNA_VERSION} chardet==${CHARDET_VERSION} urllib3==${URLLIB3_VERSION} certifi==${CERTIFI_VERSION} requests==${REQUESTS_VERSION} --no-binary :all:
) )
if(USE_PIP_NUMPY)
# Use only wheel (and not build from source) to stop NumPy from linking against buggy
# Accelerate framework backend on macOS. Official wheels are built with OpenBLAS.
ExternalProject_Add_Step(external_python_site_packages after_install
COMMAND ${PYTHON_BINARY} -m pip install --no-cache-dir numpy==${NUMPY_VERSION} --only-binary :all:
DEPENDEES install
)
endif()
add_dependencies( add_dependencies(
external_python_site_packages external_python_site_packages
external_python external_python

View File

@@ -42,21 +42,8 @@ if(UNIX)
-DSQLITE_MAX_VARIABLE_NUMBER=250000 \ -DSQLITE_MAX_VARIABLE_NUMBER=250000 \
-fPIC") -fPIC")
set(SQLITE_CONFIGURE_ENV ${SQLITE_CONFIGURE_ENV} && export LDFLAGS=${SQLITE_LDFLAGS} && export CFLAGS=${SQLITE_CFLAGS}) set(SQLITE_CONFIGURE_ENV ${SQLITE_CONFIGURE_ENV} && export LDFLAGS=${SQLITE_LDFLAGS} && export CFLAGS=${SQLITE_CFLAGS})
set(SQLITE_CONFIGURATION_ARGS set(SQLITE_CONFIGURATION_ARGS ${SQLITE_CONFIGURATION_ARGS} --enable-threadsafe --enable-load-extension --enable-json1 --enable-fts4 --enable-fts5 --disable-tcl
${SQLITE_CONFIGURATION_ARGS} --enable-shared=no)
--enable-threadsafe
--enable-load-extension
--enable-json1
--enable-fts4
--enable-fts5
# While building `tcl` is harmless, it causes problems when the install step
# tries to copy the files into the system path.
# Since this isn't required by Python or Blender this can be disabled.
# Note that Debian (for example), splits this off into a separate package,
# so it's safe to turn off.
--disable-tcl
--enable-shared=no
)
endif() endif()
ExternalProject_Add(external_sqlite ExternalProject_Add(external_sqlite

View File

@@ -42,8 +42,7 @@ ExternalProject_Add(external_tbb
URL_HASH MD5=${TBB_HASH} URL_HASH MD5=${TBB_HASH}
PREFIX ${BUILD_DIR}/tbb PREFIX ${BUILD_DIR}/tbb
PATCH_COMMAND COMMAND ${CMAKE_COMMAND} -E copy ${PATCH_DIR}/cmakelists_tbb.txt ${BUILD_DIR}/tbb/src/external_tbb/CMakeLists.txt && PATCH_COMMAND COMMAND ${CMAKE_COMMAND} -E copy ${PATCH_DIR}/cmakelists_tbb.txt ${BUILD_DIR}/tbb/src/external_tbb/CMakeLists.txt &&
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/tbb/src/external_tbb/build/vs2013/version_string.ver ${BUILD_DIR}/tbb/src/external_tbb/src/tbb/version_string.ver && ${CMAKE_COMMAND} -E copy ${BUILD_DIR}/tbb/src/external_tbb/build/vs2013/version_string.ver ${BUILD_DIR}/tbb/src/external_tbb/src/tbb/version_string.ver
${PATCH_CMD} -p 1 -d ${BUILD_DIR}/tbb/src/external_tbb < ${PATCH_DIR}/tbb.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/tbb ${DEFAULT_CMAKE_FLAGS} ${TBB_EXTRA_ARGS} CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/tbb ${DEFAULT_CMAKE_FLAGS} ${TBB_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/tbb INSTALL_DIR ${LIBDIR}/tbb
) )

View File

@@ -16,15 +16,16 @@
# #
# ***** END GPL LICENSE BLOCK ***** # ***** END GPL LICENSE BLOCK *****
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")) set(TINYXML_EXTRA_ARGS
ExternalProject_Add(external_sse2neon )
GIT_REPOSITORY ${SSE2NEON_GIT}
GIT_TAG ${SSE2NEON_GIT_HASH} ExternalProject_Add(external_tinyxml
URL ${TINYXML_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR} DOWNLOAD_DIR ${DOWNLOAD_DIR}
PREFIX ${BUILD_DIR}/sse2neon URL_HASH MD5=${TINYXML_HASH}
CONFIGURE_COMMAND echo sse2neon - Nothing to configure PREFIX ${BUILD_DIR}/tinyxml
BUILD_COMMAND echo sse2neon - nothing to build # patch taken from ocio
INSTALL_COMMAND mkdir -p ${LIBDIR}/sse2neon && cp ${BUILD_DIR}/sse2neon/src/external_sse2neon/sse2neon.h ${LIBDIR}/sse2neon PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/tinyxml/src/external_tinyxml < ${PATCH_DIR}/tinyxml.diff
INSTALL_DIR ${LIBDIR}/sse2neon CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/tinyxml ${DEFAULT_CMAKE_FLAGS} ${TINYXML_EXTRA_ARGS}
) INSTALL_DIR ${LIBDIR}/tinyxml
endif() )
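The sse2neon side of this hunk treats the dependency as a single header: configure and build are replaced with echo no-ops and the install step simply copies sse2neon.h into place. A hedged sketch of that header-only recipe, with hypothetical names (external_headeronly, HEADERONLY_GIT, HEADERONLY_GIT_HASH):

include(ExternalProject)

# Header-only dependency: nothing to configure or compile, only a copy on install.
ExternalProject_Add(external_headeronly
  GIT_REPOSITORY ${HEADERONLY_GIT}
  GIT_TAG ${HEADERONLY_GIT_HASH}
  DOWNLOAD_DIR ${DOWNLOAD_DIR}
  PREFIX ${BUILD_DIR}/headeronly
  CONFIGURE_COMMAND echo headeronly - nothing to configure
  BUILD_COMMAND echo headeronly - nothing to build
  INSTALL_COMMAND ${CMAKE_COMMAND} -E make_directory ${LIBDIR}/headeronly &&
                  ${CMAKE_COMMAND} -E copy ${BUILD_DIR}/headeronly/src/external_headeronly/headeronly.h ${LIBDIR}/headeronly/
  INSTALL_DIR ${LIBDIR}/headeronly
)

The echo commands keep ExternalProject's configure and build steps satisfied while doing nothing, matching what the real recipe does with its echo lines.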

View File

@@ -32,11 +32,11 @@ set(JPEG_VERSION 2.0.4)
set(JPEG_URI https://github.com/libjpeg-turbo/libjpeg-turbo/archive/${JPEG_VERSION}.tar.gz) set(JPEG_URI https://github.com/libjpeg-turbo/libjpeg-turbo/archive/${JPEG_VERSION}.tar.gz)
set(JPEG_HASH 44c43e4a9fb352f47090804529317c88) set(JPEG_HASH 44c43e4a9fb352f47090804529317c88)
set(BOOST_VERSION 1.73.0) set(BOOST_VERSION 1.70.0)
set(BOOST_VERSION_NODOTS 1_73_0) set(BOOST_VERSION_NODOTS 1_70_0)
set(BOOST_VERSION_NODOTS_SHORT 1_73) set(BOOST_VERSION_NODOTS_SHORT 1_70)
set(BOOST_URI https://dl.bintray.com/boostorg/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz) set(BOOST_URI https://dl.bintray.com/boostorg/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz)
set(BOOST_HASH 4036cd27ef7548b8d29c30ea10956196) set(BOOST_HASH fea771fe8176828fabf9c09242ee8c26)
# Using old version as recommended by OpenVDB build documentation. # Using old version as recommended by OpenVDB build documentation.
set(BLOSC_VERSION 1.5.0) set(BLOSC_VERSION 1.5.0)
@@ -47,9 +47,9 @@ set(PTHREADS_VERSION 3.0.0)
set(PTHREADS_URI http://sourceforge.mirrorservice.org/p/pt/pthreads4w/pthreads4w-code-v${PTHREADS_VERSION}.zip) set(PTHREADS_URI http://sourceforge.mirrorservice.org/p/pt/pthreads4w/pthreads4w-code-v${PTHREADS_VERSION}.zip)
set(PTHREADS_HASH f3bf81bb395840b3446197bcf4ecd653) set(PTHREADS_HASH f3bf81bb395840b3446197bcf4ecd653)
set(OPENEXR_VERSION 2.5.5) set(OPENEXR_VERSION 2.4.0)
set(OPENEXR_URI https://github.com/AcademySoftwareFoundation/openexr/archive/v${OPENEXR_VERSION}.tar.gz) set(OPENEXR_URI https://github.com/AcademySoftwareFoundation/openexr/archive/v${OPENEXR_VERSION}.tar.gz)
set(OPENEXR_HASH 85e8a979092c9055d10ed103062d31a0) set(OPENEXR_HASH 9e4d69cf2a12c6fb19b98af7c5e0eaee)
if(WIN32) if(WIN32)
# Openexr started appending _d on its own so now # Openexr started appending _d on its own so now
# we need to tell the build the postfix is _s while # we need to tell the build the postfix is _s while
@@ -78,9 +78,9 @@ set(FREEGLUT_VERSION 3.0.0)
set(FREEGLUT_URI http://pilotfiber.dl.sourceforge.net/project/freeglut/freeglut/${FREEGLUT_VERSION}/freeglut-${FREEGLUT_VERSION}.tar.gz) set(FREEGLUT_URI http://pilotfiber.dl.sourceforge.net/project/freeglut/freeglut/${FREEGLUT_VERSION}/freeglut-${FREEGLUT_VERSION}.tar.gz)
set(FREEGLUT_HASH 90c3ca4dd9d51cf32276bc5344ec9754) set(FREEGLUT_HASH 90c3ca4dd9d51cf32276bc5344ec9754)
set(ALEMBIC_VERSION 1.7.16) set(ALEMBIC_VERSION 1.7.12)
set(ALEMBIC_URI https://github.com/alembic/alembic/archive/${ALEMBIC_VERSION}.tar.gz) set(ALEMBIC_URI https://github.com/alembic/alembic/archive/${ALEMBIC_VERSION}.tar.gz)
set(ALEMBIC_MD5 effcc86e42fe6605588e3de57bde6677) set(ALEMBIC_MD5 e2b3777f23c5c09481a008cc6f0f8a40)
# hash is for 3.1.2 # hash is for 3.1.2
set(GLFW_GIT_UID 30306e54705c3adae9fe082c816a3be71963485c) set(GLFW_GIT_UID 30306e54705c3adae9fe082c816a3be71963485c)
@@ -109,27 +109,22 @@ set(OPENCOLLADA_VERSION v1.6.68)
set(OPENCOLLADA_URI https://github.com/KhronosGroup/OpenCOLLADA/archive/${OPENCOLLADA_VERSION}.tar.gz) set(OPENCOLLADA_URI https://github.com/KhronosGroup/OpenCOLLADA/archive/${OPENCOLLADA_VERSION}.tar.gz)
set(OPENCOLLADA_HASH ee7dae874019fea7be11613d07567493) set(OPENCOLLADA_HASH ee7dae874019fea7be11613d07567493)
set(OPENCOLORIO_VERSION 2.0.0) set(OPENCOLORIO_VERSION 1.1.1)
set(OPENCOLORIO_URI https://github.com/AcademySoftwareFoundation/OpenColorIO/archive/v${OPENCOLORIO_VERSION}.tar.gz) set(OPENCOLORIO_URI https://github.com/AcademySoftwareFoundation/OpenColorIO/archive/v${OPENCOLORIO_VERSION}.tar.gz)
set(OPENCOLORIO_HASH 1a2e3478b6cd9a1549f24e1b2205e3f0) set(OPENCOLORIO_HASH 23d8b9ac81599305539a5a8674b94a3d)
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")) set(LLVM_VERSION 9.0.1)
# Newer version required by ISPC with arm support. set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-${LLVM_VERSION}.src.tar.xz)
set(LLVM_VERSION 11.0.1) set(LLVM_HASH 31eb9ce73dd2a0f8dcab8319fb03f8fc)
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-project-${LLVM_VERSION}.src.tar.xz)
set(LLVM_HASH e700af40ab83463e4e9ab0ba3708312e)
set(OPENMP_VERSION 9.0.1) set(CLANG_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-${LLVM_VERSION}.src.tar.xz)
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${OPENMP_VERSION}/openmp-${OPENMP_VERSION}.src.tar.xz) set(CLANG_HASH 13468e4a44940efef1b75e8641752f90)
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
else()
set(LLVM_VERSION 9.0.1)
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-project-${LLVM_VERSION}.tar.xz)
set(LLVM_HASH b4268e733dfe352960140dc07ef2efcb)
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/openmp-${LLVM_VERSION}.src.tar.xz) set(CLANG_TOOLS_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-tools-extra-${LLVM_VERSION}.src.tar.xz)
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf) set(CLANG_TOOLS_HASH c76293870b564c6a7968622b475b7646)
endif()
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/openmp-${LLVM_VERSION}.src.tar.xz)
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
set(OPENIMAGEIO_VERSION 2.1.15.0) set(OPENIMAGEIO_VERSION 2.1.15.0)
set(OPENIMAGEIO_URI https://github.com/OpenImageIO/oiio/archive/Release-${OPENIMAGEIO_VERSION}.tar.gz) set(OPENIMAGEIO_URI https://github.com/OpenImageIO/oiio/archive/Release-${OPENIMAGEIO_VERSION}.tar.gz)
@@ -139,39 +134,38 @@ set(TIFF_VERSION 4.1.0)
set(TIFF_URI http://download.osgeo.org/libtiff/tiff-${TIFF_VERSION}.tar.gz) set(TIFF_URI http://download.osgeo.org/libtiff/tiff-${TIFF_VERSION}.tar.gz)
set(TIFF_HASH 2165e7aba557463acc0664e71a3ed424) set(TIFF_HASH 2165e7aba557463acc0664e71a3ed424)
set(OSL_VERSION 1.11.10.0) set(OSL_VERSION 1.10.10)
set(OSL_URI https://github.com/imageworks/OpenShadingLanguage/archive/Release-${OSL_VERSION}.tar.gz) set(OSL_URI https://github.com/imageworks/OpenShadingLanguage/archive/Release-${OSL_VERSION}.tar.gz)
set(OSL_HASH dfdc23597aeef083832cbada62211756) set(OSL_HASH 00dec08a93c8084e53848b9ad047889f)
set(PYTHON_VERSION 3.9.2) set(PYTHON_VERSION 3.7.7)
set(PYTHON_SHORT_VERSION 3.9) set(PYTHON_SHORT_VERSION 3.7)
set(PYTHON_SHORT_VERSION_NO_DOTS 39) set(PYTHON_SHORT_VERSION_NO_DOTS 37)
set(PYTHON_URI https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tar.xz) set(PYTHON_URI https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tar.xz)
set(PYTHON_HASH f0dc9000312abeb16de4eccce9a870ab) set(PYTHON_HASH 172c650156f7bea68ce31b2fd01fa766)
set(TBB_VERSION 2020_U2) set(TBB_VERSION 2019_U9)
set(TBB_URI https://github.com/oneapi-src/oneTBB/archive/${TBB_VERSION}.tar.gz) set(TBB_URI https://github.com/oneapi-src/oneTBB/archive/${TBB_VERSION}.tar.gz)
set(TBB_HASH 1b711ae956524855088df3bbf5ec65dc) set(TBB_HASH 26263622e9187212ec240dcf01b66207)
set(OPENVDB_VERSION 8.0.1) set(OPENVDB_VERSION 7.0.0)
set(OPENVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/v${OPENVDB_VERSION}.tar.gz) set(OPENVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/v${OPENVDB_VERSION}.tar.gz)
set(OPENVDB_HASH 01b490be16cc0e15c690f9a153c21461) set(OPENVDB_HASH fd6c4f168282f7e0e494d290cd531fa8)
set(NANOVDB_GIT_UID e62f7a0bf1e27397223c61ddeaaf57edf111b77f) set(NANOVDB_GIT_UID e62f7a0bf1e27397223c61ddeaaf57edf111b77f)
set(NANOVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/${NANOVDB_GIT_UID}.tar.gz) set(NANOVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/${NANOVDB_GIT_UID}.tar.gz)
set(NANOVDB_HASH 90919510bc6ccd630fedc56f748cb199) set(NANOVDB_HASH 90919510bc6ccd630fedc56f748cb199)
set(IDNA_VERSION 2.10) set(IDNA_VERSION 2.9)
set(CHARDET_VERSION 4.0.0) set(CHARDET_VERSION 3.0.4)
set(URLLIB3_VERSION 1.26.3) set(URLLIB3_VERSION 1.25.9)
set(CERTIFI_VERSION 2020.12.5) set(CERTIFI_VERSION 2020.4.5.2)
set(REQUESTS_VERSION 2.25.1) set(REQUESTS_VERSION 2.23.0)
set(CYTHON_VERSION 0.29.21)
set(NUMPY_VERSION 1.19.5) set(NUMPY_VERSION 1.17.5)
set(NUMPY_SHORT_VERSION 1.19) set(NUMPY_SHORT_VERSION 1.17)
set(NUMPY_URI https://github.com/numpy/numpy/releases/download/v${NUMPY_VERSION}/numpy-${NUMPY_VERSION}.zip) set(NUMPY_URI https://github.com/numpy/numpy/releases/download/v${NUMPY_VERSION}/numpy-${NUMPY_VERSION}.zip)
set(NUMPY_HASH f6a1b48717c552bbc18f1adc3cc1fe0e) set(NUMPY_HASH 763a5646fa6eef7a22f4895bca0524f2)
set(LAME_VERSION 3.100) set(LAME_VERSION 3.100)
set(LAME_URI http://downloads.sourceforge.net/project/lame/lame/3.100/lame-${LAME_VERSION}.tar.gz) set(LAME_URI http://downloads.sourceforge.net/project/lame/lame/3.100/lame-${LAME_VERSION}.tar.gz)
@@ -262,16 +256,16 @@ set(YAMLCPP_VERSION 0.6.3)
set(YAMLCPP_URI https://codeload.github.com/jbeder/yaml-cpp/tar.gz/yaml-cpp-${YAMLCPP_VERSION}) set(YAMLCPP_URI https://codeload.github.com/jbeder/yaml-cpp/tar.gz/yaml-cpp-${YAMLCPP_VERSION})
set(YAMLCPP_HASH b45bf1089a382e81f6b661062c10d0c2) set(YAMLCPP_HASH b45bf1089a382e81f6b661062c10d0c2)
set(EXPAT_VERSION 2_2_10) set(LCMS_VERSION 2.9)
set(EXPAT_URI https://github.com/libexpat/libexpat/archive/R_${EXPAT_VERSION}.tar.gz) set(LCMS_URI https://nchc.dl.sourceforge.net/project/lcms/lcms/${LCMS_VERSION}/lcms2-${LCMS_VERSION}.tar.gz)
set(EXPAT_HASH 7ca5f09959fcb9a57618368deb627b9f) set(LCMS_HASH 8de1b7724f578d2995c8fdfa35c3ad0e)
set(PUGIXML_VERSION 1.10) set(PUGIXML_VERSION 1.10)
set(PUGIXML_URI https://github.com/zeux/pugixml/archive/v${PUGIXML_VERSION}.tar.gz) set(PUGIXML_URI https://github.com/zeux/pugixml/archive/v${PUGIXML_VERSION}.tar.gz)
set(PUGIXML_HASH 0c208b0664c7fb822bf1b49ad035e8fd) set(PUGIXML_HASH 0c208b0664c7fb822bf1b49ad035e8fd)
set(FLEXBISON_VERSION 2.5.5) set(FLEXBISON_VERSION 2.5.5)
set(FLEXBISON_URI http://prdownloads.sourceforge.net/winflexbison/win_flex_bison-2.5.5.zip) set(FLEXBISON_URI http://prdownloads.sourceforge.net/winflexbison//win_flex_bison-2.5.5.zip)
set(FLEXBISON_HASH d87a3938194520d904013abef3df10ce) set(FLEXBISON_HASH d87a3938194520d904013abef3df10ce)
# Libraries to keep Python modules static on Linux. # Libraries to keep Python modules static on Linux.
@@ -301,41 +295,34 @@ set(SQLITE_HASH fb558c49ee21a837713c4f1e7e413309aabdd9c7)
set(EMBREE_VERSION 3.10.0) set(EMBREE_VERSION 3.10.0)
set(EMBREE_URI https://github.com/embree/embree/archive/v${EMBREE_VERSION}.zip) set(EMBREE_URI https://github.com/embree/embree/archive/v${EMBREE_VERSION}.zip)
set(EMBREE_HASH 4bbe29e7eaa46417efc75fc5f1e8eb87) set(EMBREE_HASH 4bbe29e7eaa46417efc75fc5f1e8eb87)
set(EMBREE_ARM_GIT https://github.com/brechtvl/embree.git)
set(USD_VERSION 21.02) set(USD_VERSION 20.05)
set(USD_URI https://github.com/PixarAnimationStudios/USD/archive/v${USD_VERSION}.tar.gz) set(USD_URI https://github.com/PixarAnimationStudios/USD/archive/v${USD_VERSION}.tar.gz)
set(USD_HASH 1dd1e2092d085ed393c1f7c450a4155a) set(USD_HASH 6d679e739e7f65725d9c029e37dda9fc)
set(OIDN_VERSION 1.3.0) set(OIDN_VERSION 1.2.3)
set(OIDN_URI https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz) set(OIDN_URI https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz)
set(OIDN_HASH 301a5a0958d375a942014df0679b9270) set(OIDN_HASH 1f11466c2c3efc27faba5ec7078d12b2)
set(LIBGLU_VERSION 9.0.1) set(LIBGLU_VERSION 9.0.1)
set(LIBGLU_URI ftp://ftp.freedesktop.org/pub/mesa/glu/glu-${LIBGLU_VERSION}.tar.xz) set(LIBGLU_URI ftp://ftp.freedesktop.org/pub/mesa/glu/glu-${LIBGLU_VERSION}.tar.xz)
set(LIBGLU_HASH 151aef599b8259efe9acd599c96ea2a3) set(LIBGLU_HASH 151aef599b8259efe9acd599c96ea2a3)
set(MESA_VERSION 20.3.4) set(MESA_VERSION 18.3.1)
set(MESA_URI ftp://ftp.freedesktop.org/pub/mesa/mesa-${MESA_VERSION}.tar.xz) set(MESA_URI ftp://ftp.freedesktop.org/pub/mesa//mesa-${MESA_VERSION}.tar.xz)
set(MESA_HASH 556338446aef8ae947a789b3e0b5e056) set(MESA_HASH d60828056d77bfdbae0970f9b15fb1be)
set(NASM_VERSION 2.15.02) set(NASM_VERSION 2.15.02)
set(NASM_URI https://github.com/netwide-assembler/nasm/archive/nasm-${NASM_VERSION}.tar.gz) set(NASM_URI https://www.nasm.us/pub/nasm/releasebuilds/${NASM_VERSION}/nasm-${NASM_VERSION}.tar.xz)
set(NASM_HASH aded8b796c996a486a56e0515c83e414116decc3b184d88043480b32eb0a8589) set(NASM_HASH f4fd1329b1713e1ccd34b2fc121c4bcd278c9f91cc4cb205ae8fcd2e4728dd14)
set(XR_OPENXR_SDK_VERSION 1.0.14) set(XR_OPENXR_SDK_VERSION 1.0.8)
set(XR_OPENXR_SDK_URI https://github.com/KhronosGroup/OpenXR-SDK/archive/release-${XR_OPENXR_SDK_VERSION}.tar.gz) set(XR_OPENXR_SDK_URI https://github.com/KhronosGroup/OpenXR-SDK/archive/release-${XR_OPENXR_SDK_VERSION}.tar.gz)
set(XR_OPENXR_SDK_HASH 0df6b2fd6045423451a77ff6bc3e1a75) set(XR_OPENXR_SDK_HASH c6de63d2e0f9029aa58dfa97cad8ce07)
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")) set(ISPC_VERSION v1.14.1)
# Unreleased version with macOS arm support. set(ISPC_URI https://github.com/ispc/ispc/archive/${ISPC_VERSION}.tar.gz)
set(ISPC_URI https://github.com/ispc/ispc/archive/f5949c055eb9eeb93696978a3da4bfb3a6a30b35.zip) set(ISPC_HASH 968fbc8dfd16a60ba4e32d2e0e03ea7a)
set(ISPC_HASH d382fea18d01dbd0cd05d9e1ede36d7d)
else()
set(ISPC_VERSION v1.14.1)
set(ISPC_URI https://github.com/ispc/ispc/archive/${ISPC_VERSION}.tar.gz)
set(ISPC_HASH 968fbc8dfd16a60ba4e32d2e0e03ea7a)
endif()
set(GMP_VERSION 6.2.0) set(GMP_VERSION 6.2.0)
set(GMP_URI https://gmplib.org/download/gmp/gmp-${GMP_VERSION}.tar.xz) set(GMP_URI https://gmplib.org/download/gmp/gmp-${GMP_VERSION}.tar.xz)
@@ -348,6 +335,3 @@ set(POTRACE_HASH 5f0bd87ddd9a620b0c4e65652ef93d69)
set(HARU_VERSION 2_3_0) set(HARU_VERSION 2_3_0)
set(HARU_URI https://github.com/libharu/libharu/archive/RELEASE_${HARU_VERSION}.tar.gz) set(HARU_URI https://github.com/libharu/libharu/archive/RELEASE_${HARU_VERSION}.tar.gz)
set(HARU_HASH 4f916aa49c3069b3a10850013c507460) set(HARU_HASH 4f916aa49c3069b3a10850013c507460)
set(SSE2NEON_GIT https://github.com/DLTcollab/sse2neon.git)
set(SSE2NEON_GIT_HASH fe5ff00bb8d19b327714a3c290f3e2ce81ba3525)
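Each dependency in this block is described by a *_VERSION / *_URI / *_HASH triple, and the per-library recipes earlier in the diff consume the triple as the download URL and integrity check. A minimal sketch of that relationship, using a placeholder "foo" dependency and the placeholder URL example.org:

# In the central versions list: one triple per dependency.
set(FOO_VERSION 1.2.3)
set(FOO_URI https://example.org/foo-${FOO_VERSION}.tar.gz)
set(FOO_HASH 0123456789abcdef0123456789abcdef)

# In the dependency's own recipe: the triple drives download and verification.
include(ExternalProject)
ExternalProject_Add(external_foo
  URL ${FOO_URI}
  DOWNLOAD_DIR ${DOWNLOAD_DIR}
  URL_HASH MD5=${FOO_HASH}
  PREFIX ${BUILD_DIR}/foo
  CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/foo ${DEFAULT_CMAKE_FLAGS}
)

Bumping a version therefore means updating the triple here and, when needed, the matching patch files changed elsewhere in this diff.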

View File

@@ -53,7 +53,7 @@ if(WIN32)
endif() endif()
if(BUILD_MODE STREQUAL Debug) if(BUILD_MODE STREQUAL Debug)
ExternalProject_Add_Step(external_xr_openxr_sdk after_install ExternalProject_Add_Step(external_xr_openxr_sdk after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/xr_openxr_sdk/lib/openxr_loaderd.lib ${HARVEST_TARGET}/xr_openxr_sdk/lib/openxr_loaderd.lib COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/xr_openxr_sdk/lib/openxr_loader.lib ${HARVEST_TARGET}/xr_openxr_sdk/lib/openxr_loader_d.lib
DEPENDEES install DEPENDEES install
) )
endif() endif()

View File

@@ -17,18 +17,13 @@
# ***** END GPL LICENSE BLOCK ***** # ***** END GPL LICENSE BLOCK *****
set(YAMLCPP_EXTRA_ARGS set(YAMLCPP_EXTRA_ARGS
-DBUILD_GMOCK=OFF
-DYAML_CPP_BUILD_TESTS=OFF -DYAML_CPP_BUILD_TESTS=OFF
-DYAML_CPP_BUILD_TOOLS=OFF -DYAML_CPP_BUILD_TOOLS=OFF
-DYAML_CPP_BUILD_CONTRIB=OFF -DYAML_CPP_BUILD_CONTRIB=OFF
-DYAML_MSVC_SHARED_RT=ON
) )
if(WIN32)
set(YAMLCPP_EXTRA_ARGS
${YAMLCPP_EXTRA_ARGS}
-DBUILD_GMOCK=OFF
-DYAML_MSVC_SHARED_RT=ON)
endif()
ExternalProject_Add(external_yamlcpp ExternalProject_Add(external_yamlcpp
URL ${YAMLCPP_URI} URL ${YAMLCPP_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR} DOWNLOAD_DIR ${DOWNLOAD_DIR}

File diff suppressed because it is too large

View File

@@ -20,7 +20,6 @@ if(APPLE)
endif() endif()
file(GLOB tbb_src "${CMAKE_CURRENT_SOURCE_DIR}/src/tbb/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/src/old/*.cpp") file(GLOB tbb_src "${CMAKE_CURRENT_SOURCE_DIR}/src/tbb/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/src/old/*.cpp")
list(REMOVE_ITEM tbb_src ${CMAKE_CURRENT_SOURCE_DIR}/src/tbb/tbb_bind.cpp)
list(APPEND tbb_src ${CMAKE_CURRENT_SOURCE_DIR}/src/rml/client/rml_tbb.cpp) list(APPEND tbb_src ${CMAKE_CURRENT_SOURCE_DIR}/src/rml/client/rml_tbb.cpp)
file(GLOB to_remove "${CMAKE_CURRENT_SOURCE_DIR}/src/old/test*.cpp") file(GLOB to_remove "${CMAKE_CURRENT_SOURCE_DIR}/src/old/test*.cpp")
list(REMOVE_ITEM tbb_src ${to_remove}) list(REMOVE_ITEM tbb_src ${to_remove})

View File

@@ -1,5 +1,5 @@
--- a/llvm/lib/Support/Unix/Path.inc 2020-02-17 09:24:26.000000000 +0100 --- a/lib/Support/Unix/Path.inc 2020-02-17 09:24:26.000000000 +0100
+++ b/llvm/lib/Support/Unix/Path.inc 2020-02-17 09:26:25.000000000 +0100 +++ b/lib/Support/Unix/Path.inc 2020-02-17 09:26:25.000000000 +0100
@@ -1200,7 +1200,9 @@ @@ -1200,7 +1200,9 @@
/// implementation. /// implementation.
std::error_code copy_file(const Twine &From, const Twine &To) { std::error_code copy_file(const Twine &From, const Twine &To) {

View File

@@ -1,40 +0,0 @@
diff -Naur oidn-1.3.0/cmake/FindTBB.cmake external_openimagedenoise/cmake/FindTBB.cmake
--- oidn-1.3.0/cmake/FindTBB.cmake 2021-02-04 16:20:26 -0700
+++ external_openimagedenoise/cmake/FindTBB.cmake 2021-02-12 09:35:53 -0700
@@ -332,20 +332,22 @@
${TBB_ROOT}/lib/${TBB_ARCH}/${TBB_VCVER}
${TBB_ROOT}/lib
)
-
# On Windows, also search the DLL so that the client may install it.
file(GLOB DLL_NAMES
${TBB_ROOT}/bin/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/bin/${LIB_NAME}.dll
+ ${TBB_ROOT}/lib/${LIB_NAME}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB1}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB2}.dll
${TBB_ROOT}/../redist/${TBB_ARCH}/tbb/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/../redist/${TBB_ARCH}_win/tbb/${TBB_VCVER}/${LIB_NAME}.dll
)
- list(GET DLL_NAMES 0 DLL_NAME)
- get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
- set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ if (DLL_NAMES)
+ list(GET DLL_NAMES 0 DLL_NAME)
+ get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
+ set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ endif()
elseif(APPLE)
set(LIB_PATHS ${TBB_ROOT}/lib)
else()
--- external_openimagedenoise/cmake/oidn_ispc.cmake 2021-02-15 17:29:34.000000000 +0100
+++ external_openimagedenoise/cmake/oidn_ispc.cmake2 2021-02-15 17:29:28.000000000 +0100
@@ -98,7 +98,7 @@
elseif(OIDN_ARCH STREQUAL "ARM64")
set(ISPC_ARCHITECTURE "aarch64")
if(APPLE)
- set(ISPC_TARGET_OS "--target-os=ios")
+ set(ISPC_TARGET_OS "--target-os=macos")
endif()
endif()
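The FindTBB portion of this patch wraps the list(GET DLL_NAMES 0 DLL_NAME) call in if(DLL_NAMES): list(GET) fails at configure time when the preceding file(GLOB ...) matched nothing, so the lookup has to be guarded. The same defensive pattern in isolation, with hypothetical variable names (only TBB_ROOT comes from the patched module):

# Only index into the glob result if it actually found candidates.
file(GLOB _dll_candidates "${TBB_ROOT}/bin/*.dll")
if(_dll_candidates)
  list(GET _dll_candidates 0 _first_dll)
  get_filename_component(_dll_dir "${_first_dll}" DIRECTORY)
  set(TBB_DLL "${_first_dll}" CACHE PATH "TBB dll path")
endif()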

View File

@@ -1,16 +1,33 @@
diff --git a/share/cmake/modules/Findpystring.cmake b/share/cmake/modules/Findpystring.cmake diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 7b894a45..92618215 100644 index 1eb691b..cff9bd8 100644
--- a/share/cmake/modules/Findpystring.cmake --- a/src/core/CMakeLists.txt
+++ b/share/cmake/modules/Findpystring.cmake +++ b/src/core/CMakeLists.txt
@@ -113,6 +113,11 @@ if(NOT pystring_FOUND) @@ -23,8 +23,6 @@ if(WIN32)
-DCMAKE_INSTALL_MESSAGE=${CMAKE_INSTALL_MESSAGE} if("${CMAKE_BUILD_TYPE}" STREQUAL "Release")
-DCMAKE_INSTALL_PREFIX=${_EXT_DIST_ROOT} set(EXTERNAL_COMPILE_FLAGS "${EXTERNAL_COMPILE_FLAGS} /WX")
-DCMAKE_OBJECT_PATH_MAX=${CMAKE_OBJECT_PATH_MAX} endif()
+ -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES} -else()
+ -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET} - set(EXTERNAL_COMPILE_FLAGS "${EXTERNAL_COMPILE_FLAGS} -Werror")
+ -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} endif()
+ -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG}
+ -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE} # SHARED
--- a/CMakeLists.txt 2018-09-10 22:15:29.000000000 +0200
+++ b/CMakeLists.txt 2018-09-10 22:17:40.000000000 +0200
@@ -229,7 +229,7 @@
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-whitespace ${TINYXML_PATCHFILE}
BINARY_DIR ext/build/tinyxml
INSTALL_DIR ext/dist
- CMAKE_ARGS ${TINYXML_CMAKE_ARGS}
+ CMAKE_ARGS ${TINYXML_CMAKE_ARGS} -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET} -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} -DCMAKE_C_FLAGS_DEBUG=${CMAKE_C_FLAGS_DEBUG} -DCMAKE_C_FLAGS_RELEASE=${CMAKE_C_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG} -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
) )
if(CMAKE_TOOLCHAIN_FILE) if(WIN32)
set(pystring_CMAKE_ARGS set(TINYXML_STATIC_LIBRARIES ${PROJECT_BINARY_DIR}/ext/dist/lib/tinyxml.lib)
@@ -343,7 +343,7 @@
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-whitespace ${YAML_CPP_PATCHFILE}
BINARY_DIR ext/build/yaml-cpp
INSTALL_DIR ext/dist
- CMAKE_ARGS ${YAML_CPP_CMAKE_ARGS}
+ CMAKE_ARGS ${YAML_CPP_CMAKE_ARGS} -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET} -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} -DCMAKE_C_FLAGS_DEBUG=${CMAKE_C_FLAGS_DEBUG} -DCMAKE_C_FLAGS_RELEASE=${CMAKE_C_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG} -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
)
set(YAML_CPP_INCLUDE_DIRS ${PROJECT_BINARY_DIR}/ext/dist/include)
set(YAML_CPP_LIBRARY_DIRS ${PROJECT_BINARY_DIR}/ext/dist/lib)

View File

@@ -0,0 +1,51 @@
diff -Naur external_opencolorio/CMakeLists.txt external_opencolorio.patched/CMakeLists.txt
--- external_opencolorio/CMakeLists.txt 2018-01-04 18:38:27 -0700
+++ external_opencolorio.patched/CMakeLists.txt 2018-08-15 11:46:53 -0600
@@ -251,25 +251,30 @@
if(USE_EXTERNAL_YAML)
# Set minimum yaml version for non-patched sources.
set(YAML_VERSION_MIN "0.3.0")
- include(FindPkgConfig)
- pkg_check_modules(PC_YAML_CPP REQUIRED QUIET yaml-cpp)
- find_path(YAML_CPP_INCLUDE_DIR yaml-cpp/yaml.h
- HINTS ${PC_YAML_CPP_INCLUDEDIR} ${PC_YAML_CPP_INCLUDE_DIRS} )
- find_library(YAML_CPP_LIBRARY LIBRARY_NAMES yaml-cpp libyaml-cpp
- HINTS ${PC_YAML_CPP_LIBRARY_DIRS} )
- set(YAML_CPP_LIBRARIES ${YAML_CPP_LIBRARY})
- set(YAML_CPP_INCLUDE_DIRS ${YAML_CPP_INCLUDE_DIR})
- set(YAML_CPP_VERSION ${PC_YAML_CPP_VERSION})
+ if(NOT WIN32)
+ include(FindPkgConfig)
+ pkg_check_modules(PC_YAML_CPP REQUIRED QUIET yaml-cpp)
+ find_path(YAML_CPP_INCLUDE_DIR yaml-cpp/yaml.h
+ HINTS ${PC_YAML_CPP_INCLUDEDIR} ${PC_YAML_CPP_INCLUDE_DIRS} )
+ find_library(YAML_CPP_LIBRARY LIBRARY_NAMES yaml-cpp libyaml-cpp
+ HINTS ${PC_YAML_CPP_LIBRARY_DIRS} )
+ set(YAML_CPP_LIBRARIES ${YAML_CPP_LIBRARY})
+ set(YAML_CPP_INCLUDE_DIRS ${YAML_CPP_INCLUDE_DIR})
+ set(YAML_CPP_VERSION ${PC_YAML_CPP_VERSION})
- if(YAML_CPP_VERSION VERSION_LESS ${YAML_VERSION_MIN})
- message(FATAL_ERROR "ERROR: yaml-cpp ${YAML_VERSION_MIN} or greater is required.")
- endif()
-
- find_package_handle_standard_args(yaml-cpp
- REQUIRED_VARS YAML_CPP_LIBRARIES YAML_CPP_INCLUDE_DIRS )
- set(YAML_CPP_FOUND ${YAML-CPP_FOUND})
- mark_as_advanced(YAML_CPP_INCLUDE_DIR YAML_CPP_LIBRARY YAML-CPP_FOUND)
+ if(YAML_CPP_VERSION VERSION_LESS ${YAML_VERSION_MIN})
+ message(FATAL_ERROR "ERROR: yaml-cpp ${YAML_VERSION_MIN} or greater is required.")
+ endif()
+ find_package_handle_standard_args(yaml-cpp
+ REQUIRED_VARS YAML_CPP_LIBRARIES YAML_CPP_INCLUDE_DIRS )
+ set(YAML_CPP_FOUND ${YAML-CPP_FOUND})
+ mark_as_advanced(YAML_CPP_INCLUDE_DIR YAML_CPP_LIBRARY YAML-CPP_FOUND)
+ else()
+ set(EXTERNAL_INCLUDE_DIRS ${EXTERNAL_INCLUDE_DIRS} ${INC_1})
+ set(EXTERNAL_INCLUDE_DIRS ${EXTERNAL_INCLUDE_DIRS} ${INC_2})
+ message("INCLUDE DIRS = i:${EXTERNAL_INCLUDE_DIRS} |1:${INC_1} |2:${INC_2}")
+ endif()
if(YAML_CPP_FOUND)
if(YAML_CPP_VERSION VERSION_GREATER "0.5.0")
# Need to also get the boost headers here, as yaml-cpp 0.5.0+ requires them.

View File

@@ -1,10 +1,10 @@
diff -Naur openvdb-8.0.0/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake diff -Naur orig/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
--- openvdb-8.0.0/cmake/FindIlmBase.cmake 2020-12-24 10:13:14 -0700 --- orig/cmake/FindIlmBase.cmake 2019-12-06 12:11:33 -0700
+++ openvdb/cmake/FindIlmBase.cmake 2021-02-05 12:07:49 -0700 +++ openvdb/cmake/FindIlmBase.cmake 2020-08-12 12:48:44 -0600
@@ -217,6 +217,12 @@ @@ -225,6 +225,12 @@
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib") list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
endif() "-${IlmBase_VERSION_MAJOR}_${IlmBase_VERSION_MINOR}.lib"
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${_IlmBase_Version_Suffix}.lib") )
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES + list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
+ "_s.lib" + "_s.lib"
+ ) + )
@@ -13,14 +13,14 @@ diff -Naur openvdb-8.0.0/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
+ ) + )
else() else()
if(ILMBASE_USE_STATIC_LIBS) if(ILMBASE_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a") list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
diff -Naur openvdb-8.0.0/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake diff -Naur orig/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
--- openvdb-8.0.0/cmake/FindOpenEXR.cmake 2020-12-24 10:13:14 -0700 --- orig/cmake/FindOpenEXR.cmake 2019-12-06 12:11:33 -0700
+++ openvdb/cmake/FindOpenEXR.cmake 2021-02-05 12:23:39 -0700 +++ openvdb/cmake/FindOpenEXR.cmake 2020-08-12 12:48:44 -0600
@@ -210,6 +210,12 @@ @@ -218,6 +218,12 @@
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib") list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
endif() "-${OpenEXR_VERSION_MAJOR}_${OpenEXR_VERSION_MINOR}.lib"
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${_OpenEXR_Version_Suffix}.lib") )
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES + list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
+ "_s.lib" + "_s.lib"
+ ) + )
@@ -29,11 +29,11 @@ diff -Naur openvdb-8.0.0/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
+ ) + )
else() else()
if(OPENEXR_USE_STATIC_LIBS) if(OPENEXR_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a") list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/CMakeLists.txt diff -Naur orig/openvdb/CMakeLists.txt openvdb/openvdb/CMakeLists.txt
--- openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt 2020-12-24 10:13:14 -0700 --- orig/openvdb/CMakeLists.txt 2019-12-06 12:11:33 -0700
+++ openvdb/openvdb/openvdb/CMakeLists.txt 2021-02-05 11:18:33 -0700 +++ openvdb/openvdb/CMakeLists.txt 2020-08-12 14:12:26 -0600
@@ -107,7 +107,9 @@ @@ -105,7 +105,9 @@
# http://boost.2283326.n4.nabble.com/CMake-config-scripts-broken-in-1-70-td4708957.html # http://boost.2283326.n4.nabble.com/CMake-config-scripts-broken-in-1-70-td4708957.html
# https://github.com/boostorg/boost_install/commit/160c7cb2b2c720e74463865ef0454d4c4cd9ae7c # https://github.com/boostorg/boost_install/commit/160c7cb2b2c720e74463865ef0454d4c4cd9ae7c
set(BUILD_SHARED_LIBS ON) set(BUILD_SHARED_LIBS ON)
@@ -44,15 +44,15 @@ diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/
endif() endif()
find_package(Boost ${MINIMUM_BOOST_VERSION} REQUIRED COMPONENTS iostreams system) find_package(Boost ${MINIMUM_BOOST_VERSION} REQUIRED COMPONENTS iostreams system)
@@ -146,6 +148,7 @@ @@ -193,6 +195,7 @@
Boost::disable_autolinking # add -DBOOST_ALL_NO_LIB if(OPENVDB_DISABLE_BOOST_IMPLICIT_LINKING)
) add_definitions(-DBOOST_ALL_NO_LIB)
endif() endif()
+ add_definitions(-D__TBB_NO_IMPLICIT_LINKAGE -DOPENVDB_OPENEXR_STATICLIB) + add_definitions(-D__TBB_NO_IMPLICIT_LINKAGE -DOPENVDB_OPENEXR_STATICLIB)
endif() endif()
if(USE_EXR) # @todo Should be target definitions
@@ -379,7 +382,12 @@ @@ -383,7 +386,12 @@
# imported targets. # imported targets.
if(OPENVDB_CORE_SHARED) if(OPENVDB_CORE_SHARED)
@@ -66,9 +66,9 @@ diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/
endif() endif()
if(OPENVDB_CORE_STATIC) if(OPENVDB_CORE_STATIC)
diff -Naur openvdb-8.0.0/openvdb/openvdb/version.rc.in openvdb/openvdb/openvdb/version.rc.in diff -Naur orig/openvdb/version.rc.in openvdb/openvdb/version.rc.in
--- openvdb-8.0.0/openvdb/openvdb/version.rc.in 1969-12-31 17:00:00 -0700 --- orig/openvdb/version.rc.in 1969-12-31 17:00:00 -0700
+++ openvdb/openvdb/openvdb/version.rc.in 2021-02-05 11:18:33 -0700 +++ openvdb/openvdb/version.rc.in 2020-08-12 14:15:01 -0600
@@ -0,0 +1,48 @@ @@ -0,0 +1,48 @@
+#include <winver.h> +#include <winver.h>
+ +

View File

@@ -43,18 +43,22 @@ diff -Naur OpenShadingLanguage-Release-1.9.9/src/liboslexec/llvm_util.cpp extern
- -
+void LLVM_Util::Cleanup () +void LLVM_Util::Cleanup ()
+{ +{
+ if(jitmm_hold) jitmm_hold->clear(); + jitmm_hold.clear();
+} +}
size_t size_t
LLVM_Util::total_jit_memory_held () LLVM_Util::total_jit_memory_held ()
diff -Naur org/CMakeLists.txt external_osl/CMakeLists.txt diff -Naur OpenShadingLanguage-Release-1.9.9/CMakeLists.txt external_osl/CMakeLists.txt
--- org/CMakeLists.txt 2020-12-01 12:37:15 -0700 --- orig/CMakeLists.txt 2020-01-27 16:22:31 -0700
+++ external_osl/CMakeLists.txt 2021-01-20 13:26:50 -0700 +++ external_osl/CMakeLists.txt 2020-05-13 18:04:52 -0600
@@ -84,6 +84,11 @@ @@ -102,10 +102,11 @@
CACHE STRING "Directory where OptiX PTX files will be installed") set (OPTIX_EXTRA_LIBS CACHE STRING "Extra lib targets needed for OptiX")
set (CMAKE_DEBUG_POSTFIX "" CACHE STRING "Library naming postfix for Debug builds (e.g., '_debug')") set (CUDA_TARGET_ARCH "sm_35" CACHE STRING "CUDA GPU architecture (e.g. sm_35)")
-# set (USE_OIIO_STATIC ON CACHE BOOL "If OIIO is built static")
-# if (USE_OIIO_STATIC)
-# add_definitions ("-DOIIO_STATIC_BUILD=1")
-# endif ()
+set (USE_OIIO_STATIC ON CACHE BOOL "If OIIO is built static") +set (USE_OIIO_STATIC ON CACHE BOOL "If OIIO is built static")
+if (USE_OIIO_STATIC) +if (USE_OIIO_STATIC)
+ add_definitions ("-DOIIO_STATIC_BUILD=1") + add_definitions ("-DOIIO_STATIC_BUILD=1")

View File

@@ -2,23 +2,23 @@ diff --git a/setup.py.orig b/setup.py
index a97a755..07ce853 100644 index a97a755..07ce853 100644
--- a/setup.py.orig --- a/setup.py.orig
+++ b/setup.py +++ b/setup.py
@@ -1603,13 +1603,13 @@ @@ -1422,13 +1422,13 @@ class PyBuildExt(build_ext):
version = line.split()[2] version = line.split()[2]
break break
if version >= version_req: if version >= version_req:
- if (self.compiler.find_library_file(self.lib_dirs, 'z')): - if (self.compiler.find_library_file(lib_dirs, 'z')):
+ if (self.compiler.find_library_file(self.lib_dirs, 'z_pic')): + if (self.compiler.find_library_file(lib_dirs, 'z_pic')):
if MACOS: if host_platform == "darwin":
zlib_extra_link_args = ('-Wl,-search_paths_first',) zlib_extra_link_args = ('-Wl,-search_paths_first',)
else: else:
zlib_extra_link_args = () zlib_extra_link_args = ()
self.add(Extension('zlib', ['zlibmodule.c'], exts.append( Extension('zlib', ['zlibmodule.c'],
- libraries=['z'], - libraries = ['z'],
+ libraries=['z_pic'], + libraries = ['z_pic'],
extra_link_args=zlib_extra_link_args)) extra_link_args = zlib_extra_link_args))
have_zlib = True have_zlib = True
else: else:
@@ -1623,7 +1623,7 @@ @@ -1442,7 +1442,7 @@ class PyBuildExt(build_ext):
# crc32 if we have it. Otherwise binascii uses its own. # crc32 if we have it. Otherwise binascii uses its own.
if have_zlib: if have_zlib:
extra_compile_args = ['-DUSE_ZLIB_CRC32'] extra_compile_args = ['-DUSE_ZLIB_CRC32']
@@ -27,12 +27,12 @@ index a97a755..07ce853 100644
extra_link_args = zlib_extra_link_args extra_link_args = zlib_extra_link_args
else: else:
extra_compile_args = [] extra_compile_args = []
@@ -2168,7 +2168,7 @@ @@ -1991,7 +1991,7 @@ class PyBuildExt(build_ext):
ffi_inc = None
print('Header file {} does not exist'.format(ffi_h)) print('Header file {} does not exist'.format(ffi_h))
if ffi_lib is None and ffi_inc: ffi_lib = None
if ffi_inc is not None:
- for lib_name in ('ffi', 'ffi_pic'): - for lib_name in ('ffi', 'ffi_pic'):
+ for lib_name in ('ffi_pic', ): + for lib_name in ('ffi_pic', ):
if (self.compiler.find_library_file(self.lib_dirs, lib_name)): if (self.compiler.find_library_file(lib_dirs, lib_name)):
ffi_lib = lib_name ffi_lib = lib_name
break break

View File

@@ -0,0 +1,289 @@
diff -ru a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst
--- a/Doc/library/ctypes.rst 2020-03-10 07:11:12.000000000 +0100
+++ b/Doc/library/ctypes.rst 2020-07-14 08:10:10.000000000 +0200
@@ -1551,6 +1551,13 @@
value usable as argument (integer, string, ctypes instance). This allows
defining adapters that can adapt custom objects as function parameters.
+ .. attribute:: variadic
+
+ Assign a boolean to specify that the function takes a variable number of
+ arguments. This does not matter on most platforms, but for Apple arm64
+ platforms variadic functions have a different calling convention than
+ normal functions.
+
.. attribute:: errcheck
Assign a Python function or another callable to this attribute. The
diff -ru a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
--- a/Modules/_ctypes/_ctypes.c 2020-03-10 07:11:12.000000000 +0100
+++ b/Modules/_ctypes/_ctypes.c 2020-07-14 08:14:41.000000000 +0200
@@ -3175,6 +3175,35 @@
}
static int
+PyCFuncPtr_set_variadic(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ignored))
+{
+ StgDictObject *dict = PyObject_stgdict((PyObject *)self);
+ assert(dict);
+ int r = PyObject_IsTrue(ob);
+ if (r == 1) {
+ dict->flags |= FUNCFLAG_VARIADIC;
+ return 0;
+ } else if (r == 0) {
+ dict->flags &= ~FUNCFLAG_VARIADIC;
+ return 0;
+ } else {
+ return -1;
+ }
+}
+
+static PyObject *
+PyCFuncPtr_get_variadic(PyCFuncPtrObject *self, void *Py_UNUSED(ignored))
+{
+ StgDictObject *dict = PyObject_stgdict((PyObject *)self);
+ assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */
+ if (dict->flags & FUNCFLAG_VARIADIC)
+ Py_RETURN_TRUE;
+ else
+ Py_RETURN_FALSE;
+}
+
+
+static int
PyCFuncPtr_set_argtypes(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ignored))
{
PyObject *converters;
@@ -5632,6 +5661,7 @@
PyModule_AddObject(m, "FUNCFLAG_USE_ERRNO", PyLong_FromLong(FUNCFLAG_USE_ERRNO));
PyModule_AddObject(m, "FUNCFLAG_USE_LASTERROR", PyLong_FromLong(FUNCFLAG_USE_LASTERROR));
PyModule_AddObject(m, "FUNCFLAG_PYTHONAPI", PyLong_FromLong(FUNCFLAG_PYTHONAPI));
+ PyModule_AddObject(m, "FUNCFLAG_VARIADIC", PyLong_FromLong(FUNCFLAG_VARIADIC));
PyModule_AddStringConstant(m, "__version__", "1.1.0");
PyModule_AddObject(m, "_memmove_addr", PyLong_FromVoidPtr(memmove));
diff -ru a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
--- a/Modules/_ctypes/callproc.c 2020-03-10 07:11:12.000000000 +0100
+++ b/Modules/_ctypes/callproc.c 2020-07-14 08:18:33.000000000 +0200
@@ -767,7 +767,8 @@
ffi_type **atypes,
ffi_type *restype,
void *resmem,
- int argcount)
+ int argcount,
+ int argtypecount)
{
PyThreadState *_save = NULL; /* For Py_BLOCK_THREADS and Py_UNBLOCK_THREADS */
PyObject *error_object = NULL;
@@ -793,15 +794,38 @@
if ((flags & FUNCFLAG_CDECL) == 0)
cc = FFI_STDCALL;
#endif
- if (FFI_OK != ffi_prep_cif(&cif,
- cc,
- argcount,
- restype,
- atypes)) {
- PyErr_SetString(PyExc_RuntimeError,
- "ffi_prep_cif failed");
- return -1;
- }
+#if HAVE_FFI_PREP_CIF_VAR
+ /* Everyone SHOULD set f.variadic=True on variadic function pointers, but
+ * lots of existing code will not. If there's at least one arg and more
+ * args are passed than are defined in the prototype, then it must be a
+ * variadic function. */
+ if ((flags & FUNCFLAG_VARIADIC) ||
+ (argtypecount != 0 && argcount > argtypecount))
+ {
+ if (FFI_OK != ffi_prep_cif_var(&cif,
+ cc,
+ argtypecount,
+ argcount,
+ restype,
+ atypes)) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "ffi_prep_cif_var failed");
+ return -1;
+ }
+ } else {
+#endif
+ if (FFI_OK != ffi_prep_cif(&cif,
+ cc,
+ argcount,
+ restype,
+ atypes)) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "ffi_prep_cif failed");
+ return -1;
+ }
+#if HAVE_FFI_PREP_CIF_VAR
+ }
+#endif
if (flags & (FUNCFLAG_USE_ERRNO | FUNCFLAG_USE_LASTERROR)) {
error_object = _ctypes_get_errobj(&space);
@@ -1185,9 +1209,8 @@
if (-1 == _call_function_pointer(flags, pProc, avalues, atypes,
rtype, resbuf,
- Py_SAFE_DOWNCAST(argcount,
- Py_ssize_t,
- int)))
+ Py_SAFE_DOWNCAST(argcount, Py_ssize_t, int),
+ Py_SAFE_DOWNCAST(argtype_count, Py_ssize_t, int)))
goto cleanup;
#ifdef WORDS_BIGENDIAN
diff -ru a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h
--- a/Modules/_ctypes/ctypes.h 2020-03-10 07:11:12.000000000 +0100
+++ b/Modules/_ctypes/ctypes.h 2020-07-14 08:30:53.000000000 +0200
@@ -285,6 +285,7 @@
#define FUNCFLAG_PYTHONAPI 0x4
#define FUNCFLAG_USE_ERRNO 0x8
#define FUNCFLAG_USE_LASTERROR 0x10
+#define FUNCFLAG_VARIADIC 0x20
#define TYPEFLAG_ISPOINTER 0x100
#define TYPEFLAG_HASPOINTER 0x200
diff -ru a/configure b/configure
--- a/configure 2020-03-10 07:11:12.000000000 +0100
+++ b/configure 2020-07-14 08:03:27.000000000 +0200
@@ -3374,7 +3374,7 @@
# has no effect, don't bother defining them
Darwin/[6789].*)
define_xopen_source=no;;
- Darwin/1[0-9].*)
+ Darwin/[12][0-9].*)
define_xopen_source=no;;
# On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but
# used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined
@@ -9251,6 +9251,9 @@
ppc)
MACOSX_DEFAULT_ARCH="ppc64"
;;
+ arm64)
+ MACOSX_DEFAULT_ARCH="arm64"
+ ;;
*)
as_fn_error $? "Unexpected output of 'arch' on OSX" "$LINENO" 5
;;
diff -ru a/configure.ac b/configure.ac
--- a/configure.ac 2020-03-10 07:11:12.000000000 +0100
+++ b/configure.ac 2020-07-14 08:03:27.000000000 +0200
@@ -2456,6 +2456,9 @@
ppc)
MACOSX_DEFAULT_ARCH="ppc64"
;;
+ arm64)
+ MACOSX_DEFAULT_ARCH="arm64"
+ ;;
*)
AC_MSG_ERROR([Unexpected output of 'arch' on OSX])
;;
diff -ru a/setup.py b/setup.py
--- a/setup.py 2020-03-10 07:11:12.000000000 +0100
+++ b/setup.py 2020-07-14 08:28:12.000000000 +0200
@@ -141,6 +141,13 @@
os.unlink(tmpfile)
return MACOS_SDK_ROOT
+
+def is_macosx_at_least(vers):
+ if host_platform == 'darwin':
+ dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+ if dep_target:
+ return tuple(map(int, dep_target.split('.'))) >= vers
+ return False
def is_macosx_sdk_path(path):
"""
@@ -150,6 +157,13 @@
or path.startswith('/System/')
or path.startswith('/Library/') )
+def grep_headers_for(function, headers):
+ for header in headers:
+ with open(header, 'r') as f:
+ if function in f.read():
+ return True
+ return False
+
def find_file(filename, std_dirs, paths):
"""Searches for the directory where a given file is located,
and returns a possibly-empty list of additional directories, or None
@@ -1972,7 +1986,11 @@
return True
def detect_ctypes(self, inc_dirs, lib_dirs):
- self.use_system_libffi = False
+ if not sysconfig.get_config_var("LIBFFI_INCLUDEDIR") and is_macosx_at_least((10,15)):
+ self.use_system_libffi = True
+ else:
+ self.use_system_libffi = '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS")
+
include_dirs = []
extra_compile_args = []
extra_link_args = []
@@ -2016,32 +2034,48 @@
ext_test = Extension('_ctypes_test',
sources=['_ctypes/_ctypes_test.c'],
libraries=['m'])
+ ffi_inc = sysconfig.get_config_var("LIBFFI_INCLUDEDIR")
+ ffi_lib = None
+
self.extensions.extend([ext, ext_test])
if host_platform == 'darwin':
- if '--with-system-ffi' not in sysconfig.get_config_var("CONFIG_ARGS"):
+ if not self.use_system_libffi:
return
- # OS X 10.5 comes with libffi.dylib; the include files are
- # in /usr/include/ffi
- inc_dirs.append('/usr/include/ffi')
-
- ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
- if not ffi_inc or ffi_inc[0] == '':
- ffi_inc = find_file('ffi.h', [], inc_dirs)
- if ffi_inc is not None:
- ffi_h = ffi_inc[0] + '/ffi.h'
+ ffi_in_sdk = os.path.join(macosx_sdk_root(), "usr/include/ffi")
+ if os.path.exists(ffi_in_sdk):
+ ffi_inc = ffi_in_sdk
+ ffi_lib = 'ffi'
+ else:
+ # OS X 10.5 comes with libffi.dylib; the include files are
+ # in /usr/include/ffi
+ ffi_inc_dirs.append('/usr/include/ffi')
+
+ if not ffi_inc:
+ found = find_file('ffi.h', [], ffi_inc_dirs)
+ if found:
+ ffi_inc = found[0]
+ if ffi_inc:
+ ffi_h = ffi_inc + '/ffi.h'
if not os.path.exists(ffi_h):
ffi_inc = None
print('Header file {} does not exist'.format(ffi_h))
- ffi_lib = None
- if ffi_inc is not None:
+ if ffi_lib is None and ffi_inc:
for lib_name in ('ffi', 'ffi_pic'):
if (self.compiler.find_library_file(lib_dirs, lib_name)):
ffi_lib = lib_name
break
if ffi_inc and ffi_lib:
- ext.include_dirs.extend(ffi_inc)
+ ffi_headers = glob(os.path.join(ffi_inc, '*.h'))
+ if grep_headers_for('ffi_closure_alloc', ffi_headers):
+ try:
+ sources.remove('_ctypes/malloc_closure.c')
+ except ValueError:
+ pass
+ if grep_headers_for('ffi_prep_cif_var', ffi_headers):
+ ext.extra_compile_args.append("-DHAVE_FFI_PREP_CIF_VAR=1")
+ ext.include_dirs.append(ffi_inc)
ext.libraries.append(ffi_lib)
self.use_system_libffi = True

View File

@@ -1,13 +0,0 @@
diff --git a/include/tbb/tbb_config.h b/include/tbb/tbb_config.h
index 7a8d06a0..886699d8 100644
--- a/include/tbb/tbb_config.h
+++ b/include/tbb/tbb_config.h
@@ -620,7 +620,7 @@ There are four cases that are supported:
// instantiation site, which is too late for suppression of the corresponding messages for internal
// stuff.
#if !defined(__INTEL_COMPILER) && (!defined(TBB_SUPPRESS_DEPRECATED_MESSAGES) || (TBB_SUPPRESS_DEPRECATED_MESSAGES == 0))
- #if (__cplusplus >= 201402L)
+ #if (__cplusplus >= 201402L && (!defined(_MSC_VER) || _MSC_VER >= 1920))
#define __TBB_DEPRECATED [[deprecated]]
#define __TBB_DEPRECATED_MSG(msg) [[deprecated(msg)]]
#elif _MSC_VER

View File

@@ -26,30 +26,36 @@ diff -Naur external_usd_base/cmake/macros/Public.cmake external_usd/cmake/macros
endforeach() endforeach()
foreach(lib ${PXR_OBJECT_LIBS}) foreach(lib ${PXR_OBJECT_LIBS})
set(objects "${objects};\$<TARGET_OBJECTS:${lib}>") set(objects "${objects};\$<TARGET_OBJECTS:${lib}>")
diff -ru USD-20.11/pxr/base/tf/pxrLZ4/lz4.cpp external_usd/pxr/base/tf/pxrLZ4/lz4.cpp
--- USD-20.11/pxr/base/tf/pxrLZ4/lz4.cpp 2020-10-14 19:25:19.000000000 +0100
+++ external_usd/pxr/base/tf/pxrLZ4/lz4.cpp 2021-02-09 09:28:51.496190085 +0000
@@ -614,6 +614,15 @@
/*-************************************
* Internal Definitions used in Tests
**************************************/
+
+/*******************************************************************
+ * Disabled in Blender. The BLOSC library also exposes these
+ * functions, and this causes 'duplicate symbol' linker errors.
+ *
+ * This problem has been reported upstream at
+ * https://github.com/PixarAnimationStudios/USD/issues/1447
+ *
+ *******************************************************************
#if defined (__cplusplus)
extern "C" {
#endif
@@ -627,6 +636,7 @@
#if defined (__cplusplus)
}
#endif
+********************************************************************/
/*-****************************** diff --git a/pxr/base/arch/align.h b/pxr/base/arch/align.h
* Compression functions index f3cabf4..ebc8a69 100644
--- a/pxr/base/arch/align.h
+++ b/pxr/base/arch/align.h
@@ -77,7 +77,11 @@ ArchAlignMemory(void *base)
/// The size of a CPU cache line on the current processor architecture in bytes.
///
/// \hideinitializer
+#if defined(ARCH_OS_DARWIN) && defined(ARCH_CPU_ARM)
+#define ARCH_CACHE_LINE_SIZE 128
+#else
#define ARCH_CACHE_LINE_SIZE 64
+#endif
///@}
diff --git a/pxr/base/arch/math.h b/pxr/base/arch/math.h
index 3e66c37..64a052c 100644
--- a/pxr/base/arch/math.h
+++ b/pxr/base/arch/math.h
@@ -42,7 +42,7 @@ PXR_NAMESPACE_OPEN_SCOPE
/// \addtogroup group_arch_Math
///@{
-#if defined (ARCH_CPU_INTEL) || defined(doxygen)
+#if defined (ARCH_CPU_INTEL) || defined(ARCH_CPU_ARM) || defined(doxygen)
/// This is the smallest value e such that 1+e^2 == 1, using floats.
/// True for all IEEE754 chipsets.

View File

@@ -117,7 +117,7 @@ set path=%BUILD_DIR%\downloads\mingw\mingw64\msys\1.0\bin\;%BUILD_DIR%\downloads
mkdir %STAGING%\%BuildDir%%ARCH%R mkdir %STAGING%\%BuildDir%%ARCH%R
cd %Staging%\%BuildDir%%ARCH%R cd %Staging%\%BuildDir%%ARCH%R
echo %DATE% %TIME% : Start > %StatusFile% echo %DATE% %TIME% : Start > %StatusFile%
cmake -G "%CMAKE_BUILDER%" -Thost=x64 %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DBUILD_MODE=Release -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/ cmake -G "%CMAKE_BUILDER%" %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DBUILD_MODE=Release -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/
echo %DATE% %TIME% : Release Configuration done >> %StatusFile% echo %DATE% %TIME% : Release Configuration done >> %StatusFile%
if "%dobuild%" == "1" ( if "%dobuild%" == "1" (
msbuild /m "ll.vcxproj" /p:Configuration=Release /fl /flp:logfile=BlenderDeps_llvm.log;Verbosity=normal msbuild /m "ll.vcxproj" /p:Configuration=Release /fl /flp:logfile=BlenderDeps_llvm.log;Verbosity=normal
@@ -130,7 +130,7 @@ if "%NODEBUG%" == "1" goto exit
cd %BUILD_DIR% cd %BUILD_DIR%
mkdir %STAGING%\%BuildDir%%ARCH%D mkdir %STAGING%\%BuildDir%%ARCH%D
cd %Staging%\%BuildDir%%ARCH%D cd %Staging%\%BuildDir%%ARCH%D
cmake -G "%CMAKE_BUILDER%" -Thost=x64 %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DCMAKE_BUILD_TYPE=Debug -DBUILD_MODE=Debug -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/ %CMAKE_DEBUG_OPTIONS% cmake -G "%CMAKE_BUILDER%" %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DCMAKE_BUILD_TYPE=Debug -DBUILD_MODE=Debug -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/ %CMAKE_DEBUG_OPTIONS%
echo %DATE% %TIME% : Debug Configuration done >> %StatusFile% echo %DATE% %TIME% : Debug Configuration done >> %StatusFile%
if "%dobuild%" == "1" ( if "%dobuild%" == "1" (
msbuild /m "ll.vcxproj" /p:Configuration=Debug /fl /flp:logfile=BlenderDeps_llvm.log;;Verbosity=normal msbuild /m "ll.vcxproj" /p:Configuration=Debug /fl /flp:logfile=BlenderDeps_llvm.log;;Verbosity=normal

View File

@@ -1,111 +0,0 @@
# - Find Clang library
# Find the native Clang includes and library
# This module defines
# CLANG_INCLUDE_DIRS, where to find AST/AST.h, Set when
# CLANG_INCLUDE_DIR is found.
# CLANG_LIBRARIES, libraries to link against to use Clang.
# CLANG_ROOT_DIR, The base directory to search for Clang.
# This can also be an environment variable.
# CLANG_FOUND, If false, do not try to use Clang.
#=============================================================================
# Copyright 2021 Blender Foundation.
#
# Distributed under the OSI-approved BSD 3-Clause License,
# see accompanying file BSD-3-Clause-license.txt for details.
#=============================================================================
# If CLANG_ROOT_DIR was defined in the environment, use it.
if(NOT CLANG_ROOT_DIR AND NOT $ENV{CLANG_ROOT_DIR} STREQUAL "")
set(CLANG_ROOT_DIR $ENV{CLANG_ROOT_DIR})
endif()
set(_CLANG_SEARCH_DIRS
${CLANG_ROOT_DIR}
/opt/lib/clang
)
find_path(CLANG_INCLUDE_DIR
NAMES
AST/AST.h
HINTS
${_CLANG_SEARCH_DIRS}
PATH_SUFFIXES
include
include/clang
)
set(_CLANG_FIND_COMPONENTS
clangDependencyScanning
clangDynamicASTMatchers
clangFrontendTool
clangStaticAnalyzerFrontend
clangHandleCXX
clangStaticAnalyzerCheckers
clangStaticAnalyzerCore
clangToolingASTDiff
clangToolingRefactoring
clangToolingSyntax
clangARCMigrate
clangCodeGen
clangCrossTU
clangIndex
clangTooling
clangFormat
clangToolingInclusions
clangRewriteFrontend
clangFrontend
clangSerialization
clangDriver
clangToolingCore
clangParse
clangRewrite
clangSema
clangEdit
clangAnalysis
clangASTMatchers
clangAST
clangLex
clangBasic
)
set(_CLANG_LIBRARIES)
foreach(COMPONENT ${_CLANG_FIND_COMPONENTS})
string(TOUPPER ${COMPONENT} UPPERCOMPONENT)
find_library(CLANG_${UPPERCOMPONENT}_LIBRARY
NAMES
${COMPONENT}
HINTS
${_CLANG_SEARCH_DIRS}
PATH_SUFFIXES
lib64 lib
)
list(APPEND _CLANG_LIBRARIES "${CLANG_${UPPERCOMPONENT}_LIBRARY}")
endforeach()
# Handle the QUIETLY and REQUIRED arguments and set CLANG_FOUND to TRUE if
# all listed variables are TRUE.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Clang DEFAULT_MSG
_CLANG_LIBRARIES CLANG_INCLUDE_DIR)
if(CLANG_FOUND)
set(CLANG_LIBRARIES ${_CLANG_LIBRARIES})
set(CLANG_INCLUDE_DIRS ${CLANG_INCLUDE_DIR})
endif()
mark_as_advanced(
CLANG_INCLUDE_DIR
)
foreach(COMPONENT ${_CLANG_FIND_COMPONENTS})
string(TOUPPER ${COMPONENT} UPPERCOMPONENT)
mark_as_advanced(CLANG_${UPPERCOMPONENT}_LIBRARY)
endforeach()
unset(_CLANG_SEARCH_DIRS)
unset(_CLANG_FIND_COMPONENTS)
unset(_CLANG_LIBRARIES)
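FindClang.cmake above is a conventional CMake find module: it honors CLANG_ROOT_DIR, locates the headers and each component library, and reports the result through find_package_handle_standard_args. On the consuming side the usage would look roughly like the sketch below; the module-path directory and the tool target are assumptions for illustration, not taken from this diff.

# Make the directory containing FindClang.cmake visible, then use the
# CLANG_* variables the module defines on success.
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/Modules")
find_package(Clang REQUIRED)

add_executable(my_clang_tool my_clang_tool.cc)
target_include_directories(my_clang_tool PRIVATE ${CLANG_INCLUDE_DIRS})
target_link_libraries(my_clang_tool PRIVATE ${CLANG_LIBRARIES})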

View File

@@ -34,17 +34,12 @@ FIND_PATH(EMBREE_INCLUDE_DIR
include include
) )
IF(NOT (APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")))
SET(_embree_SIMD_COMPONENTS
embree_sse42
embree_avx
embree_avx2
)
ENDIF()
SET(_embree_FIND_COMPONENTS SET(_embree_FIND_COMPONENTS
embree3 embree3
${_embree_SIMD_COMPONENTS} embree_sse42
embree_avx
embree_avx2
lexers lexers
math math
simd simd
@@ -64,14 +59,14 @@ FOREACH(COMPONENT ${_embree_FIND_COMPONENTS})
PATH_SUFFIXES PATH_SUFFIXES
lib64 lib lib64 lib
) )
IF(NOT EMBREE_${UPPERCOMPONENT}_LIBRARY) IF (NOT EMBREE_${UPPERCOMPONENT}_LIBRARY)
IF(EMBREE_EMBREE3_LIBRARY) IF (EMBREE_EMBREE3_LIBRARY)
# If we can't find all the static libraries, try to fall back to the shared library if found. # If we can't find all the static libraries, try to fall back to the shared library if found.
# This allows building with a shared embree library # This allows building with a shared embree library
SET(_embree_LIBRARIES ${EMBREE_EMBREE3_LIBRARY}) SET(_embree_LIBRARIES ${EMBREE_EMBREE3_LIBRARY})
BREAK() BREAK()
ENDIF() ENDIF ()
ENDIF() ENDIF ()
LIST(APPEND _embree_LIBRARIES "${EMBREE_${UPPERCOMPONENT}_LIBRARY}") LIST(APPEND _embree_LIBRARIES "${EMBREE_${UPPERCOMPONENT}_LIBRARY}")
ENDFOREACH() ENDFOREACH()

View File

@@ -35,13 +35,11 @@ find_path(HARU_INCLUDE_DIR
${_haru_SEARCH_DIRS} ${_haru_SEARCH_DIRS}
PATH_SUFFIXES PATH_SUFFIXES
include/haru include/haru
include
) )
find_library(HARU_LIBRARY find_library(HARU_LIBRARY
NAMES NAMES
hpdfs hpdfs
hpdf
HINTS HINTS
${_haru_SEARCH_DIRS} ${_haru_SEARCH_DIRS}
PATH_SUFFIXES PATH_SUFFIXES

View File

@@ -26,8 +26,7 @@ ENDIF()
SET(_opencolorio_FIND_COMPONENTS SET(_opencolorio_FIND_COMPONENTS
OpenColorIO OpenColorIO
yaml-cpp yaml-cpp
expat tinyxml
pystring
) )
SET(_opencolorio_SEARCH_DIRS SET(_opencolorio_SEARCH_DIRS
@@ -61,23 +60,12 @@ FOREACH(COMPONENT ${_opencolorio_FIND_COMPONENTS})
ENDIF() ENDIF()
ENDFOREACH() ENDFOREACH()
IF(EXISTS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h")
# Search twice, because this symbol changed between OCIO 1.x and 2.x
FILE(STRINGS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h" _opencolorio_version
REGEX "^#define OCIO_VERSION_STR[ \t].*$")
IF(NOT _opencolorio_version)
file(STRINGS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h" _opencolorio_version
REGEX "^#define OCIO_VERSION[ \t].*$")
ENDIF()
STRING(REGEX MATCHALL "[0-9]+[.0-9]+" OPENCOLORIO_VERSION ${_opencolorio_version})
ENDIF()
# handle the QUIETLY and REQUIRED arguments and set OPENCOLORIO_FOUND to TRUE if # handle the QUIETLY and REQUIRED arguments and set OPENCOLORIO_FOUND to TRUE if
# all listed variables are TRUE # all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs) INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenColorIO FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenColorIO DEFAULT_MSG
REQUIRED_VARS _opencolorio_LIBRARIES OPENCOLORIO_INCLUDE_DIR _opencolorio_LIBRARIES OPENCOLORIO_INCLUDE_DIR)
VERSION_VAR OPENCOLORIO_VERSION)
IF(OPENCOLORIO_FOUND) IF(OPENCOLORIO_FOUND)
SET(OPENCOLORIO_LIBRARIES ${_opencolorio_LIBRARIES}) SET(OPENCOLORIO_LIBRARIES ${_opencolorio_LIBRARIES})
@@ -90,7 +78,6 @@ MARK_AS_ADVANCED(
OPENCOLORIO_OPENCOLORIO_LIBRARY OPENCOLORIO_OPENCOLORIO_LIBRARY
OPENCOLORIO_TINYXML_LIBRARY OPENCOLORIO_TINYXML_LIBRARY
OPENCOLORIO_YAML-CPP_LIBRARY OPENCOLORIO_YAML-CPP_LIBRARY
OPENCOLORIO_VERSION
) )
UNSET(COMPONENT) UNSET(COMPONENT)
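Since the module now reports OPENCOLORIO_VERSION through VERSION_VAR, callers can request a minimum version; a minimal sketch of that pattern, mirroring how the platform files further down use it (the status message text is illustrative):
find_package(OpenColorIO 2.0.0)
if(NOT OPENCOLORIO_FOUND)
  set(WITH_OPENCOLORIO OFF)
  message(STATUS "OpenColorIO 2.0 or newer not found, disabling WITH_OPENCOLORIO")
endif()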

View File

@@ -34,7 +34,7 @@ IF(NOT PYTHON_ROOT_DIR AND NOT $ENV{PYTHON_ROOT_DIR} STREQUAL "")
SET(PYTHON_ROOT_DIR $ENV{PYTHON_ROOT_DIR}) SET(PYTHON_ROOT_DIR $ENV{PYTHON_ROOT_DIR})
ENDIF() ENDIF()
SET(PYTHON_VERSION 3.9 CACHE STRING "Python Version (major and minor only)") SET(PYTHON_VERSION 3.7 CACHE STRING "Python Version (major and minor only)")
MARK_AS_ADVANCED(PYTHON_VERSION) MARK_AS_ADVANCED(PYTHON_VERSION)
@@ -73,8 +73,8 @@ SET(_python_SEARCH_DIRS
# only search for the dirs if we haven't already # only search for the dirs if we haven't already
IF((NOT _IS_INC_DEF) OR (NOT _IS_INC_CONF_DEF) OR (NOT _IS_LIB_DEF) OR (NOT _IS_LIB_PATH_DEF)) IF((NOT _IS_INC_DEF) OR (NOT _IS_INC_CONF_DEF) OR (NOT _IS_LIB_DEF) OR (NOT _IS_LIB_PATH_DEF))
SET(_PYTHON_ABI_FLAGS_TEST SET(_PYTHON_ABI_FLAGS_TEST
"u; " # release "m;mu;u; " # release
"du;d" # debug "dm;dmu;du;d" # debug
) )
FOREACH(_CURRENT_ABI_FLAGS ${_PYTHON_ABI_FLAGS_TEST}) FOREACH(_CURRENT_ABI_FLAGS ${_PYTHON_ABI_FLAGS_TEST})

View File

@@ -1,49 +0,0 @@
# - Find sse2neon library
# Find the native sse2neon includes and library
# This module defines
# SSE2NEON_INCLUDE_DIRS, where to find sse2neon.h, Set when
# SSE2NEON_INCLUDE_DIR is found.
# SSE2NEON_ROOT_DIR, The base directory to search for sse2neon.
# This can also be an environment variable.
# SSE2NEON_FOUND, If false, do not try to use sse2neon.
#=============================================================================
# Copyright 2020 Blender Foundation.
#
# Distributed under the OSI-approved BSD 3-Clause License,
# see accompanying file BSD-3-Clause-license.txt for details.
#=============================================================================
# If SSE2NEON_ROOT_DIR was defined in the environment, use it.
IF(NOT SSE2NEON_ROOT_DIR AND NOT $ENV{SSE2NEON_ROOT_DIR} STREQUAL "")
SET(SSE2NEON_ROOT_DIR $ENV{SSE2NEON_ROOT_DIR})
ENDIF()
SET(_sse2neon_SEARCH_DIRS
${SSE2NEON_ROOT_DIR}
)
FIND_PATH(SSE2NEON_INCLUDE_DIR
NAMES
sse2neon.h
HINTS
${_sse2neon_SEARCH_DIRS}
PATH_SUFFIXES
include
)
# handle the QUIETLY and REQUIRED arguments and set SSE2NEON_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(sse2neon DEFAULT_MSG
SSE2NEON_INCLUDE_DIR)
IF(SSE2NEON_FOUND)
SET(SSE2NEON_INCLUDE_DIRS ${SSE2NEON_INCLUDE_DIR})
ENDIF(SSE2NEON_FOUND)
MARK_AS_ADVANCED(
SSE2NEON_INCLUDE_DIR
)
UNSET(_sse2neon_SEARCH_DIRS)
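For context, sse2neon is a single-header shim, so this module only needs to locate sse2neon.h; a minimal consumption sketch, assuming the module is available on the module path (the compile definition name is hypothetical):
find_package(sse2neon)
if(SSE2NEON_FOUND)
  include_directories(SYSTEM ${SSE2NEON_INCLUDE_DIRS})
  add_definitions(-DWITH_SSE2NEON)  # illustrative define, not taken from this diff
endif()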

View File

@@ -272,7 +272,7 @@ cmake_policy(SET CMP0057 NEW) # if IN_LIST
#------------------------------------------------------------------------------ #------------------------------------------------------------------------------
function(gtest_add_tests) function(gtest_add_tests)
if(ARGC LESS 1) if (ARGC LESS 1)
message(FATAL_ERROR "No arguments supplied to gtest_add_tests()") message(FATAL_ERROR "No arguments supplied to gtest_add_tests()")
endif() endif()
@@ -298,7 +298,7 @@ function(gtest_add_tests)
set(autoAddSources YES) set(autoAddSources YES)
else() else()
# Non-keyword syntax, convert to keyword form # Non-keyword syntax, convert to keyword form
if(ARGC LESS 3) if (ARGC LESS 3)
message(FATAL_ERROR "gtest_add_tests() without keyword options requires at least 3 arguments") message(FATAL_ERROR "gtest_add_tests() without keyword options requires at least 3 arguments")
endif() endif()
set(ARGS_TARGET "${ARGV0}") set(ARGS_TARGET "${ARGV0}")

View File

@@ -389,7 +389,7 @@ function(blender_add_lib
endfunction() endfunction()
function(blender_add_test_suite) function(blender_add_test_suite)
if(ARGC LESS 1) if (ARGC LESS 1)
message(FATAL_ERROR "No arguments supplied to blender_add_test_suite()") message(FATAL_ERROR "No arguments supplied to blender_add_test_suite()")
endif() endif()
@@ -668,6 +668,12 @@ macro(TEST_SSE_SUPPORT
#include <xmmintrin.h> #include <xmmintrin.h>
int main(void) { __m128 v = _mm_setzero_ps(); return 0; }" int main(void) { __m128 v = _mm_setzero_ps(); return 0; }"
SUPPORT_SSE_BUILD) SUPPORT_SSE_BUILD)
if(SUPPORT_SSE_BUILD)
message(STATUS "SSE Support: detected.")
else()
message(STATUS "SSE Support: missing.")
endif()
endif() endif()
if(NOT DEFINED SUPPORT_SSE2_BUILD) if(NOT DEFINED SUPPORT_SSE2_BUILD)
@@ -676,21 +682,17 @@ macro(TEST_SSE_SUPPORT
#include <emmintrin.h> #include <emmintrin.h>
int main(void) { __m128d v = _mm_setzero_pd(); return 0; }" int main(void) { __m128d v = _mm_setzero_pd(); return 0; }"
SUPPORT_SSE2_BUILD) SUPPORT_SSE2_BUILD)
if(SUPPORT_SSE2_BUILD)
message(STATUS "SSE2 Support: detected.")
else()
message(STATUS "SSE2 Support: missing.")
endif()
endif() endif()
unset(CMAKE_REQUIRED_FLAGS) unset(CMAKE_REQUIRED_FLAGS)
endmacro() endmacro()
macro(TEST_NEON_SUPPORT)
if(NOT DEFINED SUPPORT_NEON_BUILD)
include(CheckCXXSourceCompiles)
check_cxx_source_compiles(
"#include <arm_neon.h>
int main() {return vaddvq_s32(vdupq_n_s32(1));}"
SUPPORT_NEON_BUILD)
endif()
endmacro()
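An illustrative call site for the new macro, not verbatim from this diff: run the check once, then gate NEON-only work on the cached result (WITH_CPU_SIMD comes from the platform files shown later):
TEST_NEON_SUPPORT()
if(WITH_CPU_SIMD AND SUPPORT_NEON_BUILD)
  message(STATUS "NEON Support: detected.")  # mirrors the SSE status messages above
endif()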
# Only print message if running CMake first time # Only print message if running CMake first time
macro(message_first_run) macro(message_first_run)
if(FIRST_RUN) if(FIRST_RUN)

View File

@@ -131,22 +131,22 @@ if(WITH_CODEC_SNDFILE)
endif() endif()
if(WITH_PYTHON) if(WITH_PYTHON)
# we use precompiled libraries for py 3.9 and up by default # we use precompiled libraries for py 3.7 and up by default
set(PYTHON_VERSION 3.9) set(PYTHON_VERSION 3.7)
if(NOT WITH_PYTHON_MODULE AND NOT WITH_PYTHON_FRAMEWORK) if(NOT WITH_PYTHON_MODULE AND NOT WITH_PYTHON_FRAMEWORK)
# normally cached but not since we include them with blender # normally cached but not since we include them with blender
set(PYTHON_INCLUDE_DIR "${LIBDIR}/python/include/python${PYTHON_VERSION}") set(PYTHON_INCLUDE_DIR "${LIBDIR}/python/include/python${PYTHON_VERSION}m")
set(PYTHON_EXECUTABLE "${LIBDIR}/python/bin/python${PYTHON_VERSION}") set(PYTHON_EXECUTABLE "${LIBDIR}/python/bin/python${PYTHON_VERSION}m")
set(PYTHON_LIBRARY ${LIBDIR}/python/lib/libpython${PYTHON_VERSION}.a) set(PYTHON_LIBRARY ${LIBDIR}/python/lib/libpython${PYTHON_VERSION}m.a)
set(PYTHON_LIBPATH "${LIBDIR}/python/lib/python${PYTHON_VERSION}") set(PYTHON_LIBPATH "${LIBDIR}/python/lib/python${PYTHON_VERSION}")
# set(PYTHON_LINKFLAGS "-u _PyMac_Error") # won't build with this enabled # set(PYTHON_LINKFLAGS "-u _PyMac_Error") # won't build with this enabled
else() else()
# module must be compiled against Python framework # module must be compiled against Python framework
set(_py_framework "/Library/Frameworks/Python.framework/Versions/${PYTHON_VERSION}") set(_py_framework "/Library/Frameworks/Python.framework/Versions/${PYTHON_VERSION}")
set(PYTHON_INCLUDE_DIR "${_py_framework}/include/python${PYTHON_VERSION}") set(PYTHON_INCLUDE_DIR "${_py_framework}/include/python${PYTHON_VERSION}m")
set(PYTHON_EXECUTABLE "${_py_framework}/bin/python${PYTHON_VERSION}") set(PYTHON_EXECUTABLE "${_py_framework}/bin/python${PYTHON_VERSION}m")
set(PYTHON_LIBPATH "${_py_framework}/lib/python${PYTHON_VERSION}/config-${PYTHON_VERSION}") set(PYTHON_LIBPATH "${_py_framework}/lib/python${PYTHON_VERSION}/config-${PYTHON_VERSION}m")
# set(PYTHON_LIBRARY python${PYTHON_VERSION}) # set(PYTHON_LIBRARY python${PYTHON_VERSION})
# set(PYTHON_LINKFLAGS "-u _PyMac_Error -framework Python") # won't build with this enabled # set(PYTHON_LINKFLAGS "-u _PyMac_Error -framework Python") # won't build with this enabled
@@ -206,13 +206,6 @@ set(PLATFORM_LINKFLAGS
list(APPEND PLATFORM_LINKLIBS c++) list(APPEND PLATFORM_LINKLIBS c++)
if(WITH_OPENIMAGEDENOISE)
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
# OpenImageDenoise uses BNNS from the Accelerate framework.
string(APPEND PLATFORM_LINKFLAGS " -framework Accelerate")
endif()
endif()
if(WITH_JACK) if(WITH_JACK)
string(APPEND PLATFORM_LINKFLAGS " -F/Library/Frameworks -weak_framework jackmp") string(APPEND PLATFORM_LINKFLAGS " -F/Library/Frameworks -weak_framework jackmp")
endif() endif()
@@ -304,12 +297,7 @@ if(WITH_OPENIMAGEIO)
endif() endif()
if(WITH_OPENCOLORIO) if(WITH_OPENCOLORIO)
find_package(OpenColorIO 2.0.0) find_package(OpenColorIO)
if(NOT OPENCOLORIO_FOUND)
set(WITH_OPENCOLORIO OFF)
message(STATUS "OpenColorIO not found")
endif()
endif() endif()
if(WITH_OPENVDB) if(WITH_OPENVDB)
@@ -321,11 +309,8 @@ if(WITH_OPENVDB)
endif() endif()
if(WITH_NANOVDB) if(WITH_NANOVDB)
find_package(NanoVDB) set(NANOVDB ${LIBDIR}/nanovdb)
endif() set(NANOVDB_INCLUDE_DIR ${NANOVDB}/include)
if(WITH_CPU_SIMD AND SUPPORT_NEON_BUILD)
find_package(sse2neon)
endif() endif()
if(WITH_LLVM) if(WITH_LLVM)
@@ -333,13 +318,6 @@ if(WITH_LLVM)
if(NOT LLVM_FOUND) if(NOT LLVM_FOUND)
message(FATAL_ERROR "LLVM not found.") message(FATAL_ERROR "LLVM not found.")
endif() endif()
if(WITH_CLANG)
find_package(Clang)
if(NOT CLANG_FOUND)
message(FATAL_ERROR "Clang not found.")
endif()
endif()
endif() endif()
if(WITH_CYCLES_OSL) if(WITH_CYCLES_OSL)
@@ -352,7 +330,7 @@ if(WITH_CYCLES_OSL)
list(APPEND OSL_LIBRARIES ${OSL_LIB_COMP} -force_load ${OSL_LIB_EXEC} ${OSL_LIB_QUERY}) list(APPEND OSL_LIBRARIES ${OSL_LIB_COMP} -force_load ${OSL_LIB_EXEC} ${OSL_LIB_QUERY})
find_path(OSL_INCLUDE_DIR OSL/oslclosure.h PATHS ${CYCLES_OSL}/include) find_path(OSL_INCLUDE_DIR OSL/oslclosure.h PATHS ${CYCLES_OSL}/include)
find_program(OSL_COMPILER NAMES oslc PATHS ${CYCLES_OSL}/bin) find_program(OSL_COMPILER NAMES oslc PATHS ${CYCLES_OSL}/bin)
find_path(OSL_SHADER_DIR NAMES stdosl.h PATHS ${CYCLES_OSL}/share/OSL/shaders) find_path(OSL_SHADER_DIR NAMES stdosl.h PATHS ${CYCLES_OSL}/shaders)
if(OSL_INCLUDE_DIR AND OSL_LIBRARIES AND OSL_COMPILER AND OSL_SHADER_DIR) if(OSL_INCLUDE_DIR AND OSL_LIBRARIES AND OSL_COMPILER AND OSL_SHADER_DIR)
set(OSL_FOUND TRUE) set(OSL_FOUND TRUE)
@@ -362,6 +340,12 @@ if(WITH_CYCLES_OSL)
endif() endif()
endif() endif()
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
set(WITH_CYCLES_EMBREE OFF)
set(WITH_OPENIMAGEDENOISE OFF)
set(WITH_CPU_SSE OFF)
endif()
if(WITH_CYCLES_EMBREE) if(WITH_CYCLES_EMBREE)
find_package(Embree 3.8.0 REQUIRED) find_package(Embree 3.8.0 REQUIRED)
# Increase stack size for Embree, only works for executables. # Increase stack size for Embree, only works for executables.
@@ -410,7 +394,7 @@ if(WITH_OPENMP)
set(OPENMP_FOUND ON) set(OPENMP_FOUND ON)
set(OpenMP_C_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'") set(OpenMP_C_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'")
set(OpenMP_CXX_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'") set(OpenMP_CXX_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'")
set(OpenMP_LINKER_FLAGS "-L'${LIBDIR}/openmp/lib' -lomp") string(APPEND CMAKE_EXE_LINKER_FLAGS " -L'${LIBDIR}/openmp/lib' -lomp")
# Copy libomp.dylib to allow executables like datatoc and tests to work. # Copy libomp.dylib to allow executables like datatoc and tests to work.
# `@executable_path/../Resources/lib/` is a default dylib search path. # `@executable_path/../Resources/lib/` is a default dylib search path.

View File

@@ -20,7 +20,6 @@
# Xcode and system configuration for Apple. # Xcode and system configuration for Apple.
# Detect processor architecture.
if(NOT CMAKE_OSX_ARCHITECTURES) if(NOT CMAKE_OSX_ARCHITECTURES)
execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCHITECTURE OUTPUT_STRIP_TRAILING_WHITESPACE) execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCHITECTURE OUTPUT_STRIP_TRAILING_WHITESPACE)
message(STATUS "Detected native architecture ${ARCHITECTURE}.") message(STATUS "Detected native architecture ${ARCHITECTURE}.")
@@ -29,93 +28,56 @@ if(NOT CMAKE_OSX_ARCHITECTURES)
FORCE) FORCE)
endif() endif()
# Detect developer directory. Depending on configuration this may be either if(NOT DEFINED OSX_SYSTEM)
# an Xcode or Command Line Tools installation.
execute_process(
COMMAND xcode-select --print-path
OUTPUT_VARIABLE XCODE_DEVELOPER_DIR OUTPUT_STRIP_TRAILING_WHITESPACE)
# Detect Xcode version. It is provided by the Xcode generator but not
# Unix Makefiles or Ninja.
if(NOT ${CMAKE_GENERATOR} MATCHES "Xcode")
# Note that `xcodebuild -version` gives output in two lines: first line will include
# Xcode version, second one will include build number. We are only interested in the
# first line. Here is an example of the output:
# Xcode 11.4
# Build version 11E146
# The expected XCODE_VERSION in this case is 11.4.
execute_process(
COMMAND xcodebuild -version
OUTPUT_VARIABLE _xcode_vers_build_nr
RESULT_VARIABLE _xcode_vers_result
ERROR_QUIET)
if(_xcode_vers_result EQUAL 0)
# Convert output to a single line by replacing newlines with spaces.
# This is needed because regex replace cannot operate through the newline character
# and applies substitutions to each individual line.
string(REPLACE "\n" " " _xcode_vers_build_nr_single_line "${_xcode_vers_build_nr}")
string(REGEX REPLACE "(.*)Xcode ([0-9\\.]+).*" "\\2" XCODE_VERSION "${_xcode_vers_build_nr_single_line}")
unset(_xcode_vers_build_nr_single_line)
endif()
unset(_xcode_vers_build_nr)
unset(_xcode_vers_result)
endif()
if(XCODE_VERSION)
# Construct SDKs path ourselves, because xcode-select path could be ambiguous.
# Either /Applications/Xcode.app/Contents/Developer or /Applications/Xcode.app would be allowed.
set(XCODE_SDK_DIR ${XCODE_DEVELOPER_DIR}/Platforms/MacOSX.platform/Developer/SDKs)
# Detect SDK version to use
if(NOT DEFINED OSX_SYSTEM)
execute_process( execute_process(
COMMAND xcodebuild -version -sdk macosx SDKVersion COMMAND xcodebuild -version -sdk macosx SDKVersion
OUTPUT_VARIABLE OSX_SYSTEM OUTPUT_VARIABLE OSX_SYSTEM
OUTPUT_STRIP_TRAILING_WHITESPACE) OUTPUT_STRIP_TRAILING_WHITESPACE)
endif()
message(STATUS "Detected OS X ${OSX_SYSTEM} and Xcode ${XCODE_VERSION} at ${XCODE_DEVELOPER_DIR}")
message(STATUS "SDKs Directory: " ${XCODE_SDK_DIR})
else()
# If no Xcode version found, try detecting command line tools.
execute_process(
COMMAND pkgutil --pkg-info=com.apple.pkg.CLTools_Executables
OUTPUT_VARIABLE _cltools_pkg_info
OUTPUT_STRIP_TRAILING_WHITESPACE
RESULT_VARIABLE _cltools_pkg_info_result
ERROR_QUIET)
if(_cltools_pkg_info_result EQUAL 0)
# Extract version.
string(REGEX REPLACE ".*version: ([0-9]+)\\.([0-9]+).*" "\\1.\\2" XCODE_VERSION "${_cltools_pkg_info}")
# SDK directory.
set(XCODE_SDK_DIR "${XCODE_DEVELOPER_DIR}/SDKs")
# Detect SDK version to use.
if(NOT DEFINED OSX_SYSTEM)
execute_process(
COMMAND xcrun --show-sdk-version
OUTPUT_VARIABLE OSX_SYSTEM
OUTPUT_STRIP_TRAILING_WHITESPACE)
endif()
message(STATUS "Detected OS X ${OSX_SYSTEM} and Command Line Tools ${XCODE_VERSION} at ${XCODE_DEVELOPER_DIR}")
message(STATUS "SDKs Directory: " ${XCODE_SDK_DIR})
else()
message(FATAL_ERROR "No Xcode or Command Line Tools detected")
endif()
unset(_cltools_pkg_info)
unset(_cltools_pkg_info_result)
endif() endif()
# workaround for incorrect cmake xcode lookup for developer previews - XCODE_VERSION does not
# take xcode-select path into account but would always look into /Applications/Xcode.app
# while dev versions are named Xcode<version>-DP<preview_number>
execute_process(
COMMAND xcode-select --print-path
OUTPUT_VARIABLE XCODE_CHECK OUTPUT_STRIP_TRAILING_WHITESPACE)
string(REPLACE "/Contents/Developer" "" XCODE_BUNDLE ${XCODE_CHECK}) # truncate to bundlepath in any case
if(NOT ${CMAKE_GENERATOR} MATCHES "Xcode")
# Unix makefile generator does not fill XCODE_VERSION var, so we get it with a command.
# Note that `xcodebuild -version` gives output in two lines: first line will include
# Xcode version, second one will include build number. We are only interested in the
# former one. Here is an example of the output:
# Xcode 11.4
# Build version 11E146
# The expected XCODE_VERSION in this case is 11.4.
execute_process(COMMAND xcodebuild -version OUTPUT_VARIABLE XCODE_VERS_BUILD_NR)
# Convert output to a single line by replacling newlines with spaces.
# This is needed because regex replace can not operate through the newline character
# and applies substitutions for each individual lines.
string(REPLACE "\n" " " XCODE_VERS_BUILD_NR_SINGLE_LINE "${XCODE_VERS_BUILD_NR}")
string(REGEX REPLACE "(.*)Xcode ([0-9\\.]+).*" "\\2" XCODE_VERSION "${XCODE_VERS_BUILD_NR_SINGLE_LINE}")
unset(XCODE_VERS_BUILD_NR)
unset(XCODE_VERS_BUILD_NR_SINGLE_LINE)
endif()
message(STATUS "Detected OS X ${OSX_SYSTEM} and Xcode ${XCODE_VERSION} at ${XCODE_BUNDLE}")
# Require a relatively recent Xcode version. # Require a relatively recent Xcode version.
if(${XCODE_VERSION} VERSION_LESS 10.0) if(${XCODE_VERSION} VERSION_LESS 10.0)
message(FATAL_ERROR "Only Xcode version 10.0 and newer is supported") message(FATAL_ERROR "Only Xcode version 10.0 and newer is supported")
endif() endif()
# note: xcode-select path could be ambiguous,
# cause /Applications/Xcode.app/Contents/Developer or /Applications/Xcode.app would be allowed
# so i use a selfcomposed bundlepath here
set(OSX_SYSROOT_PREFIX ${XCODE_BUNDLE}/Contents/Developer/Platforms/MacOSX.platform)
message(STATUS "OSX_SYSROOT_PREFIX: " ${OSX_SYSROOT_PREFIX})
# Collect list of OSX system versions which will be used to detect path to corresponding SDK. # Collect list of OSX system versions which will be used to detect path to corresponding SDK.
# Start with macOS SDK version reported by xcodebuild and include possible extra ones. # Start with macOS SDK version reported by xcodebuild and include possible extra ones.
# #
@@ -139,9 +101,10 @@ endif()
# Loop through all possible versions and pick the first one which resolves to a valid SDK path. # Loop through all possible versions and pick the first one which resolves to a valid SDK path.
set(OSX_SDK_PATH) set(OSX_SDK_PATH)
set(OSX_SDK_FOUND FALSE) set(OSX_SDK_FOUND FALSE)
set(OSX_SDK_PREFIX ${OSX_SYSROOT_PREFIX}/Developer/SDKs)
set(OSX_SDKROOT) set(OSX_SDKROOT)
foreach(OSX_SDK_VERSION ${OSX_SDK_TEST_VERSIONS}) foreach(OSX_SDK_VERSION ${OSX_SDK_TEST_VERSIONS})
set(CURRENT_OSX_SDK_PATH "${XCODE_SDK_DIR}/MacOSX${OSX_SDK_VERSION}.sdk") set(CURRENT_OSX_SDK_PATH "${OSX_SDK_PREFIX}/MacOSX${OSX_SDK_VERSION}.sdk")
if(EXISTS ${CURRENT_OSX_SDK_PATH}) if(EXISTS ${CURRENT_OSX_SDK_PATH})
set(OSX_SDK_PATH "${CURRENT_OSX_SDK_PATH}") set(OSX_SDK_PATH "${CURRENT_OSX_SDK_PATH}")
set(OSX_SDKROOT macosx${OSX_SDK_VERSION}) set(OSX_SDKROOT macosx${OSX_SDK_VERSION})
@@ -149,6 +112,7 @@ foreach(OSX_SDK_VERSION ${OSX_SDK_TEST_VERSIONS})
break() break()
endif() endif()
endforeach() endforeach()
unset(OSX_SDK_PREFIX)
unset(OSX_SDK_TEST_VERSIONS) unset(OSX_SDK_TEST_VERSIONS)
if(NOT OSX_SDK_FOUND) if(NOT OSX_SDK_FOUND)

View File

@@ -70,7 +70,6 @@ if(EXISTS ${LIBDIR})
set(BOOST_LIBRARYDIR ${LIBDIR}/boost/lib) set(BOOST_LIBRARYDIR ${LIBDIR}/boost/lib)
set(Boost_NO_SYSTEM_PATHS ON) set(Boost_NO_SYSTEM_PATHS ON)
set(OPENEXR_ROOT_DIR ${LIBDIR}/openexr) set(OPENEXR_ROOT_DIR ${LIBDIR}/openexr)
set(CLANG_ROOT_DIR ${LIBDIR}/llvm)
endif() endif()
if(WITH_STATIC_LIBS) if(WITH_STATIC_LIBS)
@@ -285,10 +284,6 @@ if(WITH_NANOVDB)
endif() endif()
endif() endif()
if(WITH_CPU_SIMD AND SUPPORT_NEON_BUILD)
find_package_wrapper(sse2neon)
endif()
if(WITH_ALEMBIC) if(WITH_ALEMBIC)
find_package_wrapper(Alembic) find_package_wrapper(Alembic)
@@ -358,7 +353,7 @@ endif()
if(WITH_PUGIXML) if(WITH_PUGIXML)
find_package_wrapper(PugiXML) find_package_wrapper(PugiXML)
if(NOT PUGIXML_FOUND) if (NOT PUGIXML_FOUND)
set(WITH_PUGIXML OFF) set(WITH_PUGIXML OFF)
message(STATUS "PugiXML not found, disabling WITH_PUGIXML") message(STATUS "PugiXML not found, disabling WITH_PUGIXML")
endif() endif()
@@ -390,7 +385,7 @@ if(WITH_OPENIMAGEIO)
endif() endif()
if(WITH_OPENCOLORIO) if(WITH_OPENCOLORIO)
find_package_wrapper(OpenColorIO 2.0.0) find_package_wrapper(OpenColorIO)
set(OPENCOLORIO_LIBRARIES ${OPENCOLORIO_LIBRARIES}) set(OPENCOLORIO_LIBRARIES ${OPENCOLORIO_LIBRARIES})
set(OPENCOLORIO_LIBPATH) # TODO, remove and reference the absolute path everywhere set(OPENCOLORIO_LIBPATH) # TODO, remove and reference the absolute path everywhere
@@ -421,9 +416,7 @@ if(WITH_LLVM)
endif() endif()
find_package_wrapper(LLVM) find_package_wrapper(LLVM)
if(WITH_CLANG)
find_package_wrapper(Clang)
endif()
# Symbol conflicts with same UTF library used by OpenCollada # Symbol conflicts with same UTF library used by OpenCollada
if(EXISTS ${LIBDIR}) if(EXISTS ${LIBDIR})
if(WITH_OPENCOLLADA AND (${LLVM_VERSION} VERSION_LESS "4.0.0")) if(WITH_OPENCOLLADA AND (${LLVM_VERSION} VERSION_LESS "4.0.0"))
@@ -433,13 +426,7 @@ if(WITH_LLVM)
if(NOT LLVM_FOUND) if(NOT LLVM_FOUND)
set(WITH_LLVM OFF) set(WITH_LLVM OFF)
set(WITH_CLANG OFF)
message(STATUS "LLVM not found") message(STATUS "LLVM not found")
else()
if(NOT CLANG_FOUND)
set(WITH_CLANG OFF)
message(STATUS "Clang not found")
endif()
endif() endif()
endif() endif()
@@ -483,14 +470,6 @@ if(WITH_POTRACE)
endif() endif()
endif() endif()
if(WITH_HARU)
find_package_wrapper(Haru)
if(NOT HARU_FOUND)
message(WARNING "Haru not found, disabling WITH_HARU")
set(WITH_HARU OFF)
endif()
endif()
if(EXISTS ${LIBDIR}) if(EXISTS ${LIBDIR})
without_system_libs_end() without_system_libs_end()
endif() endif()

View File

@@ -49,7 +49,7 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
if(NOT EXISTS "${CLANG_OPENMP_DLL}") if(NOT EXISTS "${CLANG_OPENMP_DLL}")
message(FATAL_ERROR "Clang OpenMP library (${CLANG_OPENMP_DLL}) not found.") message(FATAL_ERROR "Clang OpenMP library (${CLANG_OPENMP_DLL}) not found.")
endif() endif()
set(OpenMP_LINKER_FLAGS "\"${CLANG_OPENMP_LIB}\"") string(APPEND CMAKE_EXE_LINKER_FLAGS " \"${CLANG_OPENMP_LIB}\"")
endif() endif()
if(WITH_WINDOWS_STRIPPED_PDB) if(WITH_WINDOWS_STRIPPED_PDB)
message(WARNING "stripped pdb not supported with clang, disabling..") message(WARNING "stripped pdb not supported with clang, disabling..")
@@ -119,7 +119,6 @@ string(APPEND CMAKE_MODULE_LINKER_FLAGS " /SAFESEH:NO /ignore:4099")
list(APPEND PLATFORM_LINKLIBS list(APPEND PLATFORM_LINKLIBS
ws2_32 vfw32 winmm kernel32 user32 gdi32 comdlg32 Comctl32 version ws2_32 vfw32 winmm kernel32 user32 gdi32 comdlg32 Comctl32 version
advapi32 shfolder shell32 ole32 oleaut32 uuid psapi Dbghelp Shlwapi advapi32 shfolder shell32 ole32 oleaut32 uuid psapi Dbghelp Shlwapi
pathcch
) )
if(WITH_INPUT_IME) if(WITH_INPUT_IME)
@@ -422,7 +421,7 @@ if(WITH_JACK)
endif() endif()
if(WITH_PYTHON) if(WITH_PYTHON)
set(PYTHON_VERSION 3.9) # CACHE STRING) set(PYTHON_VERSION 3.7) # CACHE STRING)
string(REPLACE "." "" _PYTHON_VERSION_NO_DOTS ${PYTHON_VERSION}) string(REPLACE "." "" _PYTHON_VERSION_NO_DOTS ${PYTHON_VERSION})
set(PYTHON_LIBRARY ${LIBDIR}/python/${_PYTHON_VERSION_NO_DOTS}/libs/python${_PYTHON_VERSION_NO_DOTS}.lib) set(PYTHON_LIBRARY ${LIBDIR}/python/${_PYTHON_VERSION_NO_DOTS}/libs/python${_PYTHON_VERSION_NO_DOTS}.lib)
@@ -455,18 +454,10 @@ if(WITH_BOOST)
set(BOOST ${LIBDIR}/boost) set(BOOST ${LIBDIR}/boost)
set(BOOST_INCLUDE_DIR ${BOOST}/include) set(BOOST_INCLUDE_DIR ${BOOST}/include)
set(BOOST_LIBPATH ${BOOST}/lib) set(BOOST_LIBPATH ${BOOST}/lib)
set(BOOST_VERSION_HEADER ${BOOST_INCLUDE_DIR}/boost/version.hpp) if(CMAKE_CL_64)
if(EXISTS ${BOOST_VERSION_HEADER}) set(BOOST_POSTFIX "vc141-mt-x64-1_70.lib")
file(STRINGS "${BOOST_VERSION_HEADER}" BOOST_LIB_VERSION REGEX "#define BOOST_LIB_VERSION ") set(BOOST_DEBUG_POSTFIX "vc141-mt-gd-x64-1_70.lib")
if(BOOST_LIB_VERSION MATCHES "#define BOOST_LIB_VERSION \"([0-9_]+)\"")
set(BOOST_VERSION "${CMAKE_MATCH_1}")
endif() endif()
endif()
if(NOT BOOST_VERSION)
message(FATAL_ERROR "Unable to determine Boost version")
endif()
set(BOOST_POSTFIX "vc141-mt-x64-${BOOST_VERSION}.lib")
set(BOOST_DEBUG_POSTFIX "vc141-mt-gd-x64-${BOOST_VERSION}.lib")
set(BOOST_LIBRARIES set(BOOST_LIBRARIES
optimized ${BOOST_LIBPATH}/libboost_date_time-${BOOST_POSTFIX} optimized ${BOOST_LIBPATH}/libboost_date_time-${BOOST_POSTFIX}
optimized ${BOOST_LIBPATH}/libboost_filesystem-${BOOST_POSTFIX} optimized ${BOOST_LIBPATH}/libboost_filesystem-${BOOST_POSTFIX}
@@ -509,7 +500,7 @@ if(WITH_OPENIMAGEIO)
set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG}) set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG})
set(OPENIMAGEIO_DEFINITIONS "-DUSE_TBB=0") set(OPENIMAGEIO_DEFINITIONS "-DUSE_TBB=0")
set(OPENCOLORIO_DEFINITIONS "-DDOpenColorIO_SKIP_IMPORTS") set(OPENCOLORIO_DEFINITIONS "-DOCIO_STATIC_BUILD")
set(OPENIMAGEIO_IDIFF "${OPENIMAGEIO}/bin/idiff.exe") set(OPENIMAGEIO_IDIFF "${OPENIMAGEIO}/bin/idiff.exe")
add_definitions(-DOIIO_STATIC_DEFINE) add_definitions(-DOIIO_STATIC_DEFINE)
add_definitions(-DOIIO_NO_SSE=1) add_definitions(-DOIIO_NO_SSE=1)
@@ -547,13 +538,11 @@ if(WITH_OPENCOLORIO)
set(OPENCOLORIO_LIBPATH ${OPENCOLORIO}/lib) set(OPENCOLORIO_LIBPATH ${OPENCOLORIO}/lib)
set(OPENCOLORIO_LIBRARIES set(OPENCOLORIO_LIBRARIES
optimized ${OPENCOLORIO_LIBPATH}/OpenColorIO.lib optimized ${OPENCOLORIO_LIBPATH}/OpenColorIO.lib
optimized ${OPENCOLORIO_LIBPATH}/tinyxml.lib
optimized ${OPENCOLORIO_LIBPATH}/libyaml-cpp.lib optimized ${OPENCOLORIO_LIBPATH}/libyaml-cpp.lib
optimized ${OPENCOLORIO_LIBPATH}/libexpatMD.lib
optimized ${OPENCOLORIO_LIBPATH}/pystring.lib
debug ${OPENCOLORIO_LIBPATH}/OpencolorIO_d.lib debug ${OPENCOLORIO_LIBPATH}/OpencolorIO_d.lib
debug ${OPENCOLORIO_LIBPATH}/tinyxml_d.lib
debug ${OPENCOLORIO_LIBPATH}/libyaml-cpp_d.lib debug ${OPENCOLORIO_LIBPATH}/libyaml-cpp_d.lib
debug ${OPENCOLORIO_LIBPATH}/libexpatdMD.lib
debug ${OPENCOLORIO_LIBPATH}/pystring_d.lib
) )
set(OPENCOLORIO_DEFINITIONS) set(OPENCOLORIO_DEFINITIONS)
endif() endif()
@@ -672,10 +661,6 @@ endif()
if(WITH_CYCLES_OSL) if(WITH_CYCLES_OSL)
set(CYCLES_OSL ${LIBDIR}/osl CACHE PATH "Path to OpenShadingLanguage installation") set(CYCLES_OSL ${LIBDIR}/osl CACHE PATH "Path to OpenShadingLanguage installation")
set(OSL_SHADER_DIR ${CYCLES_OSL}/shaders) set(OSL_SHADER_DIR ${CYCLES_OSL}/shaders)
# Shaders have moved around a bit between OSL versions, check multiple locations
if(NOT EXISTS "${OSL_SHADER_DIR}")
set(OSL_SHADER_DIR ${CYCLES_OSL}/share/OSL/shaders)
endif()
find_library(OSL_LIB_EXEC NAMES oslexec PATHS ${CYCLES_OSL}/lib) find_library(OSL_LIB_EXEC NAMES oslexec PATHS ${CYCLES_OSL}/lib)
find_library(OSL_LIB_COMP NAMES oslcomp PATHS ${CYCLES_OSL}/lib) find_library(OSL_LIB_COMP NAMES oslcomp PATHS ${CYCLES_OSL}/lib)
find_library(OSL_LIB_QUERY NAMES oslquery PATHS ${CYCLES_OSL}/lib) find_library(OSL_LIB_QUERY NAMES oslquery PATHS ${CYCLES_OSL}/lib)
@@ -796,14 +781,7 @@ if(WITH_XR_OPENXR)
set(XR_OPENXR_SDK ${LIBDIR}/xr_openxr_sdk) set(XR_OPENXR_SDK ${LIBDIR}/xr_openxr_sdk)
set(XR_OPENXR_SDK_LIBPATH ${LIBDIR}/xr_openxr_sdk/lib) set(XR_OPENXR_SDK_LIBPATH ${LIBDIR}/xr_openxr_sdk/lib)
set(XR_OPENXR_SDK_INCLUDE_DIR ${XR_OPENXR_SDK}/include) set(XR_OPENXR_SDK_INCLUDE_DIR ${XR_OPENXR_SDK}/include)
# This is the old name of this library, it is checked to
# support the transition between the old and new lib versions
# this can be removed after the next lib update.
if(EXISTS ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib) set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
else()
set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loaderd.lib)
endif()
else() else()
message(WARNING "OpenXR-SDK was not found, disabling WITH_XR_OPENXR") message(WARNING "OpenXR-SDK was not found, disabling WITH_XR_OPENXR")
set(WITH_XR_OPENXR OFF) set(WITH_XR_OPENXR OFF)

View File

@@ -220,12 +220,14 @@ def cmake_advanced_info():
def cmake_cache_var(var): def cmake_cache_var(var):
with open(os.path.join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8') as cache_file: cache_file = open(join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8')
lines = [ lines = [
l_strip for l in cache_file l_strip for l in cache_file
if (l_strip := l.strip()) for l_strip in (l.strip(),)
if l_strip
if not l_strip.startswith(("//", "#")) if not l_strip.startswith(("//", "#"))
] ]
cache_file.close()
for l in lines: for l in lines:
if l.split(":")[0] == var: if l.split(":")[0] == var:

View File

@@ -1,64 +0,0 @@
"""
Dependency graph: Object.to_curve()
+++++++++++++++++++++++++++++++++++
Function to get a curve from text and curve objects. It is typically used by exporters, render
engines, and tools that need to access the curve representing the object.
The function takes the evaluated dependency graph as a required parameter and optionally a boolean
apply_modifiers which defaults to false. If apply_modifiers is true and the object is a curve object,
the spline deform modifiers are applied on the control points. Note that constructive modifiers and
modifiers that are not spline-enabled will not be applied. So modifiers like Array will not be applied
and deform modifiers that have Apply On Spline disabled will not be applied.
If the object is a text object. The text will be converted into a 3D curve and returned. Modifiers are
never applied on text objects and apply_modifiers will be ignored. If the object is neither a curve nor
a text object, an error will be reported.
.. note:: The resulting curve is owned by the object. It can be freed by calling `object.to_curve_clear()`.
.. note::
The resulting curve must be treated as temporary, and can not be referenced from objects in the main
database.
"""
import bpy
class OBJECT_OT_object_to_curve(bpy.types.Operator):
"""Convert selected object to curve and show number of splines"""
bl_label = "DEG Object to Curve"
bl_idname = "object.object_to_curve"
def execute(self, context):
# Access input original object.
obj = context.object
if obj is None:
self.report({'INFO'}, "No active object to convert to curve")
return {'CANCELLED'}
if obj.type not in {'CURVE', 'FONT'}:
self.report({'INFO'}, "Object can not be converted to curve")
return {'CANCELLED'}
depsgraph = context.evaluated_depsgraph_get()
# Invoke to_curve() without applying modifiers.
curve_without_modifiers = obj.to_curve(depsgraph)
self.report({'INFO'}, f"{len(curve_without_modifiers.splines)} splines in a new curve without modifiers.")
# Remove temporary curve.
obj.to_curve_clear()
# Invoke to_curve() with applying modifiers.
curve_with_modifiers = obj.to_curve(depsgraph, apply_modifiers = True)
self.report({'INFO'}, f"{len(curve_with_modifiers.splines)} splines in new curve with modifiers.")
# Remove temporary curve.
obj.to_curve_clear()
return {'FINISHED'}
def register():
bpy.utils.register_class(OBJECT_OT_object_to_curve)
def unregister():
bpy.utils.unregister_class(OBJECT_OT_object_to_curve)
if __name__ == "__main__":
register()

View File

@@ -1,2 +1,2 @@
Sphinx==3.5.1 Sphinx==3.1.1
sphinx_rtd_theme==0.5.1 sphinx_rtd_theme==0.5.0

View File

@@ -186,7 +186,7 @@ For example, if you want to access the texture of a brush via Python to adjust i
#. From the Sidebar expand the Brush Settings panel's *Texture* subpanel and add a new texture. #. From the Sidebar expand the Brush Settings panel's *Texture* subpanel and add a new texture.
*Notice the texture data-block menu itself doesn't have very useful links (you can check the tooltips).* *Notice the texture data-block menu itself doesn't have very useful links (you can check the tooltips).*
#. The contrast setting isn't exposed in the Sidebar, so view the texture in the #. The contrast setting isn't exposed in the Sidebar, so view the texture in the
:ref:`Properties Editor <blender_manual:bpy.types.Texture.contrast>`. :ref:`Properties Editor <blender_manual:bpy.types.Texture.contrast`
#. Open the context menu of the contrast field and select *Online Python Reference*. #. Open the context menu of the contrast field and select *Online Python Reference*.
This takes you to ``bpy.types.Texture.contrast``. Now you can see that ``contrast`` is a property of texture. This takes you to ``bpy.types.Texture.contrast``. Now you can see that ``contrast`` is a property of texture.
#. To find out how to access the texture from the brush check on the references at the bottom of the page. #. To find out how to access the texture from the brush check on the references at the bottom of the page.

View File

@@ -35,7 +35,7 @@ but not to fully cover each topic.
A quick list of helpful things to know before starting: A quick list of helpful things to know before starting:
- Enable :ref:`Developer Extra <blender_manual:prefs-interface-dev-extras>` - Enable :ref:`Developer Extra <blender_manual:prefs-interface-dev-extras`
and :ref:`Python Tooltips <blender_manual:prefs-interface-tooltips-python>`. and :ref:`Python Tooltips <blender_manual:prefs-interface-tooltips-python>`.
- The :ref:`Python Console <blender_manual:bpy.types.SpaceConsole>` - The :ref:`Python Console <blender_manual:bpy.types.SpaceConsole>`
is great for testing one-liners; it has autocompletion so you can inspect the API quickly. is great for testing one-liners; it has autocompletion so you can inspect the API quickly.

View File

@@ -75,12 +75,12 @@ def rna_info_BuildRNAInfo_cache():
rna_info_BuildRNAInfo_cache.ret = None rna_info_BuildRNAInfo_cache.ret = None
# --- end rna_info cache # --- end rna_info cache
# import rpdb2; rpdb2.start_embedded_debugger('test')
import os import os
import sys import sys
import inspect import inspect
import shutil import shutil
import logging import logging
import warnings
from textwrap import indent from textwrap import indent
@@ -227,7 +227,6 @@ else:
"blf", "blf",
"bl_math", "bl_math",
"imbuf", "imbuf",
"imbuf.types",
"bmesh", "bmesh",
"bmesh.ops", "bmesh.ops",
"bmesh.types", "bmesh.types",
@@ -251,9 +250,6 @@ else:
"gpu.types", "gpu.types",
"gpu.matrix", "gpu.matrix",
"gpu.select", "gpu.select",
"gpu.shader",
"gpu.state",
"gpu.texture",
"gpu_extras", "gpu_extras",
"idprop.types", "idprop.types",
"mathutils", "mathutils",
@@ -1208,7 +1204,7 @@ def pycontext2sphinx(basepath):
# for member in sorted(unique): # for member in sorted(unique):
# print(' "%s": ("", False),' % member) # print(' "%s": ("", False),' % member)
if len(context_type_map) > len(unique): if len(context_type_map) > len(unique):
warnings.warn( raise Exception(
"Some types are not used: %s" % "Some types are not used: %s" %
str([member for member in context_type_map if member not in unique])) str([member for member in context_type_map if member not in unique]))
else: else:
@@ -1718,6 +1714,7 @@ except ModuleNotFoundError:
fw("if html_theme == 'sphinx_rtd_theme':\n") fw("if html_theme == 'sphinx_rtd_theme':\n")
fw(" html_theme_options = {\n") fw(" html_theme_options = {\n")
fw(" 'canonical_url': 'https://docs.blender.org/api/current/',\n")
# fw(" 'analytics_id': '',\n") # fw(" 'analytics_id': '',\n")
# fw(" 'collapse_navigation': True,\n") # fw(" 'collapse_navigation': True,\n")
fw(" 'sticky_navigation': False,\n") fw(" 'sticky_navigation': False,\n")
@@ -1729,7 +1726,6 @@ except ModuleNotFoundError:
# not helpful since the source is generated, adds to upload size. # not helpful since the source is generated, adds to upload size.
fw("html_copy_source = False\n") fw("html_copy_source = False\n")
fw("html_show_sphinx = False\n") fw("html_show_sphinx = False\n")
fw("html_baseurl = 'https://docs.blender.org/api/current/'\n")
fw("html_use_opensearch = 'https://docs.blender.org/api/current'\n") fw("html_use_opensearch = 'https://docs.blender.org/api/current'\n")
fw("html_split_index = True\n") fw("html_split_index = True\n")
fw("html_static_path = ['static']\n") fw("html_static_path = ['static']\n")
@@ -1979,14 +1975,11 @@ def write_rst_importable_modules(basepath):
"aud": "Audio System", "aud": "Audio System",
"blf": "Font Drawing", "blf": "Font Drawing",
"imbuf": "Image Buffer", "imbuf": "Image Buffer",
"imbuf.types": "Image Buffer Types",
"gpu": "GPU Shader Module", "gpu": "GPU Shader Module",
"gpu.types": "GPU Types", "gpu.types": "GPU Types",
"gpu.matrix": "GPU Matrix Utilities", "gpu.matrix": "GPU Matrix",
"gpu.select": "GPU Select Utilities", "gpu.select": "GPU Select",
"gpu.shader": "GPU Shader Utilities", "gpu.shader": "GPU Shader",
"gpu.state": "GPU State Utilities",
"gpu.texture": "GPU Texture Utilities",
"bmesh": "BMesh Module", "bmesh": "BMesh Module",
"bmesh.ops": "BMesh Operators", "bmesh.ops": "BMesh Operators",
"bmesh.types": "BMesh Types", "bmesh.types": "BMesh Types",

doc/python_api/sphinx_doc_gen.sh (new executable file, 156 lines)
View File

@@ -0,0 +1,156 @@
#!/bin/bash
# run from the blender source dir
# bash doc/python_api/sphinx_doc_gen.sh
# ssh upload means you need an account on the server
# ----------------------------------------------------------------------------
# Upload vars
# disable for testing
DO_UPLOAD=true
DO_EXE_BLENDER=true
DO_OUT_HTML=true
DO_OUT_HTML_ZIP=true
DO_OUT_PDF=false
if [ -z $BLENDER_BIN ] ; then
BLENDER_BIN="./blender.bin"
fi
if [ "$1" == "" ] ; then
echo "Expected a single argument for the username on blender.org, skipping upload step!"
DO_UPLOAD=false
else
SSH_USER=$1
SSH_HOST=$SSH_USER"@blender.org"
SSH_UPLOAD="/data/www/vhosts/www.blender.org/api" # blender_python_api_VERSION, added after
fi
# ----------------------------------------------------------------------------
# Blender Version & Info
# 'Blender 2.53 (sub 1) Build' --> '2_53_1' as a shell script.
# "_".join(str(v) for v in bpy.app.version)
# custom blender vars
blender_srcdir=$(dirname -- $0)/../..
blender_version_header="$blender_srcdir/source/blender/blenkernel/BKE_blender_version.h"
blender_version=$(grep "BLENDER_VERSION\s" "$blender_version_header" | awk '{print $3}')
blender_version_cycle=$(grep "BLENDER_VERSION_CYCLE\s" "$blender_version_header" | awk '{print $3}')
unset blender_version_header
BLENDER_VERSION=$(expr $blender_version / 100)_$(expr $blender_version % 100)
SSH_UPLOAD_FULL=$SSH_UPLOAD/"blender_python_api_"$BLENDER_VERSION
SPHINXBASE=doc/python_api
SPHINX_WORKDIR="$(mktemp --directory --suffix=.sphinx)"
# ----------------------------------------------------------------------------
# Generate reStructuredText (blender/python only)
if $DO_EXE_BLENDER ; then
# Don't delete existing docs, now partial updates are used for quick builds.
#
# Disable ASAN error halt since it results in nonzero exit code on any minor issue.
ASAN_OPTIONS=halt_on_error=0:${ASAN_OPTIONS} \
$BLENDER_BIN \
--background \
-noaudio \
--factory-startup \
--python-exit-code 1 \
--python $SPHINXBASE/sphinx_doc_gen.py \
-- \
--output=$SPHINX_WORKDIR
if (($? != 0)) ; then
echo "Generating documentation failed, aborting"
exit 1
fi
fi
# ----------------------------------------------------------------------------
# Generate HTML (sphinx)
if $DO_OUT_HTML ; then
sphinx-build -b html -j auto $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
cd $SPHINX_WORKDIR  # the relative paths below and the closing "cd -" expect to run inside the work dir
# XXX, saves space on upload and zip, should move HTML outside
# and zip up there, for now this is OK
rm -rf sphinx-out/.doctrees
# in case we have a zip already
rm -f blender_python_reference_$BLENDER_VERSION.zip
# ------------------------------------------------------------------------
# ZIP the HTML dir for upload
if $DO_OUT_HTML_ZIP ; then
# lame, temp rename dir
mv sphinx-out blender_python_reference_$BLENDER_VERSION
zip -r -9 blender_python_reference_$BLENDER_VERSION.zip blender_python_reference_$BLENDER_VERSION
mv blender_python_reference_$BLENDER_VERSION sphinx-out
fi
cd -
fi
# ----------------------------------------------------------------------------
# Generate PDF (sphinx/laytex)
if $DO_OUT_PDF ; then
sphinx-build -n -b latex -j auto $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
make -C $SPHINX_WORKDIR/sphinx-out
mv $SPHINX_WORKDIR/sphinx-out/contents.pdf \
$SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf
fi
# ----------------------------------------------------------------------------
# Upload to blender servers, comment this section for testing
if $DO_UPLOAD ; then
ssh $SSH_USER@blender.org 'rm -rf '$SSH_UPLOAD_FULL'/*'
rsync --progress -ave "ssh -p 22" $SPHINX_WORKDIR/sphinx-out/* $SSH_HOST:$SSH_UPLOAD_FULL/
## symlink the dir to a static URL
#ssh $SSH_USER@blender.org 'rm '$SSH_UPLOAD'/250PythonDoc && ln -s '$SSH_UPLOAD_FULL' '$SSH_UPLOAD'/250PythonDoc'
if [ "$blender_version_cycle" = "release" ] ; then
ssh $SSH_USER@blender.org 'rm '$SSH_UPLOAD'/blender_python_api_current && ln -s '$SSH_UPLOAD_FULL' '$SSH_UPLOAD'/blender_python_api_current'
fi
# better redirect
ssh $SSH_USER@blender.org 'echo "<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\" content=\"0;url=../blender_python_api_'$BLENDER_VERSION'/\"></head><body>Redirecting...</body></html>" > '$SSH_UPLOAD'/250PythonDoc/index.html'
# redirect for release only so wiki can point here
if [ "$blender_version_cycle" = "release" ] ; then
ssh $SSH_USER@blender.org 'echo "<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\" content=\"0;url=../blender_python_api_'$BLENDER_VERSION'/\"></head><body>Redirecting...</body></html>" > '$SSH_UPLOAD'/blender_python_api/index.html'
fi
if $DO_OUT_PDF ; then
# rename so local PDF has matching name.
rsync --progress -ave "ssh -p 22" \
$SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf \
$SSH_HOST:$SSH_UPLOAD_FULL/blender_python_reference_$BLENDER_VERSION.pdf
fi
if $DO_OUT_HTML_ZIP ; then
rsync --progress -ave "ssh -p 22" \
$SPHINX_WORKDIR/blender_python_reference_$BLENDER_VERSION.zip \
$SSH_HOST:$SSH_UPLOAD_FULL/blender_python_reference_$BLENDER_VERSION.zip
fi
fi
# ----------------------------------------------------------------------------
# Print some useful text
echo ""
echo "Finished! view the docs from: "
if $DO_OUT_HTML ; then echo " html:" $SPHINX_WORKDIR/sphinx-out/index.html ; fi
if $DO_OUT_PDF ; then echo " pdf:" $SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf ; fi

View File

@@ -0,0 +1,216 @@
#!/usr/bin/env python3
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
"""
This is a helper script to generate Blender Python API documentation (using Sphinx), and update server data using rsync.
You'll need to specify your user login and password, obviously.
Example usage:
./sphinx_doc_update.py --jobs 16 --mirror ../../../docs/remote_api_backup/ --source ../.. --blender ../../../build_cmake/bin/blender --user foobar --password barfoo
"""
import os
import shutil
import subprocess
import sys
import tempfile
import zipfile
DEFAULT_RSYNC_SERVER = "docs.blender.org"
DEFAULT_RSYNC_ROOT = "/api/"
DEFAULT_SYMLINK_ROOT = "/data/www/vhosts/docs.blender.org/api"
def argparse_create():
import argparse
global __doc__
# When --help or no args are given, print this help
usage_text = __doc__
parser = argparse.ArgumentParser(description=usage_text,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
"--mirror", dest="mirror_dir",
metavar='PATH', required=True,
help="Path to local rsync mirror of api doc server")
parser.add_argument(
"--source", dest="source_dir",
metavar='PATH', required=True,
help="Path to Blender git repository")
parser.add_argument(
"--blender", dest="blender",
metavar='PATH', required=True,
help="Path to Blender executable")
parser.add_argument(
"--rsync-server", dest="rsync_server", default=DEFAULT_RSYNC_SERVER,
metavar='RSYNCSERVER', type=str, required=False,
help=("rsync server address"))
parser.add_argument(
"--rsync-root", dest="rsync_root", default=DEFAULT_RSYNC_ROOT,
metavar='RSYNCROOT', type=str, required=False,
help=("Root path of API doc on rsync server"))
parser.add_argument(
"--user", dest="user",
metavar='USER', type=str, required=True,
help=("User to login on rsync server"))
parser.add_argument(
"--password", dest="password",
metavar='PASSWORD', type=str, required=True,
help=("Password to login on rsync server"))
parser.add_argument(
"--jobs", dest="jobs_nr",
metavar='NR', type=int, required=False, default=1,
help="Number of sphinx building jobs to launch in parallel")
return parser
def main():
# ----------
# Parse Args
args = argparse_create().parse_args()
rsync_base = "rsync://%s@%s:%s" % (args.user, args.rsync_server, args.rsync_root)
blenver = api_blenver = api_blenver_zip = ""
api_name = ""
branch = ""
is_release = is_beta = False
# I) Update local mirror using rsync.
rsync_mirror_cmd = ("rsync", "--delete-after", "-avzz", rsync_base, args.mirror_dir)
subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))
with tempfile.TemporaryDirectory() as tmp_dir:
# II) Generate doc source in temp dir.
doc_gen_cmd = (
args.blender, "--background", "-noaudio", "--factory-startup", "--python-exit-code", "1",
"--python", "%s/doc/python_api/sphinx_doc_gen.py" % args.source_dir, "--",
"--output", tmp_dir
)
subprocess.run(doc_gen_cmd)
# III) Get Blender version info.
getver_file = os.path.join(tmp_dir, "blendver.txt")
getver_script = (r"""import sys, bpy
with open(sys.argv[-1], 'w') as f:
is_release = bpy.app.version_cycle in {'rc', 'release'}
is_beta = bpy.app.version_cycle in {'beta'}
branch = bpy.app.build_branch.split()[0].decode()
f.write('%d\n' % is_release)
f.write('%d\n' % is_beta)
f.write('%s\n' % branch)
f.write('%d.%d\n' % (bpy.app.version[0], bpy.app.version[1]))
f.write('%d.%d\n' % (bpy.app.version[0], bpy.app.version[1])
if (is_release or is_beta) else '%s\n' % branch)
f.write('%d_%d' % (bpy.app.version[0], bpy.app.version[1]))
""")
get_ver_cmd = (args.blender, "--background", "-noaudio", "--factory-startup", "--python-exit-code", "1",
"--python-expr", getver_script, "--", getver_file)
subprocess.run(get_ver_cmd)
with open(getver_file) as f:
is_release, is_beta, branch, blenver, api_blenver, api_blenver_zip = f.read().split("\n")
is_release = bool(int(is_release))
is_beta = bool(int(is_beta))
os.remove(getver_file)
# IV) Build doc.
curr_dir = os.getcwd()
os.chdir(tmp_dir)
sphinx_cmd = ("sphinx-build", "-j", str(args.jobs_nr), "-b", "html", "sphinx-in", "sphinx-out")
subprocess.run(sphinx_cmd)
shutil.rmtree(os.path.join("sphinx-out", ".doctrees"))
os.chdir(curr_dir)
# V) Cleanup existing matching dir in server mirror (if any), and copy new doc.
api_name = api_blenver
api_dir = os.path.join(args.mirror_dir, api_name)
if os.path.exists(api_dir):
if os.path.islink(api_dir):
os.remove(api_dir)
else:
shutil.rmtree(api_dir)
os.rename(os.path.join(tmp_dir, "sphinx-out"), api_dir)
# VI) Create zip archive.
zip_name = "blender_python_reference_%s" % api_blenver_zip # We can't use 'release' postfix here...
zip_path = os.path.join(args.mirror_dir, zip_name)
with zipfile.ZipFile(zip_path, 'w') as zf:
for dirname, _, filenames in os.walk(api_dir):
for filename in filenames:
filepath = os.path.join(dirname, filename)
zip_filepath = os.path.join(zip_name, os.path.relpath(filepath, api_dir))
zf.write(filepath, arcname=zip_filepath)
os.rename(zip_path, os.path.join(api_dir, "%s.zip" % zip_name))
# VII) Create symlinks and html redirects.
if is_release:
symlink = os.path.join(args.mirror_dir, "current")
if os.path.exists(symlink):
if os.path.islink(symlink):
os.remove(symlink)
else:
shutil.rmtree(symlink)
os.symlink("./%s" % api_name, symlink)
with open(os.path.join(args.mirror_dir, "250PythonDoc/index.html"), 'w') as f:
f.write("<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\""
"content=\"0;url=../%s/\"></head><body>Redirecting...</body></html>" % api_name)
elif is_beta:
# We do not have any particular symlink for that stage.
pass
elif branch == "master":
# Also create a symlink from version number to actual master api doc.
symlink = os.path.join(args.mirror_dir, blenver)
if os.path.exists(symlink):
if os.path.islink(symlink):
os.remove(symlink)
else:
shutil.rmtree(symlink)
os.symlink("./%s" % api_name, symlink)
with open(os.path.join(args.mirror_dir, "blender_python_api/index.html"), 'w') as f:
f.write("<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\""
"content=\"0;url=../%s/\"></head><body>Redirecting...</body></html>" % api_name)
# VIII) Upload (first do a dry-run so user can ensure everything is OK).
print("Doc generated in local mirror %s, please check it before uploading "
"(hit [Enter] to continue, [Ctrl-C] to exit):" % api_dir)
sys.stdin.read(1)
rsync_mirror_cmd = ("rsync", "--dry-run", "--delete-after", "-avzz", args.mirror_dir, rsync_base)
subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))
print("Rsync upload simulated, please check every thing is OK (hit [Enter] to continue, [Ctrl-C] to exit):")
sys.stdin.read(1)
rsync_mirror_cmd = ("rsync", "--delete-after", "-avzz", args.mirror_dir, rsync_base)
subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))
if __name__ == "__main__":
main()

View File

@@ -75,8 +75,6 @@ endif()
if(WITH_OPENVDB) if(WITH_OPENVDB)
add_definitions(-DOPENVDB=1) add_definitions(-DOPENVDB=1)
# OpenVDB headers use deprecated TBB headers, silence warning.
add_definitions(-DTBB_SUPPRESS_DEPRECATED_MESSAGES=1)
endif() endif()
if(WITH_OPENVDB_BLOSC) if(WITH_OPENVDB_BLOSC)
@@ -133,14 +131,6 @@ if(WITH_OPENVDB)
list(APPEND LIB list(APPEND LIB
${OPENVDB_LIBRARIES} ${OPENVDB_LIBRARIES}
) )
if(WIN32)
# OpenVDB emits lots of these, they should be suppressed through other
# means but MSVC 16.8/16.9 has broken this functionality, so C4251 is
# suppressed here explicitly. See
# https://developercommunity.visualstudio.com/content/problem/1167590/bug.html
# for details.
string(APPEND CMAKE_CXX_FLAGS " /wd4251")
endif()
endif() endif()
set(SRC set(SRC

View File

@@ -664,11 +664,6 @@ template<class T> inline Vec3i toVec3iRound(T v)
return Vec3i((int)round(v[0]), (int)round(v[1]), (int)round(v[2])); return Vec3i((int)round(v[0]), (int)round(v[1]), (int)round(v[2]));
} }
template<class T> inline Vec3i toVec3iFloor(T v)
{
return Vec3i((int)floor(v[0]), (int)floor(v[1]), (int)floor(v[2]));
}
//! convert to int Vector if values are close enough to an int //! convert to int Vector if values are close enough to an int
template<class T> inline Vec3i toVec3iChecked(T v) template<class T> inline Vec3i toVec3iChecked(T v)
{ {

View File

@@ -209,11 +209,11 @@ typename GridType::Ptr exportVDB(Grid<T> *from, float clip, openvdb::FloatGrid::
openvdb::Coord(from->getSizeX() - 1, from->getSizeY() - 1, from->getSizeZ() - 1)); openvdb::Coord(from->getSizeX() - 1, from->getSizeY() - 1, from->getSizeZ() - 1));
openvdb::tools::Dense<ValueT, openvdb::tools::MemoryLayout::LayoutXYZ> dense(bbox, data); openvdb::tools::Dense<ValueT, openvdb::tools::MemoryLayout::LayoutXYZ> dense(bbox, data);
// Use clip value, or (when not exporting in sparse mode) clear it in order to copy all values // Trick: Set clip value to very small / negative value in order to copy all values of dense
// of dense grid // grids
ValueT tmpClip = (from->saveSparse()) ? ValueT(clip) : ValueT(0); float tmpClip = (from->saveSparse()) ? clip : -std::numeric_limits<Real>::max();
// Copy from dense to sparse grid structure considering clip value // Copy from dense to sparse grid structure considering clip value
openvdb::tools::copyFromDense(dense, *to, tmpClip); openvdb::tools::copyFromDense(dense, *to, ValueT(tmpClip));
// If present, use clip grid to trim down current vdb grid even more // If present, use clip grid to trim down current vdb grid even more
if (from->saveSparse() && clipGrid && !clipGrid->empty()) { if (from->saveSparse() && clipGrid && !clipGrid->empty()) {
@@ -245,10 +245,10 @@ void exportVDB(ParticleDataImpl<MantaType> *from,
std::vector<VDBType> vdbValues; std::vector<VDBType> vdbValues;
std::string name = from->getName(); std::string name = from->getName();
BasicParticleSystem *pp = dynamic_cast<BasicParticleSystem *>(from->getParticleSys());
FOR_PARTS(*from) FOR_PARTS(*from)
{ {
// Optionally, skip exporting particles that have been marked as deleted // Optionally, skip exporting particles that have been marked as deleted
BasicParticleSystem *pp = dynamic_cast<BasicParticleSystem *>(from->getParticleSys());
if (skipDeletedParts && !pp->isActive(idx)) { if (skipDeletedParts && !pp->isActive(idx)) {
continue; continue;
} }

View File

@@ -1,3 +1,3 @@
#define MANTA_GIT_VERSION "commit 39b7a415721ecbf6643612a24e8eadd221aeb934" #define MANTA_GIT_VERSION "commit 1c86d86496e7f7473c36248d12ef07bf4d9d2840"

View File

@@ -204,12 +204,14 @@ class GridBase : public PbClass {
inline void checkIndex(int i, int j, int k) const; inline void checkIndex(int i, int j, int k) const;
//! Check if indices are within bounds, otherwise error (should only be called when debugging) //! Check if indices are within bounds, otherwise error (should only be called when debugging)
inline void checkIndex(IndexInt idx) const; inline void checkIndex(IndexInt idx) const;
//! Check if vector int is within given boundaries //! Check if index is within given boundaries
inline bool isInBounds(const Vec3i &p, int bnd = 0) const; inline bool isInBounds(const Vec3i &p, int bnd) const;
//! Check if vector real is within given boundaries //! Check if index is within given boundaries
inline bool isInBounds(const Vec3i &p) const;
//! Check if index is within given boundaries
inline bool isInBounds(const Vec3 &p, int bnd = 0) const inline bool isInBounds(const Vec3 &p, int bnd = 0) const
{ {
return isInBounds(toVec3iFloor(p), bnd); return isInBounds(toVec3i(p), bnd);
} }
//! Check if linear index is in the range of the array //! Check if linear index is in the range of the array
inline bool isInBounds(IndexInt idx) const; inline bool isInBounds(IndexInt idx) const;
@@ -1783,6 +1785,11 @@ inline void GridBase::checkIndex(IndexInt idx) const
} }
} }
bool GridBase::isInBounds(const Vec3i &p) const
{
return (p.x >= 0 && p.y >= 0 && p.z >= 0 && p.x < mSize.x && p.y < mSize.y && p.z < mSize.z);
}
bool GridBase::isInBounds(const Vec3i &p, int bnd) const bool GridBase::isInBounds(const Vec3i &p, int bnd) const
{ {
bool ret = (p.x >= bnd && p.y >= bnd && p.x < mSize.x - bnd && p.y < mSize.y - bnd); bool ret = (p.x >= bnd && p.y >= bnd && p.x < mSize.x - bnd && p.y < mSize.y - bnd);

View File

@@ -429,7 +429,7 @@ void markFluidCells(const BasicParticleSystem &parts,
for (IndexInt idx = 0; idx < parts.size(); idx++) { for (IndexInt idx = 0; idx < parts.size(); idx++) {
if (!parts.isActive(idx) || (ptype && ((*ptype)[idx] & exclude))) if (!parts.isActive(idx) || (ptype && ((*ptype)[idx] & exclude)))
continue; continue;
Vec3i p = toVec3iFloor(parts.getPos(idx)); Vec3i p = toVec3i(parts.getPos(idx));
if (flags.isInBounds(p) && flags.isEmpty(p)) if (flags.isInBounds(p) && flags.isEmpty(p))
flags(p) = (flags(p) | FlagGrid::TypeFluid) & ~FlagGrid::TypeEmpty; flags(p) = (flags(p) | FlagGrid::TypeFluid) & ~FlagGrid::TypeEmpty;
} }
@@ -544,7 +544,7 @@ void adjustNumber(BasicParticleSystem &parts,
// count particles in cells, and delete excess particles // count particles in cells, and delete excess particles
for (IndexInt idx = 0; idx < (int)parts.size(); idx++) { for (IndexInt idx = 0; idx < (int)parts.size(); idx++) {
if (parts.isActive(idx)) { if (parts.isActive(idx)) {
Vec3i p = toVec3iFloor(parts.getPos(idx)); Vec3i p = toVec3i(parts.getPos(idx));
if (!tmp.isInBounds(p)) { if (!tmp.isInBounds(p)) {
parts.kill(idx); // out of domain, remove parts.kill(idx); // out of domain, remove
continue; continue;
@@ -711,7 +711,7 @@ void gridParticleIndex(const BasicParticleSystem &parts,
for (IndexInt idx = 0; idx < (IndexInt)parts.size(); idx++) { for (IndexInt idx = 0; idx < (IndexInt)parts.size(); idx++) {
if (parts.isActive(idx)) { if (parts.isActive(idx)) {
// check index for validity... // check index for validity...
Vec3i p = toVec3iFloor(parts.getPos(idx)); Vec3i p = toVec3i(parts.getPos(idx));
if (!index.isInBounds(p)) { if (!index.isInBounds(p)) {
inactive++; inactive++;
continue; continue;
@@ -740,7 +740,7 @@ void gridParticleIndex(const BasicParticleSystem &parts,
for (IndexInt idx = 0; idx < (IndexInt)parts.size(); idx++) { for (IndexInt idx = 0; idx < (IndexInt)parts.size(); idx++) {
if (!parts.isActive(idx)) if (!parts.isActive(idx))
continue; continue;
Vec3i p = toVec3iFloor(parts.getPos(idx)); Vec3i p = toVec3i(parts.getPos(idx));
if (!index.isInBounds(p)) { if (!index.isInBounds(p)) {
continue; continue;
} }
@@ -1636,7 +1636,7 @@ struct knPushOutofObs : public KernelBase {
{ {
if (!parts.isActive(idx) || (ptype && ((*ptype)[idx] & exclude))) if (!parts.isActive(idx) || (ptype && ((*ptype)[idx] & exclude)))
return; return;
Vec3i p = toVec3iFloor(parts.getPos(idx)); Vec3i p = toVec3i(parts.getPos(idx));
if (!flags.isInBounds(p)) if (!flags.isInBounds(p))
return; return;
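The hunks above (and several more in the files below) switch between toVec3i and toVec3iFloor when mapping particle positions to flag-grid cells before the isInBounds checks. A minimal sketch of why the two conversions differ, assuming toVec3i casts each component to int (truncation toward zero) while toVec3iFloor applies std::floor; neither helper's definition is part of this diff, so both names below are stand-ins:

#include <cmath>
#include <cstdio>

/* Hypothetical stand-ins for the Mantaflow conversion helpers used above. */
struct Vec3 { float x, y, z; };
struct Vec3i { int x, y, z; };

static Vec3i to_cell_truncate(const Vec3 &v)  /* assumed behaviour of toVec3i */
{
  return {(int)v.x, (int)v.y, (int)v.z};
}

static Vec3i to_cell_floor(const Vec3 &v)     /* assumed behaviour of toVec3iFloor */
{
  return {(int)std::floor(v.x), (int)std::floor(v.y), (int)std::floor(v.z)};
}

int main()
{
  /* A particle that drifted slightly past the domain on the negative side. */
  const Vec3 pos = {-0.25f, 2.75f, 0.5f};
  const Vec3i t = to_cell_truncate(pos);  /* (0, 2, 0)  - looks like a valid cell */
  const Vec3i f = to_cell_floor(pos);     /* (-1, 2, 0) - rejected by a bounds check */
  std::printf("truncate: %d %d %d  floor: %d %d %d\n", t.x, t.y, t.z, f.x, f.y, f.z);
  return 0;
}

For non-negative positions the two conversions agree, so the choice only matters for positions just outside the grid.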


@@ -2278,7 +2278,7 @@ T convolveGrid(Grid<T> &originGrid, GaussianKernelCreator &gkSigma, Vec3 pos, in
step = Vec3(0.0, 0.0, 1.0); step = Vec3(0.0, 0.0, 1.0);
T pxResult(0); T pxResult(0);
for (int i = 0; i < gkSigma.mDim; ++i) { for (int i = 0; i < gkSigma.mDim; ++i) {
Vec3i curpos = toVec3iFloor(pos - step * (i - gkSigma.mDim / 2)); Vec3i curpos = toVec3i(pos - step * (i - gkSigma.mDim / 2));
if (originGrid.isInBounds(curpos)) if (originGrid.isInBounds(curpos))
pxResult += gkSigma.get1DKernelValue(i) * originGrid.get(curpos); pxResult += gkSigma.get1DKernelValue(i) * originGrid.get(curpos);
else { // TODO , improve... else { // TODO , improve...
@@ -2423,7 +2423,7 @@ struct KnBlurMACGridGauss : public KernelBase {
Vec3 pxResult(0.0f); Vec3 pxResult(0.0f);
for (int di = 0; di < gkSigma.mDim; ++di) { for (int di = 0; di < gkSigma.mDim; ++di) {
Vec3i curpos = toVec3iFloor(pos - step * (di - gkSigma.mDim / 2)); Vec3i curpos = toVec3i(pos - step * (di - gkSigma.mDim / 2));
if (!originGrid.isInBounds(curpos)) { if (!originGrid.isInBounds(curpos)) {
if (curpos.x < 0) if (curpos.x < 0)
curpos.x = 0; curpos.x = 0;


@@ -1214,7 +1214,7 @@ struct knFlipUpdateSecondaryParticlesLinear : public KernelBase {
// anti tunneling for small obstacles // anti tunneling for small obstacles
for (int ct = 1; ct < antitunneling; ct++) { for (int ct = 1; ct < antitunneling; ct++) {
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos + Vec3i tempPos = toVec3i(pts_sec[idx].pos +
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]); ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) { if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
pts_sec.kill(idx); pts_sec.kill(idx);
@@ -1234,7 +1234,7 @@ struct knFlipUpdateSecondaryParticlesLinear : public KernelBase {
// anti tunneling for small obstacles // anti tunneling for small obstacles
for (int ct = 1; ct < antitunneling; ct++) { for (int ct = 1; ct < antitunneling; ct++) {
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos + Vec3i tempPos = toVec3i(pts_sec[idx].pos +
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]); ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) { if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
pts_sec.kill(idx); pts_sec.kill(idx);
@@ -1252,7 +1252,7 @@ struct knFlipUpdateSecondaryParticlesLinear : public KernelBase {
const Vec3 vj = v.getInterpolated(pts_sec[idx].pos); const Vec3 vj = v.getInterpolated(pts_sec[idx].pos);
// anti tunneling for small obstacles // anti tunneling for small obstacles
for (int ct = 1; ct < antitunneling; ct++) { for (int ct = 1; ct < antitunneling; ct++) {
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt * vj); Vec3i tempPos = toVec3i(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt * vj);
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) { if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
pts_sec.kill(idx); pts_sec.kill(idx);
return; return;
@@ -1474,7 +1474,7 @@ struct knFlipUpdateSecondaryParticlesCubic : public KernelBase {
// anti tunneling for small obstacles // anti tunneling for small obstacles
for (int ct = 1; ct < antitunneling; ct++) { for (int ct = 1; ct < antitunneling; ct++) {
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos + Vec3i tempPos = toVec3i(pts_sec[idx].pos +
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]); ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) { if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
pts_sec.kill(idx); pts_sec.kill(idx);
@@ -1515,7 +1515,7 @@ struct knFlipUpdateSecondaryParticlesCubic : public KernelBase {
// anti tunneling for small obstacles // anti tunneling for small obstacles
for (int ct = 1; ct < antitunneling; ct++) { for (int ct = 1; ct < antitunneling; ct++) {
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos + Vec3i tempPos = toVec3i(pts_sec[idx].pos +
ct * (1 / Real(antitunneling)) * dt * v_sec[idx]); ct * (1 / Real(antitunneling)) * dt * v_sec[idx]);
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) { if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
pts_sec.kill(idx); pts_sec.kill(idx);
@@ -1554,7 +1554,7 @@ struct knFlipUpdateSecondaryParticlesCubic : public KernelBase {
// anti tunneling for small obstacles // anti tunneling for small obstacles
for (int ct = 1; ct < antitunneling; ct++) { for (int ct = 1; ct < antitunneling; ct++) {
Vec3i tempPos = toVec3iFloor(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt * Vec3i tempPos = toVec3i(pts_sec[idx].pos + ct * (1 / Real(antitunneling)) * dt *
(sumNumerator / sumDenominator)); (sumNumerator / sumDenominator));
if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) { if (!flags.isInBounds(tempPos) || flags(tempPos) & FlagGrid::TypeObstacle) {
pts_sec.kill(idx); pts_sec.kill(idx);
@@ -1863,7 +1863,7 @@ struct knFlipDeleteParticlesInObstacle : public KernelBase {
return; return;
const Vec3 &xi = pts[idx].pos; const Vec3 &xi = pts[idx].pos;
const Vec3i xidx = toVec3iFloor(xi); const Vec3i xidx = toVec3i(xi);
// remove particles that completely left the bounds // remove particles that completely left the bounds
if (!flags.isInBounds(xidx)) { if (!flags.isInBounds(xidx)) {
pts.kill(idx); pts.kill(idx);


@@ -2145,7 +2145,8 @@ void PbRegister_particleSurfaceTurbulence()
void debugCheckParts(const BasicParticleSystem &parts, const FlagGrid &flags) void debugCheckParts(const BasicParticleSystem &parts, const FlagGrid &flags)
{ {
for (int idx = 0; idx < parts.size(); idx++) { for (int idx = 0; idx < parts.size(); idx++) {
if (!flags.isInBounds(parts.getPos(idx))) { Vec3i p = toVec3i(parts.getPos(idx));
if (!flags.isInBounds(p)) {
debMsg("bad position??? " << idx << " " << parts.getPos(idx), 1); debMsg("bad position??? " << idx << " " << parts.getPos(idx), 1);
exit(1); exit(1);
} }


@@ -1,4 +1,4 @@
/* /*
* This program is free software; you can redistribute it and/or * This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License * modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2 * as published by the Free Software Foundation; either version 2
@@ -38,13 +38,8 @@
#endif #endif
#if defined(_MSC_VER) #if defined(_MSC_VER)
# include <Windows.h>
# include <VersionHelpers.h> /* This needs to be included after Windows.h. */
# include <io.h> # include <io.h>
# if !defined(ENABLE_VIRTUAL_TERMINAL_PROCESSING) # include <windows.h>
# define ENABLE_VIRTUAL_TERMINAL_PROCESSING 0x0004
# endif
#endif #endif
/* For printing timestamp. */ /* For printing timestamp. */
@@ -233,9 +228,6 @@ enum eCLogColor {
#define COLOR_LEN (COLOR_RESET + 1) #define COLOR_LEN (COLOR_RESET + 1)
static const char *clg_color_table[COLOR_LEN] = {NULL}; static const char *clg_color_table[COLOR_LEN] = {NULL};
#ifdef _WIN32
static DWORD clg_previous_console_mode = 0;
#endif
static void clg_color_table_init(bool use_color) static void clg_color_table_init(bool use_color)
{ {
@@ -556,22 +548,13 @@ static void CLG_ctx_output_set(CLogContext *ctx, void *file_handle)
#if defined(__unix__) || defined(__APPLE__) #if defined(__unix__) || defined(__APPLE__)
ctx->use_color = isatty(ctx->output); ctx->use_color = isatty(ctx->output);
#elif defined(WIN32) #elif defined(WIN32)
/* As of Windows 10 build 18298 all the standard consoles supports color /* Windows Terminal supports color like the Linux terminals do while the standard console does
* like the Linux Terminal do, but it needs to be turned on. * not, the way to tell the two apart is to look at the `WT_SESSION` environment variable which
* To turn on colors we need to enable virtual terminal processing by passing the flag * will only be defined for Windows Terminal. */
* ENABLE_VIRTUAL_TERMINAL_PROCESSING into SetConsoleMode.
* If the system doesn't support virtual terminal processing it will fail silently and the flag
* will not be set. */
GetConsoleMode(GetStdHandle(STD_OUTPUT_HANDLE), &clg_previous_console_mode); /* #getenv is used here rather than #BLI_getenv since it would be a bad level call
* and there are no benefits for using it in this context. */
ctx->use_color = 0; ctx->use_color = isatty(ctx->output) && getenv("WT_SESSION");
if (IsWindows10OrGreater() && isatty(ctx->output)) {
DWORD mode = clg_previous_console_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING;
if (SetConsoleMode(GetStdHandle(STD_OUTPUT_HANDLE), mode)) {
ctx->use_color = 1;
}
}
#endif #endif
} }
@@ -655,9 +638,6 @@ static CLogContext *CLG_ctx_init(void)
static void CLG_ctx_free(CLogContext *ctx) static void CLG_ctx_free(CLogContext *ctx)
{ {
#if defined(WIN32)
SetConsoleMode(GetStdHandle(STD_OUTPUT_HANDLE), clg_previous_console_mode);
#endif
while (ctx->types != NULL) { while (ctx->types != NULL) {
CLG_LogType *item = ctx->types; CLG_LogType *item = ctx->types;
ctx->types = item->next; ctx->types = item->next;
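The clog hunks above differ in how terminal colors are enabled on Windows: one side turns on virtual terminal processing for the standard console (saving the previous mode so CLG_ctx_free can restore it), the other only trusts Windows Terminal by checking the WT_SESSION environment variable. A minimal standalone sketch of the virtual-terminal approach, using plain Win32 calls and independent of clog's context handling:

#ifdef _WIN32
#  include <Windows.h>
#  ifndef ENABLE_VIRTUAL_TERMINAL_PROCESSING
#    define ENABLE_VIRTUAL_TERMINAL_PROCESSING 0x0004
#  endif

/* Returns true if stdout is a console that now accepts ANSI escape sequences. */
static bool console_enable_ansi_colors(void)
{
  HANDLE handle = GetStdHandle(STD_OUTPUT_HANDLE);
  DWORD mode = 0;
  if (handle == INVALID_HANDLE_VALUE || !GetConsoleMode(handle, &mode)) {
    return false; /* Output is redirected or not a console: leave colors off. */
  }
  /* Older consoles without virtual terminal support simply reject the flag. */
  return SetConsoleMode(handle, mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING) != 0;
}
#endif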


@@ -64,7 +64,7 @@ if(WITH_CYCLES_NATIVE_ONLY)
endif() endif()
set(CYCLES_KERNEL_FLAGS "${MSVC_NATIVE_ARCH_FLAGS}") set(CYCLES_KERNEL_FLAGS "${MSVC_NATIVE_ARCH_FLAGS}")
endif() endif()
elseif(NOT WITH_CPU_SIMD OR (SUPPORT_NEON_BUILD AND SSE2NEON_FOUND)) elseif(NOT WITH_CPU_SSE)
set(CXX_HAS_SSE FALSE) set(CXX_HAS_SSE FALSE)
set(CXX_HAS_AVX FALSE) set(CXX_HAS_AVX FALSE)
set(CXX_HAS_AVX2 FALSE) set(CXX_HAS_AVX2 FALSE)


@@ -103,7 +103,7 @@ static bool compile_cuda(CompilationSettings &settings)
return false; return false;
} }
/* Transfer options to a classic C array. */ /* Tranfer options to a classic C array. */
vector<const char *> opts(options.size()); vector<const char *> opts(options.size());
for (size_t i = 0; i < options.size(); i++) { for (size_t i = 0; i < options.size(); i++) {
opts[i] = options[i].c_str(); opts[i] = options[i].c_str();


@@ -564,19 +564,19 @@ static void xml_read_transform(xml_node node, Transform &tfm)
} }
if (node.attribute("translate")) { if (node.attribute("translate")) {
float3 translate = zero_float3(); float3 translate = make_float3(0.0f, 0.0f, 0.0f);
xml_read_float3(&translate, node, "translate"); xml_read_float3(&translate, node, "translate");
tfm = tfm * transform_translate(translate); tfm = tfm * transform_translate(translate);
} }
if (node.attribute("rotate")) { if (node.attribute("rotate")) {
float4 rotate = zero_float4(); float4 rotate = make_float4(0.0f, 0.0f, 0.0f, 0.0f);
xml_read_float4(&rotate, node, "rotate"); xml_read_float4(&rotate, node, "rotate");
tfm = tfm * transform_rotate(DEG2RADF(rotate.x), make_float3(rotate.y, rotate.z, rotate.w)); tfm = tfm * transform_rotate(DEG2RADF(rotate.x), make_float3(rotate.y, rotate.z, rotate.w));
} }
if (node.attribute("scale")) { if (node.attribute("scale")) {
float3 scale = zero_float3(); float3 scale = make_float3(0.0f, 0.0f, 0.0f);
xml_read_float3(&scale, node, "scale"); xml_read_float3(&scale, node, "scale");
tfm = tfm * transform_scale(scale); tfm = tfm * transform_scale(scale);
} }


@@ -15,7 +15,6 @@
# #
# <pep8 compliant> # <pep8 compliant>
from __future__ import annotations
bl_info = { bl_info = {
"name": "Cycles Render Engine", "name": "Cycles Render Engine",


@@ -15,7 +15,6 @@
# #
# <pep8 compliant> # <pep8 compliant>
from __future__ import annotations
def _is_using_buggy_driver(): def _is_using_buggy_driver():
@@ -302,7 +301,7 @@ def list_render_passes(scene, srl):
yield ("Denoising Clean", "RGB", 'COLOR') yield ("Denoising Clean", "RGB", 'COLOR')
# Custom AOV passes. # Custom AOV passes.
for aov in srl.aovs: for aov in crl.aovs:
if aov.type == 'VALUE': if aov.type == 'VALUE':
yield (aov.name, "X", 'VALUE') yield (aov.name, "X", 'VALUE')
else: else:
@@ -310,5 +309,22 @@ def list_render_passes(scene, srl):
def register_passes(engine, scene, view_layer): def register_passes(engine, scene, view_layer):
# Detect duplicate render pass names, first one wins.
listed = set()
for name, channelids, channeltype in list_render_passes(scene, view_layer): for name, channelids, channeltype in list_render_passes(scene, view_layer):
if name not in listed:
engine.register_pass(scene, view_layer, name, len(channelids), channelids, channeltype) engine.register_pass(scene, view_layer, name, len(channelids), channelids, channeltype)
listed.add(name)
def detect_conflicting_passes(scene, view_layer):
# Detect conflicting render pass names for UI.
counter = {}
for name, _, _ in list_render_passes(scene, view_layer):
counter[name] = counter.get(name, 0) + 1
for aov in view_layer.cycles.aovs:
if counter[aov.name] > 1:
aov.conflict = "Conflicts with another render pass with the same name"
else:
aov.conflict = ""


@@ -15,7 +15,6 @@
# #
# <pep8 compliant> # <pep8 compliant>
from __future__ import annotations
import bpy import bpy
from bpy.types import Operator from bpy.types import Operator
@@ -45,6 +44,36 @@ class CYCLES_OT_use_shading_nodes(Operator):
return {'FINISHED'} return {'FINISHED'}
class CYCLES_OT_add_aov(bpy.types.Operator):
"""Add an AOV pass"""
bl_idname = "cycles.add_aov"
bl_label = "Add AOV"
def execute(self, context):
view_layer = context.view_layer
cycles_view_layer = view_layer.cycles
cycles_view_layer.aovs.add()
view_layer.update_render_passes()
return {'FINISHED'}
class CYCLES_OT_remove_aov(bpy.types.Operator):
"""Remove an AOV pass"""
bl_idname = "cycles.remove_aov"
bl_label = "Remove AOV"
def execute(self, context):
view_layer = context.view_layer
cycles_view_layer = view_layer.cycles
cycles_view_layer.aovs.remove(cycles_view_layer.active_aov)
view_layer.update_render_passes()
return {'FINISHED'}
class CYCLES_OT_denoise_animation(Operator): class CYCLES_OT_denoise_animation(Operator):
"Denoise rendered animation sequence using current scene and view " \ "Denoise rendered animation sequence using current scene and view " \
"layer settings. Requires denoising data passes and output to " \ "layer settings. Requires denoising data passes and output to " \
@@ -168,6 +197,8 @@ class CYCLES_OT_merge_images(Operator):
classes = ( classes = (
CYCLES_OT_use_shading_nodes, CYCLES_OT_use_shading_nodes,
CYCLES_OT_add_aov,
CYCLES_OT_remove_aov,
CYCLES_OT_denoise_animation, CYCLES_OT_denoise_animation,
CYCLES_OT_merge_images CYCLES_OT_merge_images
) )


@@ -15,7 +15,6 @@
# #
# <pep8 compliant> # <pep8 compliant>
from __future__ import annotations
import bpy import bpy
import _cycles import _cycles


@@ -15,7 +15,6 @@
# #
# <pep8 compliant> # <pep8 compliant>
from __future__ import annotations
from bl_operators.presets import AddPresetBase from bl_operators.presets import AddPresetBase
from bpy.types import Operator from bpy.types import Operator


@@ -15,7 +15,6 @@
# #
# <pep8 compliant> # <pep8 compliant>
from __future__ import annotations
import bpy import bpy
from bpy.props import ( from bpy.props import (
@@ -179,6 +178,11 @@ enum_view3d_shading_render_pass = (
('MIST', "Mist", "Show the Mist render pass", 32), ('MIST', "Mist", "Show the Mist render pass", 32),
) )
enum_aov_types = (
('VALUE', "Value", "Write a Value pass", 0),
('COLOR', "Color", "Write a Color pass", 1),
)
def enum_openimagedenoise_denoiser(self, context): def enum_openimagedenoise_denoiser(self, context):
import _cycles import _cycles
@@ -225,6 +229,7 @@ def update_render_passes(self, context):
scene = context.scene scene = context.scene
view_layer = context.view_layer view_layer = context.view_layer
view_layer.update_render_passes() view_layer.update_render_passes()
engine.detect_conflicting_passes(scene, view_layer)
class CyclesRenderSettings(bpy.types.PropertyGroup): class CyclesRenderSettings(bpy.types.PropertyGroup):
@@ -646,12 +651,6 @@ class CyclesRenderSettings(bpy.types.PropertyGroup):
min=0, max=(1 << 24), min=0, max=(1 << 24),
default=1, default=1,
) )
preview_denoising_input_passes: EnumProperty(
name="Viewport Input Passes",
description="Passes used by the denoiser to distinguish noise from shader and geometry detail",
items=enum_denoising_input_passes,
default='RGB_ALBEDO',
)
debug_reset_timeout: FloatProperty( debug_reset_timeout: FloatProperty(
name="Reset timeout", name="Reset timeout",
@@ -848,7 +847,7 @@ class CyclesRenderSettings(bpy.types.PropertyGroup):
('MEGA', "Mega", ""), ('MEGA', "Mega", ""),
('SPLIT', "Split", ""), ('SPLIT', "Split", ""),
), ),
update=CyclesRenderSettings._devices_update_callback update=_devices_update_callback
) )
debug_opencl_device_type: EnumProperty( debug_opencl_device_type: EnumProperty(
@@ -862,9 +861,11 @@ class CyclesRenderSettings(bpy.types.PropertyGroup):
('GPU', "GPU", ""), ('GPU', "GPU", ""),
('ACCELERATOR', "Accelerator", ""), ('ACCELERATOR', "Accelerator", ""),
), ),
update=CyclesRenderSettings._devices_update_callback update=_devices_update_callback
) )
del _devices_update_callback
debug_use_opencl_debug: BoolProperty(name="Debug OpenCL", default=False) debug_use_opencl_debug: BoolProperty(name="Debug OpenCL", default=False)
debug_opencl_mem_limit: IntProperty( debug_opencl_mem_limit: IntProperty(
@@ -1310,6 +1311,27 @@ class CyclesCurveRenderSettings(bpy.types.PropertyGroup):
del bpy.types.Scene.cycles_curves del bpy.types.Scene.cycles_curves
class CyclesAOVPass(bpy.types.PropertyGroup):
name: StringProperty(
name="Name",
description="Name of the pass, to use in the AOV Output shader node",
update=update_render_passes,
default="AOV"
)
type: EnumProperty(
name="Type",
description="Pass data type",
update=update_render_passes,
items=enum_aov_types,
default='COLOR'
)
conflict: StringProperty(
name="Conflict",
description="If there is a conflict with another render passes, message explaining why",
default=""
)
class CyclesRenderLayerSettings(bpy.types.PropertyGroup): class CyclesRenderLayerSettings(bpy.types.PropertyGroup):
pass_debug_bvh_traversed_nodes: BoolProperty( pass_debug_bvh_traversed_nodes: BoolProperty(
@@ -1440,6 +1462,7 @@ class CyclesRenderLayerSettings(bpy.types.PropertyGroup):
items=enum_denoising_input_passes, items=enum_denoising_input_passes,
default='RGB_ALBEDO', default='RGB_ALBEDO',
) )
denoising_openimagedenoise_input_passes: EnumProperty( denoising_openimagedenoise_input_passes: EnumProperty(
name="Input Passes", name="Input Passes",
description="Passes used by the denoiser to distinguish noise from shader and geometry detail", description="Passes used by the denoiser to distinguish noise from shader and geometry detail",
@@ -1447,6 +1470,15 @@ class CyclesRenderLayerSettings(bpy.types.PropertyGroup):
default='RGB_ALBEDO_NORMAL', default='RGB_ALBEDO_NORMAL',
) )
aovs: CollectionProperty(
type=CyclesAOVPass,
description="Custom render passes that can be output by shader nodes",
)
active_aov: IntProperty(
default=0,
min=0
)
@classmethod @classmethod
def register(cls): def register(cls):
bpy.types.ViewLayer.cycles = PointerProperty( bpy.types.ViewLayer.cycles = PointerProperty(
@@ -1485,7 +1517,7 @@ class CyclesPreferences(bpy.types.AddonPreferences):
compute_device_type: EnumProperty( compute_device_type: EnumProperty(
name="Compute Device Type", name="Compute Device Type",
description="Device to use for computation (rendering with Cycles)", description="Device to use for computation (rendering with Cycles)",
items=CyclesPreferences.get_device_types, items=get_device_types,
) )
devices: bpy.props.CollectionProperty(type=CyclesDeviceSettings) devices: bpy.props.CollectionProperty(type=CyclesDeviceSettings)
@@ -1633,6 +1665,7 @@ def register():
bpy.utils.register_class(CyclesCurveRenderSettings) bpy.utils.register_class(CyclesCurveRenderSettings)
bpy.utils.register_class(CyclesDeviceSettings) bpy.utils.register_class(CyclesDeviceSettings)
bpy.utils.register_class(CyclesPreferences) bpy.utils.register_class(CyclesPreferences)
bpy.utils.register_class(CyclesAOVPass)
bpy.utils.register_class(CyclesRenderLayerSettings) bpy.utils.register_class(CyclesRenderLayerSettings)
bpy.utils.register_class(CyclesView3DShadingSettings) bpy.utils.register_class(CyclesView3DShadingSettings)
@@ -1654,5 +1687,6 @@ def unregister():
bpy.utils.unregister_class(CyclesCurveRenderSettings) bpy.utils.unregister_class(CyclesCurveRenderSettings)
bpy.utils.unregister_class(CyclesDeviceSettings) bpy.utils.unregister_class(CyclesDeviceSettings)
bpy.utils.unregister_class(CyclesPreferences) bpy.utils.unregister_class(CyclesPreferences)
bpy.utils.unregister_class(CyclesAOVPass)
bpy.utils.unregister_class(CyclesRenderLayerSettings) bpy.utils.unregister_class(CyclesRenderLayerSettings)
bpy.utils.unregister_class(CyclesView3DShadingSettings) bpy.utils.unregister_class(CyclesView3DShadingSettings)


@@ -15,7 +15,6 @@
# #
# <pep8 compliant> # <pep8 compliant>
from __future__ import annotations
import bpy import bpy
from bpy_extras.node_utils import find_node_input from bpy_extras.node_utils import find_node_input
@@ -24,7 +23,7 @@ from bl_ui.utils import PresetPanel
from bpy.types import Panel from bpy.types import Panel
from bl_ui.properties_grease_pencil_common import GreasePencilSimplifyPanel from bl_ui.properties_grease_pencil_common import GreasePencilSimplifyPanel
from bl_ui.properties_view_layer import ViewLayerCryptomattePanel, ViewLayerAOVPanel from bl_ui.properties_view_layer import ViewLayerCryptomattePanel
class CYCLES_PT_sampling_presets(PresetPanel, Panel): class CYCLES_PT_sampling_presets(PresetPanel, Panel):
@@ -275,8 +274,6 @@ class CYCLES_RENDER_PT_sampling_denoising(CyclesButtonsPanel, Panel):
sub.prop(cscene, "denoiser", text="") sub.prop(cscene, "denoiser", text="")
layout.separator()
heading = layout.column(align=False, heading="Viewport") heading = layout.column(align=False, heading="Viewport")
row = heading.row(align=True) row = heading.row(align=True)
row.prop(cscene, "use_preview_denoising", text="") row.prop(cscene, "use_preview_denoising", text="")
@@ -287,9 +284,6 @@ class CYCLES_RENDER_PT_sampling_denoising(CyclesButtonsPanel, Panel):
sub = heading.row(align=True) sub = heading.row(align=True)
sub.active = cscene.use_preview_denoising sub.active = cscene.use_preview_denoising
sub.prop(cscene, "preview_denoising_start_sample", text="Start Sample") sub.prop(cscene, "preview_denoising_start_sample", text="Start Sample")
sub = heading.row(align=True)
sub.active = cscene.use_preview_denoising
sub.prop(cscene, "preview_denoising_input_passes", text="Input Passes")
class CYCLES_RENDER_PT_sampling_advanced(CyclesButtonsPanel, Panel): class CYCLES_RENDER_PT_sampling_advanced(CyclesButtonsPanel, Panel):
@@ -892,7 +886,7 @@ class CYCLES_RENDER_PT_passes_light(CyclesButtonsPanel, Panel):
col.prop(view_layer, "use_pass_ambient_occlusion", text="Ambient Occlusion") col.prop(view_layer, "use_pass_ambient_occlusion", text="Ambient Occlusion")
class CYCLES_RENDER_PT_passes_crypto(CyclesButtonsPanel, ViewLayerCryptomattePanel, Panel): class CYCLES_RENDER_PT_passes_crypto(CyclesButtonsPanel, ViewLayerCryptomattePanel):
bl_label = "Cryptomatte" bl_label = "Cryptomatte"
bl_context = "view_layer" bl_context = "view_layer"
bl_parent_id = "CYCLES_RENDER_PT_passes" bl_parent_id = "CYCLES_RENDER_PT_passes"
@@ -921,11 +915,49 @@ class CYCLES_RENDER_PT_passes_debug(CyclesButtonsPanel, Panel):
layout.prop(cycles_view_layer, "pass_debug_ray_bounces") layout.prop(cycles_view_layer, "pass_debug_ray_bounces")
class CYCLES_RENDER_PT_passes_aov(CyclesButtonsPanel, ViewLayerAOVPanel): class CYCLES_RENDER_UL_aov(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
row = layout.row()
split = row.split(factor=0.65)
icon = 'ERROR' if item.conflict else 'NONE'
split.row().prop(item, "name", text="", icon=icon, emboss=False)
split.row().prop(item, "type", text="", emboss=False)
class CYCLES_RENDER_PT_passes_aov(CyclesButtonsPanel, Panel):
bl_label = "Shader AOV" bl_label = "Shader AOV"
bl_context = "view_layer" bl_context = "view_layer"
bl_parent_id = "CYCLES_RENDER_PT_passes" bl_parent_id = "CYCLES_RENDER_PT_passes"
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False
cycles_view_layer = context.view_layer.cycles
row = layout.row()
col = row.column()
col.template_list(
"CYCLES_RENDER_UL_aov",
"aovs",
cycles_view_layer,
"aovs",
cycles_view_layer,
"active_aov",
rows=2,
)
col = row.column()
sub = col.column(align=True)
sub.operator("cycles.add_aov", icon='ADD', text="")
sub.operator("cycles.remove_aov", icon='REMOVE', text="")
if cycles_view_layer.active_aov < len(cycles_view_layer.aovs):
active_aov = cycles_view_layer.aovs[cycles_view_layer.active_aov]
if active_aov.conflict:
layout.label(text=active_aov.conflict, icon='ERROR')
class CYCLES_RENDER_PT_denoising(CyclesButtonsPanel, Panel): class CYCLES_RENDER_PT_denoising(CyclesButtonsPanel, Panel):
bl_label = "Denoising" bl_label = "Denoising"
@@ -2268,6 +2300,7 @@ classes = (
CYCLES_RENDER_PT_passes_light, CYCLES_RENDER_PT_passes_light,
CYCLES_RENDER_PT_passes_crypto, CYCLES_RENDER_PT_passes_crypto,
CYCLES_RENDER_PT_passes_debug, CYCLES_RENDER_PT_passes_debug,
CYCLES_RENDER_UL_aov,
CYCLES_RENDER_PT_passes_aov, CYCLES_RENDER_PT_passes_aov,
CYCLES_RENDER_PT_filter, CYCLES_RENDER_PT_filter,
CYCLES_RENDER_PT_override, CYCLES_RENDER_PT_override,


@@ -15,7 +15,6 @@
# #
# <pep8 compliant> # <pep8 compliant>
from __future__ import annotations
import bpy import bpy
import math import math
@@ -109,7 +108,7 @@ def do_versions(self):
library_versions.setdefault(library.version, []).append(library) library_versions.setdefault(library.version, []).append(library)
# Do versioning per library, since they might have different versions. # Do versioning per library, since they might have different versions.
max_need_versioning = (2, 93, 7) max_need_versioning = (2, 92, 4)
for version, libraries in library_versions.items(): for version, libraries in library_versions.items():
if version > max_need_versioning: if version > max_need_versioning:
continue continue
@@ -205,18 +204,6 @@ def do_versions(self):
view_layer.pass_cryptomatte_depth = cview_layer.get("pass_crypto_depth", 6) view_layer.pass_cryptomatte_depth = cview_layer.get("pass_crypto_depth", 6)
view_layer.use_pass_cryptomatte_accurate = cview_layer.get("pass_crypto_accurate", True) view_layer.use_pass_cryptomatte_accurate = cview_layer.get("pass_crypto_accurate", True)
if version <= (2, 93, 7):
if scene.render.engine == 'CYCLES':
for view_layer in scene.view_layers:
cview_layer = view_layer.cycles
for caov in cview_layer.get("aovs", []):
aov_name = caov.get("name", "AOV")
if aov_name in view_layer.aovs:
continue
baov = view_layer.aovs.add()
baov.name = caov.get("name", "AOV")
baov.type = "COLOR" if caov.get("type", 1) == 1 else "VALUE"
# Lamps # Lamps
for light in bpy.data.lights: for light in bpy.data.lights:
if light.library not in libraries: if light.library not in libraries:


@@ -112,7 +112,7 @@ static void blender_camera_init(BlenderCamera *bcam, BL::RenderSettings &b_rende
bcam->focaldistance = 10.0f; bcam->focaldistance = 10.0f;
bcam->zoom = 1.0f; bcam->zoom = 1.0f;
bcam->pixelaspect = one_float2(); bcam->pixelaspect = make_float2(1.0f, 1.0f);
bcam->aperture_ratio = 1.0f; bcam->aperture_ratio = 1.0f;
bcam->sensor_width = 36.0f; bcam->sensor_width = 36.0f;


@@ -121,8 +121,8 @@ static bool ObtainCacheParticleData(
CData->curve_firstkey.push_back_slow(keyno); CData->curve_firstkey.push_back_slow(keyno);
float curve_length = 0.0f; float curve_length = 0.0f;
float3 prev_co_world = zero_float3(); float3 prev_co_world = make_float3(0.0f, 0.0f, 0.0f);
float3 prev_co_object = zero_float3(); float3 prev_co_object = make_float3(0.0f, 0.0f, 0.0f);
for (int step_no = 0; step_no < ren_step; step_no++) { for (int step_no = 0; step_no < ren_step; step_no++) {
float3 co_world = prev_co_world; float3 co_world = prev_co_world;
b_psys.co_hair(*b_ob, pa_no, step_no, &co_world.x); b_psys.co_hair(*b_ob, pa_no, step_no, &co_world.x);
@@ -197,7 +197,7 @@ static bool ObtainCacheParticleUV(Hair *hair,
BL::Mesh::uv_layers_iterator l; BL::Mesh::uv_layers_iterator l;
b_mesh->uv_layers.begin(l); b_mesh->uv_layers.begin(l);
float2 uv = zero_float2(); float2 uv = make_float2(0.0f, 0.0f);
if (b_mesh->uv_layers.length()) if (b_mesh->uv_layers.length())
b_psys.uv_on_emitter(psmd, *b_pa, pa_no, uv_num, &uv.x); b_psys.uv_on_emitter(psmd, *b_pa, pa_no, uv_num, &uv.x);
CData->curve_uv.push_back_slow(uv); CData->curve_uv.push_back_slow(uv);
@@ -678,7 +678,7 @@ static void export_hair_curves(Scene *scene, Hair *hair, BL::Hair b_hair)
const int first_point_index = b_curve.first_point_index(); const int first_point_index = b_curve.first_point_index();
const int num_points = b_curve.num_points(); const int num_points = b_curve.num_points();
float3 prev_co = zero_float3(); float3 prev_co = make_float3(0.0f, 0.0f, 0.0f);
float length = 0.0f; float length = 0.0f;
if (attr_intercept) { if (attr_intercept) {
points_length.clear(); points_length.clear();


@@ -310,143 +310,6 @@ static void attr_create_sculpt_vertex_color(Scene *scene,
} }
} }
template<typename TypeInCycles, typename GetValueAtIndex>
static void fill_generic_attribute(BL::Mesh &b_mesh,
TypeInCycles *data,
const AttributeElement element,
const GetValueAtIndex &get_value_at_index)
{
switch (element) {
case ATTR_ELEMENT_CORNER: {
for (BL::MeshLoopTriangle &t : b_mesh.loop_triangles) {
const int index = t.index() * 3;
BL::Array<int, 3> loops = t.loops();
data[index] = get_value_at_index(loops[0]);
data[index + 1] = get_value_at_index(loops[1]);
data[index + 2] = get_value_at_index(loops[2]);
}
break;
}
case ATTR_ELEMENT_VERTEX: {
const int num_verts = b_mesh.vertices.length();
for (int i = 0; i < num_verts; i++) {
data[i] = get_value_at_index(i);
}
break;
}
case ATTR_ELEMENT_FACE: {
for (BL::MeshLoopTriangle &t : b_mesh.loop_triangles) {
data[t.index()] = get_value_at_index(t.polygon_index());
}
break;
}
default: {
assert(false);
break;
}
}
}
static void attr_create_generic(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh, bool subdivision)
{
if (subdivision) {
/* TODO: Handle subdivision correctly. */
return;
}
AttributeSet &attributes = mesh->attributes;
for (BL::Attribute &b_attribute : b_mesh.attributes) {
const ustring name{b_attribute.name().c_str()};
if (!mesh->need_attribute(scene, name)) {
continue;
}
if (attributes.find(name)) {
continue;
}
const BL::Attribute::domain_enum b_domain = b_attribute.domain();
const BL::Attribute::data_type_enum b_data_type = b_attribute.data_type();
AttributeElement element = ATTR_ELEMENT_NONE;
switch (b_domain) {
case BL::Attribute::domain_CORNER:
element = ATTR_ELEMENT_CORNER;
break;
case BL::Attribute::domain_POINT:
element = ATTR_ELEMENT_VERTEX;
break;
case BL::Attribute::domain_POLYGON:
element = ATTR_ELEMENT_FACE;
break;
default:
break;
}
if (element == ATTR_ELEMENT_NONE) {
/* Not supported. */
continue;
}
switch (b_data_type) {
case BL::Attribute::data_type_FLOAT: {
BL::FloatAttribute b_float_attribute{b_attribute};
Attribute *attr = attributes.add(name, TypeFloat, element);
float *data = attr->data_float();
fill_generic_attribute(
b_mesh, data, element, [&](int i) { return b_float_attribute.data[i].value(); });
break;
}
case BL::Attribute::data_type_BOOLEAN: {
BL::BoolAttribute b_bool_attribute{b_attribute};
Attribute *attr = attributes.add(name, TypeFloat, element);
float *data = attr->data_float();
fill_generic_attribute(
b_mesh, data, element, [&](int i) { return (float)b_bool_attribute.data[i].value(); });
break;
}
case BL::Attribute::data_type_INT: {
BL::IntAttribute b_int_attribute{b_attribute};
Attribute *attr = attributes.add(name, TypeFloat, element);
float *data = attr->data_float();
fill_generic_attribute(
b_mesh, data, element, [&](int i) { return (float)b_int_attribute.data[i].value(); });
break;
}
case BL::Attribute::data_type_FLOAT_VECTOR: {
BL::FloatVectorAttribute b_vector_attribute{b_attribute};
Attribute *attr = attributes.add(name, TypeVector, element);
float3 *data = attr->data_float3();
fill_generic_attribute(b_mesh, data, element, [&](int i) {
BL::Array<float, 3> v = b_vector_attribute.data[i].vector();
return make_float3(v[0], v[1], v[2]);
});
break;
}
case BL::Attribute::data_type_FLOAT_COLOR: {
BL::FloatColorAttribute b_color_attribute{b_attribute};
Attribute *attr = attributes.add(name, TypeRGBA, element);
float4 *data = attr->data_float4();
fill_generic_attribute(b_mesh, data, element, [&](int i) {
BL::Array<float, 4> v = b_color_attribute.data[i].color();
return make_float4(v[0], v[1], v[2], v[3]);
});
break;
}
case BL::Attribute::data_type_FLOAT2: {
BL::Float2Attribute b_float2_attribute{b_attribute};
Attribute *attr = attributes.add(name, TypeFloat2, element);
float2 *data = attr->data_float2();
fill_generic_attribute(b_mesh, data, element, [&](int i) {
BL::Array<float, 2> v = b_float2_attribute.data[i].vector();
return make_float2(v[0], v[1]);
});
break;
}
default:
/* Not supported. */
break;
}
}
}
/* Create vertex color attributes. */ /* Create vertex color attributes. */
static void attr_create_vertex_color(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh, bool subdivision) static void attr_create_vertex_color(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh, bool subdivision)
{ {
@@ -726,7 +589,7 @@ static void attr_create_pointiness(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh, b
/* STEP 2: Calculate vertex normals taking into account their possible /* STEP 2: Calculate vertex normals taking into account their possible
* duplicates which gets "welded" together. * duplicates which gets "welded" together.
*/ */
vector<float3> vert_normal(num_verts, zero_float3()); vector<float3> vert_normal(num_verts, make_float3(0.0f, 0.0f, 0.0f));
/* First we accumulate all vertex normals in the original index. */ /* First we accumulate all vertex normals in the original index. */
for (int vert_index = 0; vert_index < num_verts; ++vert_index) { for (int vert_index = 0; vert_index < num_verts; ++vert_index) {
const float3 normal = get_float3(b_mesh.vertices[vert_index].normal()); const float3 normal = get_float3(b_mesh.vertices[vert_index].normal());
@@ -743,7 +606,7 @@ static void attr_create_pointiness(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh, b
/* STEP 3: Calculate pointiness using single ring neighborhood. */ /* STEP 3: Calculate pointiness using single ring neighborhood. */
vector<int> counter(num_verts, 0); vector<int> counter(num_verts, 0);
vector<float> raw_data(num_verts, 0.0f); vector<float> raw_data(num_verts, 0.0f);
vector<float3> edge_accum(num_verts, zero_float3()); vector<float3> edge_accum(num_verts, make_float3(0.0f, 0.0f, 0.0f));
BL::Mesh::edges_iterator e; BL::Mesh::edges_iterator e;
EdgeMap visited_edges; EdgeMap visited_edges;
int edge_index = 0; int edge_index = 0;
@@ -974,7 +837,6 @@ static void create_mesh(Scene *scene,
attr_create_vertex_color(scene, mesh, b_mesh, subdivision); attr_create_vertex_color(scene, mesh, b_mesh, subdivision);
attr_create_sculpt_vertex_color(scene, mesh, b_mesh, subdivision); attr_create_sculpt_vertex_color(scene, mesh, b_mesh, subdivision);
attr_create_random_per_island(scene, mesh, b_mesh, subdivision); attr_create_random_per_island(scene, mesh, b_mesh, subdivision);
attr_create_generic(scene, mesh, b_mesh, subdivision);
if (subdivision) { if (subdivision) {
attr_create_subd_uv_map(scene, mesh, b_mesh, subdivide_uvs); attr_create_subd_uv_map(scene, mesh, b_mesh, subdivide_uvs);


@@ -323,8 +323,8 @@ Object *BlenderSync::sync_object(BL::Depsgraph &b_depsgraph,
object->set_random_id(b_instance.random_id()); object->set_random_id(b_instance.random_id());
} }
else { else {
object->set_dupli_generated(zero_float3()); object->set_dupli_generated(make_float3(0.0f, 0.0f, 0.0f));
object->set_dupli_uv(zero_float2()); object->set_dupli_uv(make_float2(0.0f, 0.0f));
object->set_random_id(hash_uint2(hash_string(object->name.c_str()), 0)); object->set_random_id(hash_uint2(hash_string(object->name.c_str()), 0));
} }


@@ -31,7 +31,7 @@ bool BlenderSync::sync_dupli_particle(BL::Object &b_ob,
BL::DepsgraphObjectInstance &b_instance, BL::DepsgraphObjectInstance &b_instance,
Object *object) Object *object)
{ {
/* Test if this dupli was generated from a particle system. */ /* test if this dupli was generated from a particle sytem */
BL::ParticleSystem b_psys = b_instance.particle_system(); BL::ParticleSystem b_psys = b_instance.particle_system();
if (!b_psys) if (!b_psys)
return false; return false;


@@ -147,7 +147,7 @@ void python_thread_state_restore(void **python_thread_state)
static const char *PyC_UnicodeAsByte(PyObject *py_str, PyObject **coerce) static const char *PyC_UnicodeAsByte(PyObject *py_str, PyObject **coerce)
{ {
const char *result = PyUnicode_AsUTF8(py_str); const char *result = _PyUnicode_AsString(py_str);
if (result) { if (result) {
/* 99% of the time this is enough but we better support non unicode /* 99% of the time this is enough but we better support non unicode
* chars since blender doesn't limit this. * chars since blender doesn't limit this.


@@ -136,7 +136,7 @@ class BlenderSession {
/* ** Resumable render ** */ /* ** Resumable render ** */
/* Overall number of chunks in which the sample range is to be divided. */ /* Overall number of chunks in which the sample range is to be devided. */
static int num_resumable_chunks; static int num_resumable_chunks;
/* Current resumable chunk index to render. */ /* Current resumable chunk index to render. */


@@ -1404,7 +1404,7 @@ void BlenderSync::sync_world(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d,
world_color = get_float3(b_world.color()); world_color = get_float3(b_world.color());
} }
else { else {
world_color = zero_float3(); world_color = make_float3(0.0f, 0.0f, 0.0f);
} }
BackgroundNode *background = graph->create_node<BackgroundNode>(); BackgroundNode *background = graph->create_node<BackgroundNode>();
@@ -1535,7 +1535,7 @@ void BlenderSync::sync_lights(BL::Depsgraph &b_depsgraph, bool update_all)
} }
else { else {
EmissionNode *emission = graph->create_node<EmissionNode>(); EmissionNode *emission = graph->create_node<EmissionNode>();
emission->set_color(one_float3()); emission->set_color(make_float3(1.0f, 1.0f, 1.0f));
emission->set_strength(1.0f); emission->set_strength(1.0f);
graph->add(emission); graph->add(emission);


@@ -697,15 +697,9 @@ vector<Pass> BlenderSync::sync_render_passes(BL::RenderLayer &b_rlay,
} }
} }
BL::ViewLayer::aovs_iterator b_aov_iter; RNA_BEGIN (&crl, b_aov, "aovs") {
for (b_view_layer.aovs.begin(b_aov_iter); b_aov_iter != b_view_layer.aovs.end(); ++b_aov_iter) { bool is_color = (get_enum(b_aov, "type") == 1);
BL::AOV b_aov(*b_aov_iter); string name = get_string(b_aov, "name");
if (!b_aov.is_valid()) {
continue;
}
string name = b_aov.name();
bool is_color = b_aov.type() == BL::AOV::type_COLOR;
if (is_color) { if (is_color) {
b_engine.add_pass(name.c_str(), 4, "RGBA", b_view_layer.name().c_str()); b_engine.add_pass(name.c_str(), 4, "RGBA", b_view_layer.name().c_str());
@@ -716,6 +710,7 @@ vector<Pass> BlenderSync::sync_render_passes(BL::RenderLayer &b_rlay,
Pass::add(PASS_AOV_VALUE, passes, name.c_str()); Pass::add(PASS_AOV_VALUE, passes, name.c_str());
} }
} }
RNA_END;
scene->film->set_denoising_data_pass(denoising.use || denoising.store_passes); scene->film->set_denoising_data_pass(denoising.use || denoising.store_passes);
scene->film->set_denoising_clean_pass(scene->film->get_denoising_flags() & scene->film->set_denoising_clean_pass(scene->film->get_denoising_flags() &
@@ -947,7 +942,7 @@ SessionParams BlenderSync::get_session_params(BL::RenderEngine &b_engine,
else if (shadingsystem == 1) else if (shadingsystem == 1)
params.shadingsystem = SHADINGSYSTEM_OSL; params.shadingsystem = SHADINGSYSTEM_OSL;
/* Color management. */ /* color managagement */
params.display_buffer_linear = b_engine.support_display_space_shader(b_scene); params.display_buffer_linear = b_engine.support_display_space_shader(b_scene);
if (b_engine.is_preview()) { if (b_engine.is_preview()) {
@@ -1005,9 +1000,6 @@ DenoiseParams BlenderSync::get_denoise_params(BL::Scene &b_scene,
cscene, "preview_denoiser", DENOISER_NUM, DENOISER_NONE); cscene, "preview_denoiser", DENOISER_NUM, DENOISER_NONE);
denoising.start_sample = get_int(cscene, "preview_denoising_start_sample"); denoising.start_sample = get_int(cscene, "preview_denoising_start_sample");
denoising.input_passes = (DenoiserInput)get_enum(
cscene, "preview_denoising_input_passes", DENOISER_INPUT_NUM, (int)denoising.input_passes);
/* Auto select fastest denoiser. */ /* Auto select fastest denoiser. */
if (denoising.type == DENOISER_NONE) { if (denoising.type == DENOISER_NONE) {
if (!Device::available_devices(DEVICE_MASK_OPTIX).empty()) { if (!Device::available_devices(DEVICE_MASK_OPTIX).empty()) {


@@ -43,8 +43,8 @@ void point_density_texture_space(BL::Depsgraph &b_depsgraph,
{ {
BL::Object b_ob(b_point_density_node.object()); BL::Object b_ob(b_point_density_node.object());
if (!b_ob) { if (!b_ob) {
loc = zero_float3(); loc = make_float3(0.0f, 0.0f, 0.0f);
size = zero_float3(); size = make_float3(0.0f, 0.0f, 0.0f);
return; return;
} }
float3 min, max; float3 min, max;


@@ -69,7 +69,7 @@ BVHLayout BVHParams::best_bvh_layout(BVHLayout requested_layout, BVHLayoutMask s
allowed_layouts_mask = supported_layouts; allowed_layouts_mask = supported_layouts;
} }
/* We get widest from allowed ones and convert mask to actual layout. */ /* We get widest from allowed ones and convert mask to actual layout. */
const BVHLayoutMask widest_allowed_layout_mask = __bsr((uint32_t)allowed_layouts_mask); const BVHLayoutMask widest_allowed_layout_mask = __bsr(allowed_layouts_mask);
return (BVHLayout)(1 << widest_allowed_layout_mask); return (BVHLayout)(1 << widest_allowed_layout_mask);
} }


@@ -107,9 +107,9 @@ BVHObjectBinning::BVHObjectBinning(const BVHRange &job,
/* map geometry to bins, unrolled once */ /* map geometry to bins, unrolled once */
{ {
int64_t i; ssize_t i;
for (i = 0; i < int64_t(size()) - 1; i += 2) { for (i = 0; i < ssize_t(size()) - 1; i += 2) {
prefetch_L2(&prims[start() + i + 8]); prefetch_L2(&prims[start() + i + 8]);
/* map even and odd primitive to bin */ /* map even and odd primitive to bin */
@@ -146,7 +146,7 @@ BVHObjectBinning::BVHObjectBinning(const BVHRange &job,
} }
/* for uneven number of primitives */ /* for uneven number of primitives */
if (i < int64_t(size())) { if (i < ssize_t(size())) {
/* map primitive to bin */ /* map primitive to bin */
const BVHReference &prim0 = prims[start() + i]; const BVHReference &prim0 = prims[start() + i];
BoundBox bounds0 = get_prim_bounds(prim0); BoundBox bounds0 = get_prim_bounds(prim0);
@@ -237,7 +237,7 @@ void BVHObjectBinning::split(BVHReference *prims,
BoundBox lcent_bounds = BoundBox::empty; BoundBox lcent_bounds = BoundBox::empty;
BoundBox rcent_bounds = BoundBox::empty; BoundBox rcent_bounds = BoundBox::empty;
int64_t l = 0, r = N - 1; ssize_t l = 0, r = N - 1;
while (l <= r) { while (l <= r) {
prefetch_L2(&prims[start() + l + 8]); prefetch_L2(&prims[start() + l + 8]);


@@ -360,7 +360,7 @@ void BVHBuild::add_references(BVHRange &root)
/* happens mostly on empty meshes */ /* happens mostly on empty meshes */
if (!bounds.valid()) if (!bounds.valid())
bounds.grow(zero_float3()); bounds.grow(make_float3(0.0f, 0.0f, 0.0f));
root = BVHRange(bounds, center, 0, references.size()); root = BVHRange(bounds, center, 0, references.size());
} }
@@ -703,7 +703,7 @@ BVHNode *BVHBuild::build_node(const BVHRange &range,
unalignedSplitSAH = params.sah_node_cost * unaligned_split.bounds.half_area() + unalignedSplitSAH = params.sah_node_cost * unaligned_split.bounds.half_area() +
params.sah_primitive_cost * unaligned_split.nodeSAH; params.sah_primitive_cost * unaligned_split.nodeSAH;
/* TOOD(sergey): Check we can create leaf already. */ /* TOOD(sergey): Check we can create leaf already. */
/* Check whether unaligned split is better than the regular one. */ /* Check whether unaligned split is better than the regulat one. */
if (unalignedSplitSAH < splitSAH) { if (unalignedSplitSAH < splitSAH) {
do_unalinged_split = true; do_unalinged_split = true;
} }
@@ -842,7 +842,7 @@ BVHNode *BVHBuild::create_leaf_node(const BVHRange &range, const vector<BVHRefer
vector<BVHReference, LeafReferenceStackAllocator> object_references; vector<BVHReference, LeafReferenceStackAllocator> object_references;
uint visibility[PRIMITIVE_NUM_TOTAL] = {0}; uint visibility[PRIMITIVE_NUM_TOTAL] = {0};
/* NOTE: Keep initialization in sync with actual number of primitives. */ /* NOTE: Keep initializtion in sync with actual number of primitives. */
BoundBox bounds[PRIMITIVE_NUM_TOTAL] = { BoundBox bounds[PRIMITIVE_NUM_TOTAL] = {
BoundBox::empty, BoundBox::empty, BoundBox::empty, BoundBox::empty}; BoundBox::empty, BoundBox::empty, BoundBox::empty, BoundBox::empty};
int ob_num = 0; int ob_num = 0;
@@ -851,7 +851,7 @@ BVHNode *BVHBuild::create_leaf_node(const BVHRange &range, const vector<BVHRefer
for (int i = 0; i < range.size(); i++) { for (int i = 0; i < range.size(); i++) {
const BVHReference &ref = references[range.start() + i]; const BVHReference &ref = references[range.start() + i];
if (ref.prim_index() != -1) { if (ref.prim_index() != -1) {
uint32_t type_index = bitscan((uint32_t)(ref.prim_type() & PRIMITIVE_ALL)); int type_index = bitscan(ref.prim_type() & PRIMITIVE_ALL);
p_ref[type_index].push_back(ref); p_ref[type_index].push_back(ref);
p_type[type_index].push_back(ref.prim_type()); p_type[type_index].push_back(ref.prim_type());
p_index[type_index].push_back(ref.prim_index()); p_index[type_index].push_back(ref.prim_index());


@@ -31,6 +31,8 @@
#ifdef WITH_EMBREE #ifdef WITH_EMBREE
# include <embree3/rtcore_geometry.h> # include <embree3/rtcore_geometry.h>
# include <pmmintrin.h>
# include <xmmintrin.h>
# include "bvh/bvh_embree.h" # include "bvh/bvh_embree.h"
@@ -304,7 +306,8 @@ BVHEmbree::BVHEmbree(const BVHParams &params_,
rtc_device(NULL), rtc_device(NULL),
build_quality(RTC_BUILD_QUALITY_REFIT) build_quality(RTC_BUILD_QUALITY_REFIT)
{ {
SIMD_SET_FLUSH_TO_ZERO; _MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_ON);
_MM_SET_DENORMALS_ZERO_MODE(_MM_DENORMALS_ZERO_ON);
} }
BVHEmbree::~BVHEmbree() BVHEmbree::~BVHEmbree()


@@ -184,7 +184,7 @@ CUDADevice::CUDADevice(DeviceInfo &info, Stats &stats, Profiler &profiler, bool
functions.loaded = false; functions.loaded = false;
/* Initialize CUDA. */ /* Intialize CUDA. */
CUresult result = cuInit(0); CUresult result = cuInit(0);
if (result != CUDA_SUCCESS) { if (result != CUDA_SUCCESS) {
set_error(string_printf("Failed to initialize CUDA runtime (%s)", cuewErrorString(result))); set_error(string_printf("Failed to initialize CUDA runtime (%s)", cuewErrorString(result)));


@@ -296,7 +296,7 @@ class MultiDevice : public Device {
i++; i++;
} }
/* Change geometry BVH pointers back to the multi BVH. */ /* Change geomtry BVH pointers back to the multi BVH */
for (size_t k = 0; k < bvh->geometry.size(); ++k) { for (size_t k = 0; k < bvh->geometry.size(); ++k) {
bvh->geometry[k]->bvh = geom_bvhs[k]; bvh->geometry[k]->bvh = geom_bvhs[k];
} }


@@ -80,7 +80,7 @@ class network_device_memory : public device_memory {
vector<char> local_data; vector<char> local_data;
}; };
/* Common network error function / object for both DeviceNetwork and DeviceServer. */ /* Common netowrk error function / object for both DeviceNetwork and DeviceServer*/
class NetworkError { class NetworkError {
public: public:
NetworkError() NetworkError()

Some files were not shown because too many files have changed in this diff Show More