Compare commits
186 Commits
temp-sprea
...
geometry-n
Author | SHA1 | Date | |
---|---|---|---|
35910c2895 | |||
960b2fcc06 | |||
b401cde634 | |||
9b70ee10a9 | |||
5a6959c816 | |||
1c60780bc0 | |||
1ca5e111b2 | |||
7f29d8905e | |||
69a44a8e2f | |||
4ea0201f95 | |||
7b4b22d50a | |||
f7206a555c | |||
47d5fea4d0 | |||
866ce9f380 | |||
ef9a90f07b | |||
096cf62dbf | |||
11a00e7530 | |||
38e2cfe34d | |||
fd19733f74 | |||
6a55b85a5e | |||
17c08cbd57 | |||
198145dfec | |||
9c65c22fa1 | |||
04aed4f194 | |||
3d3f69054e | |||
62b88ae187 | |||
2cf008c933 | |||
063c8ce466 | |||
d8b9e77cc6 | |||
edab1d6dfe | |||
9ed073546e | |||
cb7713fd33 | |||
60ae3551e3 | |||
2852d253e6 | |||
cfc9229cbe | |||
1e289a0748 | |||
4a962c6589 | |||
1a7c0af729 | |||
274fd5bb4b | |||
61b3bdaebc | |||
08e106f440 | |||
9e6aa67a8d | |||
0204f9865e | |||
22dc0187be | |||
43c47813ab | |||
51b9aa6f4a | |||
0de26eec08 | |||
6fac6f2925 | |||
41a5883068 | |||
4b4e37b75e | |||
bef0ef676a | |||
27dbc15c59 | |||
ea7949fea4 | |||
f45d75be63 | |||
8bbaa0b485 | |||
d00fe1bf8a | |||
22289d2925 | |||
5b59354623 | |||
799e25cec9 | |||
0f831c5234 | |||
bc27610b02 | |||
![]() |
28a76665a0 | ||
083e073bf4 | |||
cc2f75b8ce | |||
0b1ba9a404 | |||
ab171f0a21 | |||
c2e3b0409c | |||
76b7c2bfb3 | |||
e8dfdabfbf | |||
1999210b80 | |||
877503e85f | |||
7b07c8b33c | |||
28e6b479cc | |||
2e165a10c8 | |||
407c9a9426 | |||
77ddb580ee | |||
e60c7feb36 | |||
56ad82a2c8 | |||
19d8da9b4c | |||
75715102fb | |||
8fb56e1d80 | |||
b83d02f5fe | |||
4bc879fa9f | |||
375f5cf25d | |||
1d9520a483 | |||
07a87babfd | |||
b5d209121c | |||
4e8b3f8f29 | |||
bf4024ddbf | |||
e5e6a637ea | |||
7f55ff53de | |||
7963dd9efd | |||
901c04fc1e | |||
1830c19051 | |||
![]() |
bc04353584 | ||
6ac8934bab | |||
e4ecafeab3 | |||
034f942be5 | |||
6f34a1ac43 | |||
115dbe4524 | |||
4cff69c441 | |||
![]() |
1ba6af4273 | ||
0733fdede9 | |||
f503a24632 | |||
8bb7b4fc34 | |||
a87a541398 | |||
6da6e8be2a | |||
98c72734fb | |||
e9aa2df089 | |||
3a1a1d9401 | |||
a130ff4e3b | |||
4af87c9fd5 | |||
61f33299e3 | |||
96453d5f3b | |||
6f3c59429c | |||
5d1470fefe | |||
85306bd451 | |||
bedb319442 | |||
79dff444b3 | |||
bc6dd3d8f6 | |||
d9dc0a8545 | |||
![]() |
2d29ef22b5 | ||
0ddb9a1cc5 | |||
1e3517e201 | |||
7d3d24a883 | |||
![]() |
c5ee119491 | ||
ca8d2283d3 | |||
ff239491ed | |||
464485023f | |||
9925063e43 | |||
f48ad3c0b8 | |||
f9e716907d | |||
3e5eb155c0 | |||
a078d917d2 | |||
402bab153d | |||
c1c9b018b7 | |||
9ab137d78e | |||
4f6a6ecd64 | |||
49d0aebc80 | |||
fdbe5d30bf | |||
9263688324 | |||
9c9d8a715c | |||
cf433a30ff | |||
491b4d4cee | |||
88a5b233d1 | |||
43fc89beb2 | |||
2ce95d2060 | |||
94175bdef4 | |||
6d9644e91b | |||
37a7514f3d | |||
0de0e59bee | |||
8a32ec6fb1 | |||
1a25faabd0 | |||
01a7c1952a | |||
44ba3cc6f5 | |||
bff7a5a9a2 | |||
3e4e9ce12d | |||
4fbb0a146c | |||
23fd42f87a | |||
1e2f4cf8cf | |||
eb38c719fe | |||
9a0066e8d2 | |||
ca0cfaa1a5 | |||
ddaa395aaf | |||
3e7994c7dd | |||
c6c27354e9 | |||
4c0bdaf387 | |||
ceb2663d31 | |||
80bab453cd | |||
5acf99b539 | |||
b2483f5796 | |||
7040937717 | |||
9fe3112260 | |||
f7796ed01d | |||
08c45178f6 | |||
38eb804cb3 | |||
068cfde079 | |||
7f1943f0d3 | |||
56f7153220 | |||
025615993c | |||
2ff0a6f5be | |||
47388c1684 | |||
ab3e003366 | |||
70fa23989e | |||
e9110d7017 | |||
819d40ebb1 |
10
.clang-tidy
10
.clang-tidy
@@ -34,13 +34,17 @@ Checks: >
|
||||
modernize-*,
|
||||
-modernize-use-auto,
|
||||
-modernize-use-trailing-return-type,
|
||||
-modernize-deprecated-headers,
|
||||
-modernize-avoid-c-arrays,
|
||||
-modernize-use-equals-default,
|
||||
-modernize-use-nodiscard,
|
||||
-modernize-use-using,
|
||||
-modernize-loop-convert,
|
||||
-modernize-pass-by-value,
|
||||
-modernize-use-default-member-init,
|
||||
-modernize-raw-string-literal,
|
||||
-modernize-avoid-bind,
|
||||
-modernize-use-override,
|
||||
-modernize-use-transparent-functors,
|
||||
|
||||
WarningsAsErrors: '*'
|
||||
CheckOptions:
|
||||
- key: modernize-use-default-member-init.UseAssignment
|
||||
value: 1
|
||||
|
@@ -10,9 +10,8 @@
|
||||
# Changes that belong here:
|
||||
# - Massive comment, doxy-sections, or spelling corrections.
|
||||
# - Clang-format, PEP8 or other automated changes which are *strictly* "no functional change".
|
||||
# - Several commits should be added to this list at once, because adding
|
||||
# one extra commit (to edit this file) after every cleanup is noisy.
|
||||
# - No clang-tidy changes.
|
||||
# - Several smaller commits should be added to this list at once, because adding
|
||||
# one extra commit (to edit this file) after every small cleanup is noisy.
|
||||
#
|
||||
# Note:
|
||||
# - The comment above the SHA should be the first line of the commit.
|
||||
@@ -92,13 +91,3 @@ c42a6b77b52560d257279de2cb624b4ef2c0d24c
|
||||
|
||||
# Cleanup: use doxy sections for imbuf
|
||||
c207f7c22e1439e0b285fba5d2c072bdae23f981
|
||||
|
||||
# Cleanup: clang-format
|
||||
c4d8f6a4a8ddc29ed27311ed7578b3c8c31399d2
|
||||
b5d310b569e07a937798a2d38539cfd290149f1c
|
||||
8c846cccd6bdfd3e90a695fabbf05f53e5466a57
|
||||
4eac03d821fa17546f562485f7d073813a5e5943
|
||||
1166110a9d66af9c5a47cee2be591f50fdc445e8
|
||||
|
||||
# Cleanup: clang-format.
|
||||
40d4a4cb1a6b4c3c2a486e8f2868f547530e0811
|
||||
|
177
CMakeLists.txt
177
CMakeLists.txt
@@ -63,9 +63,6 @@ list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/build_files/cmake/platform")
|
||||
# avoid having empty buildtype
|
||||
if(NOT DEFINED CMAKE_BUILD_TYPE_INIT)
|
||||
set(CMAKE_BUILD_TYPE_INIT "Release")
|
||||
# Internal logic caches this variable, avoid showing it by default
|
||||
# since it's easy to accidentally set instead of the build type.
|
||||
mark_as_advanced(CMAKE_BUILD_TYPE_INIT)
|
||||
endif()
|
||||
|
||||
# Omit superfluous "Up-to-date" messages.
|
||||
@@ -167,6 +164,10 @@ if(APPLE)
|
||||
endif()
|
||||
|
||||
option(WITH_BUILDINFO "Include extra build details (only disable for development & faster builds)" ON)
|
||||
if(${CMAKE_VERSION} VERSION_LESS 2.8.8)
|
||||
# add_library OBJECT arg unsupported
|
||||
set(WITH_BUILDINFO OFF)
|
||||
endif()
|
||||
set(BUILDINFO_OVERRIDE_DATE "" CACHE STRING "Use instead of the current date for reproducible builds (empty string disables this option)")
|
||||
set(BUILDINFO_OVERRIDE_TIME "" CACHE STRING "Use instead of the current time for reproducible builds (empty string disables this option)")
|
||||
set(CPACK_OVERRIDE_PACKAGENAME "" CACHE STRING "Use instead of the standard packagename (empty string disables this option)")
|
||||
@@ -177,7 +178,9 @@ mark_as_advanced(BUILDINFO_OVERRIDE_TIME)
|
||||
option(WITH_IK_ITASC "Enable ITASC IK solver (only disable for development & for incompatible C++ compilers)" ON)
|
||||
option(WITH_IK_SOLVER "Enable Legacy IK solver (only disable for development)" ON)
|
||||
option(WITH_FFTW3 "Enable FFTW3 support (Used for smoke, ocean sim, and audio effects)" ON)
|
||||
option(WITH_PUGIXML "Enable PugiXML support (Used for OpenImageIO, Grease Pencil SVG export)" ON)
|
||||
if(WIN32)
|
||||
option(WITH_PUGIXML "Enable PugiXML support (Used for OpenImageIO, Grease Pencil SVG export)" ON)
|
||||
endif()
|
||||
option(WITH_BULLET "Enable Bullet (Physics Engine)" ON)
|
||||
option(WITH_SYSTEM_BULLET "Use the systems bullet library (currently unsupported due to missing features in upstream!)" )
|
||||
mark_as_advanced(WITH_SYSTEM_BULLET)
|
||||
@@ -204,7 +207,6 @@ option(WITH_OPENVDB_BLOSC "Enable blosc compression for OpenVDB, only enable if
|
||||
option(WITH_OPENVDB_3_ABI_COMPATIBLE "Assume OpenVDB library has been compiled with version 3 ABI compatibility" OFF)
|
||||
mark_as_advanced(WITH_OPENVDB_3_ABI_COMPATIBLE)
|
||||
option(WITH_NANOVDB "Enable usage of NanoVDB data structure for rendering on the GPU" ON)
|
||||
option(WITH_HARU "Enable features relying on Libharu (Grease pencil PDF export)" ON)
|
||||
|
||||
# GHOST Windowing Library Options
|
||||
option(WITH_GHOST_DEBUG "Enable debugging output for the GHOST library" OFF)
|
||||
@@ -347,21 +349,16 @@ if(UNIX AND NOT APPLE)
|
||||
endif()
|
||||
|
||||
option(WITH_PYTHON_INSTALL "Copy system python into the blender install folder" ON)
|
||||
|
||||
if((WITH_AUDASPACE AND NOT WITH_SYSTEM_AUDASPACE) OR WITH_MOD_FLUID)
|
||||
option(WITH_PYTHON_NUMPY "Include NumPy in Blender (used by Audaspace and Mantaflow)" ON)
|
||||
endif()
|
||||
|
||||
if(WIN32 OR APPLE)
|
||||
# Windows and macOS have this bundled with Python libraries.
|
||||
elseif(WITH_PYTHON_INSTALL OR WITH_PYTHON_NUMPY)
|
||||
elseif(WITH_PYTHON_INSTALL OR (WITH_AUDASPACE AND NOT WITH_SYSTEM_AUDASPACE))
|
||||
set(PYTHON_NUMPY_PATH "" CACHE PATH "Path to python site-packages or dist-packages containing 'numpy' module")
|
||||
mark_as_advanced(PYTHON_NUMPY_PATH)
|
||||
set(PYTHON_NUMPY_INCLUDE_DIRS "" CACHE PATH "Path to the include directory of the NumPy module")
|
||||
set(PYTHON_NUMPY_INCLUDE_DIRS ${PYTHON_NUMPY_PATH}/numpy/core/include CACHE PATH "Path to the include directory of the numpy module")
|
||||
mark_as_advanced(PYTHON_NUMPY_INCLUDE_DIRS)
|
||||
endif()
|
||||
if(WITH_PYTHON_INSTALL)
|
||||
option(WITH_PYTHON_INSTALL_NUMPY "Copy system NumPy into the blender install folder" ON)
|
||||
option(WITH_PYTHON_INSTALL_NUMPY "Copy system numpy into the blender install folder" ON)
|
||||
|
||||
if(UNIX AND NOT APPLE)
|
||||
option(WITH_PYTHON_INSTALL_REQUESTS "Copy system requests into the blender install folder" ON)
|
||||
@@ -370,8 +367,8 @@ if(WITH_PYTHON_INSTALL)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
option(WITH_CPU_SIMD "Enable SIMD instruction if they're detected on the host machine" ON)
|
||||
mark_as_advanced(WITH_CPU_SIMD)
|
||||
option(WITH_CPU_SSE "Enable SIMD instruction if they're detected on the host machine" ON)
|
||||
mark_as_advanced(WITH_CPU_SSE)
|
||||
|
||||
# Cycles
|
||||
option(WITH_CYCLES "Enable Cycles Render Engine" ON)
|
||||
@@ -416,7 +413,6 @@ else()
|
||||
option(LLVM_STATIC "Link with LLVM static libraries" OFF)
|
||||
endif()
|
||||
mark_as_advanced(LLVM_STATIC)
|
||||
option(WITH_CLANG "Use Clang" OFF)
|
||||
|
||||
# disable for now, but plan to support on all platforms eventually
|
||||
option(WITH_MEM_JEMALLOC "Enable malloc replacement (http://www.canonware.com/jemalloc)" ON)
|
||||
@@ -433,8 +429,8 @@ mark_as_advanced(WITH_CXX_GUARDEDALLOC)
|
||||
option(WITH_ASSERT_ABORT "Call abort() when raising an assertion through BLI_assert()" ON)
|
||||
mark_as_advanced(WITH_ASSERT_ABORT)
|
||||
|
||||
if((UNIX AND NOT APPLE) OR (CMAKE_GENERATOR MATCHES "^Visual Studio.+"))
|
||||
option(WITH_CLANG_TIDY "Use Clang Tidy to analyze the source code (only enable for development on Linux using Clang, or Windows using the Visual Studio IDE)" OFF)
|
||||
if(UNIX AND NOT APPLE)
|
||||
option(WITH_CLANG_TIDY "Use Clang Tidy to analyze the source code (only enable for development on Linux using Clang)" OFF)
|
||||
mark_as_advanced(WITH_CLANG_TIDY)
|
||||
endif()
|
||||
|
||||
@@ -611,12 +607,6 @@ if(WIN32)
|
||||
|
||||
endif()
|
||||
|
||||
if(UNIX)
|
||||
# See WITH_WINDOWS_SCCACHE for Windows.
|
||||
option(WITH_COMPILER_CCACHE "Use ccache to improve rebuild times (Works with Ninja, Makefiles and Xcode)" OFF)
|
||||
mark_as_advanced(WITH_COMPILER_CCACHE)
|
||||
endif()
|
||||
|
||||
# The following only works with the Ninja generator in CMake >= 3.0.
|
||||
if("${CMAKE_GENERATOR}" MATCHES "Ninja")
|
||||
option(WITH_NINJA_POOL_JOBS
|
||||
@@ -711,8 +701,10 @@ set_and_warn_dependency(WITH_BOOST WITH_OPENCOLORIO OFF)
|
||||
set_and_warn_dependency(WITH_BOOST WITH_QUADRIFLOW OFF)
|
||||
set_and_warn_dependency(WITH_BOOST WITH_USD OFF)
|
||||
set_and_warn_dependency(WITH_BOOST WITH_ALEMBIC OFF)
|
||||
set_and_warn_dependency(WITH_PUGIXML WITH_CYCLES_OSL OFF)
|
||||
set_and_warn_dependency(WITH_PUGIXML WITH_OPENIMAGEIO OFF)
|
||||
if(WIN32)
|
||||
set_and_warn_dependency(WITH_PUGIXML WITH_CYCLES_OSL OFF)
|
||||
set_and_warn_dependency(WITH_PUGIXML WITH_OPENIMAGEIO OFF)
|
||||
endif()
|
||||
|
||||
if(WITH_BOOST AND NOT (WITH_CYCLES OR WITH_OPENIMAGEIO OR WITH_INTERNATIONAL OR
|
||||
WITH_OPENVDB OR WITH_OPENCOLORIO OR WITH_USD OR WITH_ALEMBIC))
|
||||
@@ -729,12 +721,8 @@ set_and_warn_dependency(WITH_TBB WITH_MOD_FLUID OFF)
|
||||
# NanoVDB requires OpenVDB to convert the data structure
|
||||
set_and_warn_dependency(WITH_OPENVDB WITH_NANOVDB OFF)
|
||||
|
||||
# OpenVDB and OpenColorIO uses 'half' type from OpenEXR
|
||||
# OpenVDB uses 'half' type from OpenEXR & fails to link without OpenEXR enabled.
|
||||
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENVDB OFF)
|
||||
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENCOLORIO OFF)
|
||||
|
||||
# Haru needs `TIFFFaxBlackCodes` & `TIFFFaxWhiteCodes` symbols from TIFF.
|
||||
set_and_warn_dependency(WITH_IMAGE_TIFF WITH_HARU OFF)
|
||||
|
||||
# auto enable openimageio for cycles
|
||||
if(WITH_CYCLES)
|
||||
@@ -743,7 +731,6 @@ if(WITH_CYCLES)
|
||||
# auto enable llvm for cycles_osl
|
||||
if(WITH_CYCLES_OSL)
|
||||
set(WITH_LLVM ON CACHE BOOL "" FORCE)
|
||||
set(WITH_CLANG ON CACHE BOOL "" FORCE)
|
||||
endif()
|
||||
else()
|
||||
set(WITH_CYCLES_OSL OFF)
|
||||
@@ -777,6 +764,14 @@ if(WITH_GHOST_SDL OR WITH_HEADLESS)
|
||||
set(WITH_XR_OPENXR OFF)
|
||||
endif()
|
||||
|
||||
if(WITH_CPU_SSE)
|
||||
TEST_SSE_SUPPORT(COMPILER_SSE_FLAG COMPILER_SSE2_FLAG)
|
||||
else()
|
||||
message(STATUS "SSE and SSE2 optimizations are DISABLED!")
|
||||
set(COMPILER_SSE_FLAG)
|
||||
set(COMPILER_SSE2_FLAG)
|
||||
endif()
|
||||
|
||||
if(WITH_BUILDINFO)
|
||||
find_package(Git)
|
||||
if(NOT GIT_FOUND)
|
||||
@@ -825,8 +820,8 @@ if(WITH_PYTHON)
|
||||
# Do this before main 'platform_*' checks,
|
||||
# because UNIX will search for the old Python paths which may not exist.
|
||||
# giving errors about missing paths before this case is met.
|
||||
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.9")
|
||||
message(FATAL_ERROR "At least Python 3.9 is required to build")
|
||||
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.7")
|
||||
message(FATAL_ERROR "At least Python 3.7 is required to build")
|
||||
endif()
|
||||
|
||||
file(GLOB RESULT "${CMAKE_SOURCE_DIR}/release/scripts/addons")
|
||||
@@ -888,11 +883,9 @@ if(NOT CMAKE_BUILD_TYPE MATCHES "Release")
|
||||
|
||||
if(APPLE AND COMPILER_ASAN_LIBRARY)
|
||||
string(REPLACE " " ";" _list_COMPILER_ASAN_CFLAGS ${COMPILER_ASAN_CFLAGS})
|
||||
set(_is_CONFIG_DEBUG "$<OR:$<CONFIG:Debug>,$<CONFIG:RelWithDebInfo>>")
|
||||
add_compile_options("$<${_is_CONFIG_DEBUG}:${_list_COMPILER_ASAN_CFLAGS}>")
|
||||
add_link_options("$<${_is_CONFIG_DEBUG}:-fno-omit-frame-pointer;-fsanitize=address>")
|
||||
add_compile_options("$<$<NOT:$<CONFIG:Release>>:${_list_COMPILER_ASAN_CFLAGS}>")
|
||||
add_link_options("$<$<NOT:$<CONFIG:Release>>:-fno-omit-frame-pointer;-fsanitize=address>")
|
||||
unset(_list_COMPILER_ASAN_CFLAGS)
|
||||
unset(_is_CONFIG_DEBUG)
|
||||
elseif(COMPILER_ASAN_LIBRARY)
|
||||
set(PLATFORM_LINKLIBS "${PLATFORM_LINKLIBS};${COMPILER_ASAN_LIBRARY}")
|
||||
set(PLATFORM_LINKFLAGS "${COMPILER_ASAN_LIBRARY} ${COMPILER_ASAN_LINKER_FLAGS}")
|
||||
@@ -901,18 +894,6 @@ if(NOT CMAKE_BUILD_TYPE MATCHES "Release")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Test SIMD support, before platform includes to determine if sse2neon is needed.
|
||||
if(WITH_CPU_SIMD)
|
||||
set(COMPILER_SSE_FLAG)
|
||||
set(COMPILER_SSE2_FLAG)
|
||||
|
||||
# Test Neon first since macOS Arm can compile and run x86-64 SSE binaries.
|
||||
TEST_NEON_SUPPORT()
|
||||
if(NOT SUPPORT_NEON_BUILD)
|
||||
TEST_SSE_SUPPORT(COMPILER_SSE_FLAG COMPILER_SSE2_FLAG)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Main Platform Checks
|
||||
#
|
||||
@@ -968,49 +949,22 @@ if(WITH_INTERNATIONAL)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Enable SIMD support if detected by TEST_SSE_SUPPORT() or TEST_NEON_SUPPORT().
|
||||
#
|
||||
# This is done globally, so that all modules can use it if available, and
|
||||
# because these are used in headers used by many modules.
|
||||
if(WITH_CPU_SIMD)
|
||||
if(SUPPORT_NEON_BUILD)
|
||||
# Neon
|
||||
if(SSE2NEON_FOUND)
|
||||
blender_include_dirs_sys("${SSE2NEON_INCLUDE_DIRS}")
|
||||
add_definitions(-DWITH_SSE2NEON)
|
||||
endif()
|
||||
else()
|
||||
# SSE
|
||||
if(SUPPORT_SSE_BUILD)
|
||||
string(PREPEND PLATFORM_CFLAGS "${COMPILER_SSE_FLAG} ")
|
||||
add_definitions(-D__SSE__ -D__MMX__)
|
||||
endif()
|
||||
if(SUPPORT_SSE2_BUILD)
|
||||
string(APPEND PLATFORM_CFLAGS " ${COMPILER_SSE2_FLAG}")
|
||||
add_definitions(-D__SSE2__)
|
||||
if(NOT SUPPORT_SSE_BUILD) # don't double up
|
||||
add_definitions(-D__MMX__)
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
# See TEST_SSE_SUPPORT() for how this is defined.
|
||||
|
||||
# Print instructions used
|
||||
if(SUPPORT_NEON_BUILD)
|
||||
if(SSE2NEON_FOUND)
|
||||
message(STATUS "Neon SIMD instructions enabled")
|
||||
else()
|
||||
message(STATUS "Neon SIMD instructions detected but unused, requires sse2neon")
|
||||
endif()
|
||||
elseif(SUPPORT_SSE2_BUILD)
|
||||
message(STATUS "SSE2 SIMD instructions enabled")
|
||||
elseif(SUPPORT_SSE_BUILD)
|
||||
message(STATUS "SSE SIMD instructions enabled")
|
||||
else()
|
||||
message(STATUS "No SIMD instructions detected")
|
||||
endif()
|
||||
else()
|
||||
message(STATUS "SIMD instructions disabled")
|
||||
# Do it globally, SSE2 is required for quite some time now.
|
||||
# Doing it now allows to use SSE/SSE2 in inline headers.
|
||||
if(SUPPORT_SSE_BUILD)
|
||||
string(PREPEND PLATFORM_CFLAGS "${COMPILER_SSE_FLAG} ")
|
||||
add_definitions(-D__SSE__ -D__MMX__)
|
||||
endif()
|
||||
if(SUPPORT_SSE2_BUILD)
|
||||
string(APPEND PLATFORM_CFLAGS " ${COMPILER_SSE2_FLAG}")
|
||||
add_definitions(-D__SSE2__)
|
||||
if(NOT SUPPORT_SSE_BUILD) # don't double up
|
||||
add_definitions(-D__MMX__)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
|
||||
# set the endian define
|
||||
if(MSVC)
|
||||
@@ -1056,9 +1010,6 @@ if(WITH_OPENVDB)
|
||||
list(APPEND OPENVDB_DEFINITIONS -DOPENVDB_3_ABI_COMPATIBLE)
|
||||
endif()
|
||||
|
||||
# OpenVDB headers use deprecated TBB headers, silence warning.
|
||||
list(APPEND OPENVDB_DEFINITIONS -DTBB_SUPPRESS_DEPRECATED_MESSAGES=1)
|
||||
|
||||
list(APPEND OPENVDB_INCLUDE_DIRS
|
||||
${BOOST_INCLUDE_DIR}
|
||||
${TBB_INCLUDE_DIRS}
|
||||
@@ -1221,7 +1172,6 @@ if(WITH_OPENMP)
|
||||
if(NOT WITH_OPENMP_STATIC)
|
||||
string(APPEND CMAKE_C_FLAGS " ${OpenMP_C_FLAGS}")
|
||||
string(APPEND CMAKE_CXX_FLAGS " ${OpenMP_CXX_FLAGS}")
|
||||
string(APPEND CMAKE_EXE_LINKER_FLAGS " ${OpenMP_LINKER_FLAGS}")
|
||||
else()
|
||||
# Typically avoid adding flags as defines but we can't
|
||||
# pass OpenMP flags to the linker for static builds, meaning
|
||||
@@ -1475,7 +1425,6 @@ if(CMAKE_COMPILER_IS_GNUCC)
|
||||
# gcc 4.2 gives annoying warnings on every file with this
|
||||
if(NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "4.3")
|
||||
ADD_CHECK_C_COMPILER_FLAG(C_WARNINGS C_WARN_UNINITIALIZED -Wuninitialized)
|
||||
ADD_CHECK_CXX_COMPILER_FLAG(CXX_WARNINGS CXX_WARN_UNINITIALIZED -Wuninitialized)
|
||||
endif()
|
||||
|
||||
# versions before gcc4.6 give many BLI_math warnings
|
||||
@@ -1540,13 +1489,11 @@ if(CMAKE_COMPILER_IS_GNUCC)
|
||||
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_FORMAT -Wno-format)
|
||||
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_SWITCH -Wno-switch)
|
||||
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_UNUSED_VARIABLE -Wno-unused-variable)
|
||||
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_UNUSED_VARIABLE -Wno-uninitialized)
|
||||
|
||||
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_CLASS_MEMACCESS -Wno-class-memaccess)
|
||||
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_COMMENT -Wno-comment)
|
||||
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_TYPEDEFS -Wno-unused-local-typedefs)
|
||||
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_VARIABLE -Wno-unused-variable)
|
||||
ADD_CHECK_CXX_COMPILER_FLAG(CXX_REMOVE_STRICT_FLAGS CXX_WARN_NO_UNUSED_VARIABLE -Wno-uninitialized)
|
||||
|
||||
if(CMAKE_COMPILER_IS_GNUCC AND (NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "7.0"))
|
||||
ADD_CHECK_C_COMPILER_FLAG(C_REMOVE_STRICT_FLAGS C_WARN_NO_IMPLICIT_FALLTHROUGH -Wno-implicit-fallthrough)
|
||||
@@ -1676,16 +1623,19 @@ if(WITH_PYTHON)
|
||||
|
||||
if(WIN32 OR APPLE)
|
||||
# Windows and macOS have this bundled with Python libraries.
|
||||
elseif((WITH_PYTHON_INSTALL AND WITH_PYTHON_INSTALL_NUMPY) OR WITH_PYTHON_NUMPY)
|
||||
elseif((WITH_PYTHON_INSTALL AND WITH_PYTHON_INSTALL_NUMPY) OR (WITH_AUDASPACE AND NOT WITH_SYSTEM_AUDASPACE))
|
||||
if(("${PYTHON_NUMPY_PATH}" STREQUAL "") OR (${PYTHON_NUMPY_PATH} MATCHES NOTFOUND))
|
||||
find_python_package(numpy "core/include")
|
||||
find_python_package(numpy)
|
||||
unset(PYTHON_NUMPY_INCLUDE_DIRS CACHE)
|
||||
set(PYTHON_NUMPY_INCLUDE_DIRS ${PYTHON_NUMPY_PATH}/numpy/core/include CACHE PATH "Path to the include directory of the numpy module")
|
||||
mark_as_advanced(PYTHON_NUMPY_INCLUDE_DIRS)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WIN32 OR APPLE)
|
||||
# pass, we have this in lib/python/site-packages
|
||||
elseif(WITH_PYTHON_INSTALL_REQUESTS)
|
||||
find_python_package(requests "")
|
||||
find_python_package(requests)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
@@ -1808,20 +1758,8 @@ if(WITH_BLENDER)
|
||||
# internal and external library information first, for test linking
|
||||
add_subdirectory(source)
|
||||
elseif(WITH_CYCLES_STANDALONE)
|
||||
add_subdirectory(intern/glew-mx)
|
||||
add_subdirectory(intern/guardedalloc)
|
||||
add_subdirectory(intern/libc_compat)
|
||||
add_subdirectory(intern/numaapi)
|
||||
add_subdirectory(intern/sky)
|
||||
|
||||
add_subdirectory(intern/cycles)
|
||||
add_subdirectory(extern/clew)
|
||||
if(WITH_CYCLES_LOGGING)
|
||||
if(NOT WITH_SYSTEM_GFLAGS)
|
||||
add_subdirectory(extern/gflags)
|
||||
endif()
|
||||
add_subdirectory(extern/glog)
|
||||
endif()
|
||||
if(WITH_CUDA_DYNLOAD)
|
||||
add_subdirectory(extern/cuew)
|
||||
endif()
|
||||
@@ -1830,10 +1768,6 @@ elseif(WITH_CYCLES_STANDALONE)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Testing
|
||||
add_subdirectory(tests)
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Blender Application
|
||||
if(WITH_BLENDER)
|
||||
@@ -1841,6 +1775,11 @@ if(WITH_BLENDER)
|
||||
endif()
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Testing
|
||||
add_subdirectory(tests)
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Define 'heavy' submodules (for Ninja builder when using pools).
|
||||
setup_heavy_lib_pool()
|
||||
@@ -1890,12 +1829,10 @@ if(FIRST_RUN)
|
||||
info_cfg_text("Build Options:")
|
||||
info_cfg_option(WITH_ALEMBIC)
|
||||
info_cfg_option(WITH_BULLET)
|
||||
info_cfg_option(WITH_CLANG)
|
||||
info_cfg_option(WITH_CYCLES)
|
||||
info_cfg_option(WITH_FFTW3)
|
||||
info_cfg_option(WITH_FREESTYLE)
|
||||
info_cfg_option(WITH_GMP)
|
||||
info_cfg_option(WITH_HARU)
|
||||
info_cfg_option(WITH_IK_ITASC)
|
||||
info_cfg_option(WITH_IK_SOLVER)
|
||||
info_cfg_option(WITH_INPUT_NDOF)
|
||||
@@ -1904,8 +1841,6 @@ if(FIRST_RUN)
|
||||
info_cfg_option(WITH_OPENCOLORIO)
|
||||
info_cfg_option(WITH_OPENIMAGEDENOISE)
|
||||
info_cfg_option(WITH_OPENVDB)
|
||||
info_cfg_option(WITH_POTRACE)
|
||||
info_cfg_option(WITH_PUGIXML)
|
||||
info_cfg_option(WITH_QUADRIFLOW)
|
||||
info_cfg_option(WITH_TBB)
|
||||
info_cfg_option(WITH_USD)
|
||||
|
41
GNUmakefile
41
GNUmakefile
@@ -26,31 +26,25 @@
|
||||
|
||||
define HELP_TEXT
|
||||
|
||||
Blender Convenience Targets
|
||||
Convenience Targets
|
||||
Provided for building Blender, (multiple at once can be used).
|
||||
|
||||
* debug: Build a debug binary.
|
||||
* full: Enable all supported dependencies & options.
|
||||
* lite: Disable non essential features for a smaller binary and faster build.
|
||||
* release: Complete build with all options enabled including CUDA and Optix, matching the releases on blender.org
|
||||
* release Complete build with all options enabled including CUDA and Optix, matching the releases on blender.org
|
||||
* headless: Build without an interface (renderfarm or server automation).
|
||||
* cycles: Build Cycles standalone only, without Blender.
|
||||
* bpy: Build as a python module which can be loaded from python directly.
|
||||
* deps: Build library dependencies (intended only for platform maintainers).
|
||||
|
||||
* developer: Enable faster builds, error checking and tests, recommended for developers.
|
||||
* config: Run cmake configuration tool to set build options.
|
||||
* ninja: Use ninja build tool for faster builds.
|
||||
* ccache: Use ccache for faster rebuilds.
|
||||
|
||||
Note: passing the argument 'BUILD_DIR=path' when calling make will override the default build dir.
|
||||
Note: passing the argument 'BUILD_CMAKE_ARGS=args' lets you add cmake arguments.
|
||||
|
||||
Other Convenience Targets
|
||||
Provided for other building operations.
|
||||
|
||||
* config: Run cmake configuration tool to set build options.
|
||||
* deps: Build library dependencies (intended only for platform maintainers).
|
||||
|
||||
The existance of locally build dependancies overrides the pre-built dependencies from subversion.
|
||||
These must be manually removed from '../lib/' to go back to using the pre-compiled libraries.
|
||||
|
||||
Project Files
|
||||
Generate project files for development environments.
|
||||
@@ -90,15 +84,12 @@ Static Source Code Checking
|
||||
* check_descriptions: Check for duplicate/invalid descriptions.
|
||||
|
||||
Spell Checkers
|
||||
This runs the spell checker from the developer tools repositor.
|
||||
|
||||
* check_spelling_c: Check for spelling errors (C/C++ only),
|
||||
* check_spelling_osl: Check for spelling errors (OSL only).
|
||||
* check_spelling_py: Check for spelling errors (Python only).
|
||||
|
||||
Note: an additional word-list is maintained at: 'source/tools/check_source/check_spelling_c_config.py'
|
||||
|
||||
Note: that spell checkers can take a 'CHECK_SPELLING_CACHE' filepath argument,
|
||||
Note that spell checkers can take a 'CHECK_SPELLING_CACHE' filepath argument,
|
||||
so re-running does not need to re-check unchanged files.
|
||||
|
||||
Example:
|
||||
@@ -131,7 +122,7 @@ Utilities
|
||||
* update:
|
||||
updates git and all submodules
|
||||
|
||||
* format:
|
||||
* format
|
||||
Format source code using clang (uses PATHS if passed in). For example::
|
||||
|
||||
make format PATHS="source/blender/blenlib source/blender/blenkernel"
|
||||
@@ -191,13 +182,8 @@ endif
|
||||
ifndef DEPS_INSTALL_DIR
|
||||
DEPS_INSTALL_DIR:=$(shell dirname "$(BLENDER_DIR)")/lib/$(OS_NCASE)
|
||||
|
||||
# Add processor type to directory name, except for darwin x86_64
|
||||
# which by convention does not have it.
|
||||
ifeq ($(OS_NCASE),darwin)
|
||||
ifneq ($(CPU),x86_64)
|
||||
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
|
||||
endif
|
||||
else
|
||||
ifneq ($(OS_NCASE),darwin)
|
||||
# Add processor type to directory name
|
||||
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
|
||||
endif
|
||||
endif
|
||||
@@ -211,7 +197,7 @@ endif
|
||||
# in libraries, or python 2 for running make update to get it.
|
||||
ifeq ($(OS_NCASE),darwin)
|
||||
ifeq (, $(shell command -v $(PYTHON)))
|
||||
PYTHON:=$(DEPS_INSTALL_DIR)/python/bin/python3.7m
|
||||
PYTHON:=../lib/darwin/python/bin/python3.7m
|
||||
ifeq (, $(shell command -v $(PYTHON)))
|
||||
PYTHON:=python
|
||||
endif
|
||||
@@ -255,10 +241,6 @@ ifneq "$(findstring developer, $(MAKECMDGOALS))" ""
|
||||
CMAKE_CONFIG_ARGS:=-C"$(BLENDER_DIR)/build_files/cmake/config/blender_developer.cmake" $(CMAKE_CONFIG_ARGS)
|
||||
endif
|
||||
|
||||
ifneq "$(findstring ccache, $(MAKECMDGOALS))" ""
|
||||
CMAKE_CONFIG_ARGS:=-DWITH_COMPILER_CCACHE=YES $(CMAKE_CONFIG_ARGS)
|
||||
endif
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# build tool
|
||||
|
||||
@@ -358,7 +340,6 @@ headless: all
|
||||
bpy: all
|
||||
developer: all
|
||||
ninja: all
|
||||
ccache: all
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Build dependencies
|
||||
@@ -533,7 +514,7 @@ format: .FORCE
|
||||
|
||||
# Simple version of ./doc/python_api/sphinx_doc_gen.sh with no PDF generation.
|
||||
doc_py: .FORCE
|
||||
ASAN_OPTIONS=halt_on_error=0:${ASAN_OPTIONS} \
|
||||
ASAN_OPTIONS=halt_on_error=0 \
|
||||
$(BLENDER_BIN) --background -noaudio --factory-startup \
|
||||
--python doc/python_api/sphinx_doc_gen.py
|
||||
sphinx-build -b html -j $(NPROCS) doc/python_api/sphinx-in doc/python_api/sphinx-out
|
||||
|
@@ -31,7 +31,6 @@
|
||||
#
|
||||
# MAC OS X USAGE:
|
||||
# Install with homebrew: brew install autoconf automake bison cmake libtool pkg-config yasm
|
||||
# Additional requirements for macOS arm64: brew install flex
|
||||
# Run "make deps" from main Blender directory
|
||||
#
|
||||
# LINUX USAGE:
|
||||
@@ -71,7 +70,9 @@ include(cmake/cuew.cmake)
|
||||
include(cmake/opensubdiv.cmake)
|
||||
include(cmake/sdl.cmake)
|
||||
include(cmake/opencollada.cmake)
|
||||
include(cmake/opencolorio.cmake)
|
||||
include(cmake/llvm.cmake)
|
||||
include(cmake/clang.cmake)
|
||||
if(APPLE)
|
||||
include(cmake/openmp.cmake)
|
||||
endif()
|
||||
@@ -86,40 +87,35 @@ include(cmake/tbb.cmake)
|
||||
include(cmake/openvdb.cmake)
|
||||
include(cmake/nanovdb.cmake)
|
||||
include(cmake/python.cmake)
|
||||
option(USE_PIP_NUMPY "Install NumPy using pip wheel instead of building from source" OFF)
|
||||
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
|
||||
set(USE_PIP_NUMPY ON)
|
||||
else()
|
||||
include(cmake/numpy.cmake)
|
||||
endif()
|
||||
include(cmake/python_site_packages.cmake)
|
||||
include(cmake/package_python.cmake)
|
||||
include(cmake/numpy.cmake)
|
||||
include(cmake/usd.cmake)
|
||||
include(cmake/potrace.cmake)
|
||||
include(cmake/haru.cmake)
|
||||
# Boost needs to be included after python.cmake due to the PYTHON_BINARY variable being needed.
|
||||
include(cmake/boost.cmake)
|
||||
include(cmake/pugixml.cmake)
|
||||
include(cmake/ispc.cmake)
|
||||
include(cmake/openimagedenoise.cmake)
|
||||
include(cmake/embree.cmake)
|
||||
if((NOT APPLE) OR ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
|
||||
include(cmake/ispc.cmake)
|
||||
include(cmake/openimagedenoise.cmake)
|
||||
include(cmake/embree.cmake)
|
||||
endif()
|
||||
if(NOT APPLE)
|
||||
include(cmake/xr_openxr.cmake)
|
||||
endif()
|
||||
|
||||
# OpenColorIO and dependencies.
|
||||
include(cmake/expat.cmake)
|
||||
include(cmake/yamlcpp.cmake)
|
||||
include(cmake/opencolorio.cmake)
|
||||
|
||||
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
|
||||
include(cmake/sse2neon.cmake)
|
||||
endif()
|
||||
|
||||
if(WITH_WEBP)
|
||||
include(cmake/webp.cmake)
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
# OCIO deps
|
||||
include(cmake/tinyxml.cmake)
|
||||
include(cmake/yamlcpp.cmake)
|
||||
# LCMS is an OCIO dep, but only if you build the apps, leaving it here for convenience
|
||||
# include(cmake/lcms.cmake)
|
||||
endif()
|
||||
|
||||
if(NOT WIN32 OR ENABLE_MINGW64)
|
||||
include(cmake/gmp.cmake)
|
||||
include(cmake/openjpeg.cmake)
|
||||
|
@@ -19,6 +19,16 @@
|
||||
set(ALEMBIC_EXTRA_ARGS
|
||||
-DBUILDSTATIC=ON
|
||||
-DLINKSTATIC=ON
|
||||
-DALEMBIC_LIB_USES_BOOST=ON
|
||||
-DBoost_COMPILER:STRING=${BOOST_COMPILER_STRING}
|
||||
-DBoost_USE_MULTITHREADED=ON
|
||||
-DUSE_STATIC_BOOST=On
|
||||
-DBoost_USE_STATIC_LIBS=ON
|
||||
-DBoost_USE_STATIC_RUNTIME=OFF
|
||||
-DBoost_DEBUG=ON
|
||||
-DBOOST_ROOT=${LIBDIR}/boost
|
||||
-DBoost_NO_SYSTEM_PATHS=ON
|
||||
-DBoost_NO_BOOST_CMAKE=ON
|
||||
-DILMBASE_ROOT=${LIBDIR}/openexr
|
||||
-DALEMBIC_ILMBASE_INCLUDE_DIRECTORY=${LIBDIR}/openexr/include/OpenEXR
|
||||
-DALEMBIC_ILMBASE_HALF_LIB=${LIBDIR}/openexr/lib/${LIBPREFIX}Half${OPENEXR_VERSION_POSTFIX}${LIBEXT}
|
||||
@@ -71,6 +81,7 @@ endif()
|
||||
|
||||
add_dependencies(
|
||||
external_alembic
|
||||
external_boost
|
||||
external_zlib
|
||||
external_openexr
|
||||
)
|
||||
|
@@ -23,7 +23,7 @@ if(WIN32)
|
||||
set(BOOST_COMPILER_STRING -vc141)
|
||||
|
||||
set(BOOST_CONFIGURE_COMMAND bootstrap.bat)
|
||||
set(BOOST_BUILD_COMMAND b2)
|
||||
set(BOOST_BUILD_COMMAND bjam)
|
||||
set(BOOST_BUILD_OPTIONS runtime-link=shared )
|
||||
set(BOOST_HARVEST_CMD ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/boost/lib/ ${HARVEST_TARGET}/boost/lib/ )
|
||||
if(BUILD_MODE STREQUAL Release)
|
||||
|
@@ -33,16 +33,6 @@ if(UNIX)
|
||||
yasm
|
||||
)
|
||||
|
||||
if(NOT APPLE)
|
||||
set(_required_software
|
||||
${_required_software}
|
||||
|
||||
# Needed for Mesa.
|
||||
meson
|
||||
ninja
|
||||
)
|
||||
endif()
|
||||
|
||||
foreach(_software ${_required_software})
|
||||
find_program(_software_find NAMES ${_software})
|
||||
if(NOT _software_find)
|
||||
@@ -70,7 +60,7 @@ if(UNIX)
|
||||
" ${_software_missing}\n"
|
||||
"\n"
|
||||
"On Debian and Ubuntu:\n"
|
||||
" apt install autoconf automake libtool yasm tcl ninja-build meson python3-mako\n"
|
||||
" apt install autoconf automake libtool yasm tcl\n"
|
||||
"\n"
|
||||
"On macOS (with homebrew):\n"
|
||||
" brew install autoconf automake bison libtool pkg-config yasm\n"
|
||||
|
74
build_files/build_environment/cmake/clang.cmake
Normal file
74
build_files/build_environment/cmake/clang.cmake
Normal file
@@ -0,0 +1,74 @@
|
||||
# ***** BEGIN GPL LICENSE BLOCK *****
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ***** END GPL LICENSE BLOCK *****
|
||||
|
||||
set(CLANG_EXTRA_ARGS
|
||||
-DCLANG_PATH_TO_LLVM_SOURCE=${BUILD_DIR}/ll/src/ll
|
||||
-DCLANG_PATH_TO_LLVM_BUILD=${LIBDIR}/llvm
|
||||
-DLLVM_USE_CRT_RELEASE=MD
|
||||
-DLLVM_USE_CRT_DEBUG=MDd
|
||||
-DLLVM_CONFIG=${LIBDIR}/llvm/bin/llvm-config
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
set(CLANG_GENERATOR "Ninja")
|
||||
else()
|
||||
set(CLANG_GENERATOR "Unix Makefiles")
|
||||
endif()
|
||||
|
||||
if(APPLE)
|
||||
set(CLANG_EXTRA_ARGS ${CLANG_EXTRA_ARGS}
|
||||
-DLIBXML2_LIBRARY=${LIBDIR}/xml2/lib/libxml2.a
|
||||
)
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(external_clang
|
||||
URL ${CLANG_URI}
|
||||
DOWNLOAD_DIR ${DOWNLOAD_DIR}
|
||||
URL_HASH MD5=${CLANG_HASH}
|
||||
PREFIX ${BUILD_DIR}/clang
|
||||
CMAKE_GENERATOR ${CLANG_GENERATOR}
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/clang ${DEFAULT_CMAKE_FLAGS} ${CLANG_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/clang
|
||||
)
|
||||
|
||||
if(MSVC)
|
||||
if(BUILD_MODE STREQUAL Release)
|
||||
set(CLANG_HARVEST_COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/clang/ ${HARVEST_TARGET}/llvm/)
|
||||
else()
|
||||
set(CLANG_HARVEST_COMMAND
|
||||
${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/clang/lib/ ${HARVEST_TARGET}/llvm/debug/lib/
|
||||
)
|
||||
endif()
|
||||
ExternalProject_Add_Step(external_clang after_install
|
||||
COMMAND ${CLANG_HARVEST_COMMAND}
|
||||
DEPENDEES mkdir update patch download configure build install
|
||||
)
|
||||
endif()
|
||||
|
||||
add_dependencies(
|
||||
external_clang
|
||||
ll
|
||||
)
|
||||
|
||||
# We currently do not build libxml2 on Windows.
|
||||
if(NOT WIN32)
|
||||
add_dependencies(
|
||||
external_clang
|
||||
external_xml2
|
||||
)
|
||||
endif()
|
@@ -29,7 +29,6 @@ set(EMBREE_EXTRA_ARGS
|
||||
-DEMBREE_MAX_ISA=AVX2
|
||||
-DEMBREE_TASKING_SYSTEM=TBB
|
||||
-DEMBREE_TBB_ROOT=${LIBDIR}/tbb
|
||||
-DTBB_ROOT=${LIBDIR}/tbb
|
||||
-DTBB_STATIC_LIB=${TBB_STATIC_LIBRARY}
|
||||
)
|
||||
|
||||
@@ -47,26 +46,15 @@ else()
|
||||
set(EMBREE_BUILD_DIR)
|
||||
endif()
|
||||
|
||||
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
|
||||
ExternalProject_Add(external_embree
|
||||
GIT_REPOSITORY ${EMBREE_ARM_GIT}
|
||||
GIT_TAG "blender-arm"
|
||||
DOWNLOAD_DIR ${DOWNLOAD_DIR}
|
||||
PREFIX ${BUILD_DIR}/embree
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/embree
|
||||
)
|
||||
else()
|
||||
ExternalProject_Add(external_embree
|
||||
URL ${EMBREE_URI}
|
||||
DOWNLOAD_DIR ${DOWNLOAD_DIR}
|
||||
URL_HASH MD5=${EMBREE_HASH}
|
||||
PREFIX ${BUILD_DIR}/embree
|
||||
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/embree/src/external_embree < ${PATCH_DIR}/embree.diff
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/embree
|
||||
)
|
||||
endif()
|
||||
ExternalProject_Add(external_embree
|
||||
URL ${EMBREE_URI}
|
||||
DOWNLOAD_DIR ${DOWNLOAD_DIR}
|
||||
URL_HASH MD5=${EMBREE_HASH}
|
||||
PREFIX ${BUILD_DIR}/embree
|
||||
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/embree/src/external_embree < ${PATCH_DIR}/embree.diff
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/embree
|
||||
)
|
||||
|
||||
add_dependencies(
|
||||
external_embree
|
||||
|
@@ -1,46 +0,0 @@
|
||||
# ***** BEGIN GPL LICENSE BLOCK *****
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ***** END GPL LICENSE BLOCK *****
|
||||
|
||||
set(HARU_EXTRA_ARGS
|
||||
-DLIBHPDF_SHARED=OFF
|
||||
-DLIBHPDF_STATIC=ON
|
||||
-DLIBHPDF_EXAMPLES=OFF
|
||||
-DLIBHPDF_ENABLE_EXCEPTIONS=ON
|
||||
)
|
||||
|
||||
ExternalProject_Add(external_haru
|
||||
URL ${HARU_URI}
|
||||
DOWNLOAD_DIR ${DOWNLOAD_DIR}
|
||||
URL_HASH MD5=${HARU_HASH}
|
||||
PREFIX ${BUILD_DIR}/haru
|
||||
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/haru/src/external_haru < ${PATCH_DIR}/haru.diff
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_INSTALL_PREFIX=${LIBDIR}/haru
|
||||
${DEFAULT_CMAKE_FLAGS} ${HARU_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/haru
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
if(BUILD_MODE STREQUAL Release)
|
||||
ExternalProject_Add_Step(external_haru after_install
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/haru/include ${HARVEST_TARGET}/haru/include
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/haru/lib/libhpdfs.lib ${HARVEST_TARGET}/haru/lib/libhpdfs.lib
|
||||
DEPENDEES install
|
||||
)
|
||||
endif()
|
||||
endif()
|
@@ -97,21 +97,14 @@ harvest(jemalloc/lib jemalloc/lib "*.a")
|
||||
harvest(jpg/include jpeg/include "*.h")
|
||||
harvest(jpg/lib jpeg/lib "libjpeg.a")
|
||||
harvest(lame/lib ffmpeg/lib "*.a")
|
||||
harvest(llvm/bin llvm/bin "clang-format")
|
||||
if(BUILD_CLANG_TOOLS)
|
||||
harvest(llvm/bin llvm/bin "clang-tidy")
|
||||
harvest(llvm/share/clang llvm/share "run-clang-tidy.py")
|
||||
endif()
|
||||
harvest(clang/bin llvm/bin "clang-format")
|
||||
harvest(clang/include llvm/include "*")
|
||||
harvest(llvm/include llvm/include "*")
|
||||
harvest(llvm/bin llvm/bin "llvm-config")
|
||||
harvest(llvm/lib llvm/lib "libLLVM*.a")
|
||||
harvest(llvm/lib llvm/lib "libclang*.a")
|
||||
if(APPLE)
|
||||
harvest(openmp/lib openmp/lib "*")
|
||||
harvest(openmp/include openmp/include "*.h")
|
||||
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
|
||||
harvest(sse2neon sse2neon "*.h")
|
||||
endif()
|
||||
endif()
|
||||
harvest(ogg/lib ffmpeg/lib "*.a")
|
||||
harvest(openal/include openal/include "*.h")
|
||||
@@ -142,10 +135,12 @@ harvest(openimageio/bin openimageio/bin "maketx")
|
||||
harvest(openimageio/bin openimageio/bin "oiiotool")
|
||||
harvest(openimageio/include openimageio/include "*")
|
||||
harvest(openimageio/lib openimageio/lib "*.a")
|
||||
harvest(openimagedenoise/include openimagedenoise/include "*")
|
||||
harvest(openimagedenoise/lib openimagedenoise/lib "*.a")
|
||||
harvest(embree/include embree/include "*.h")
|
||||
harvest(embree/lib embree/lib "*.a")
|
||||
if((NOT APPLE) OR ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
|
||||
harvest(openimagedenoise/include openimagedenoise/include "*")
|
||||
harvest(openimagedenoise/lib openimagedenoise/lib "*.a")
|
||||
harvest(embree/include embree/include "*.h")
|
||||
harvest(embree/lib embree/lib "*.a")
|
||||
endif()
|
||||
harvest(openjpeg/include/openjpeg-2.3 openjpeg/include "*.h")
|
||||
harvest(openjpeg/lib openjpeg/lib "*.a")
|
||||
harvest(opensubdiv/include opensubdiv/include "*.h")
|
||||
@@ -158,12 +153,12 @@ harvest(xr_openxr_sdk/lib xr_openxr_sdk/lib "*.a")
|
||||
harvest(osl/bin osl/bin "oslc")
|
||||
harvest(osl/include osl/include "*.h")
|
||||
harvest(osl/lib osl/lib "*.a")
|
||||
harvest(osl/share/OSL/shaders osl/share/OSL/shaders "*.h")
|
||||
harvest(osl/shaders osl/shaders "*.h")
|
||||
harvest(png/include png/include "*.h")
|
||||
harvest(png/lib png/lib "*.a")
|
||||
harvest(pugixml/include pugixml/include "*.hpp")
|
||||
harvest(pugixml/lib pugixml/lib "*.a")
|
||||
harvest(python/bin python/bin "python${PYTHON_SHORT_VERSION}")
|
||||
harvest(python/bin python/bin "python${PYTHON_SHORT_VERSION}m")
|
||||
harvest(python/include python/include "*h")
|
||||
harvest(python/lib python/lib "*")
|
||||
harvest(sdl/include/SDL2 sdl/include "*.h")
|
||||
@@ -188,12 +183,10 @@ harvest(usd/lib/usd usd/lib/usd "*")
|
||||
harvest(usd/plugin usd/plugin "*")
|
||||
harvest(potrace/include potrace/include "*.h")
|
||||
harvest(potrace/lib potrace/lib "*.a")
|
||||
harvest(haru/include haru/include "*.h")
|
||||
harvest(haru/lib haru/lib "*.a")
|
||||
|
||||
if(UNIX AND NOT APPLE)
|
||||
harvest(libglu/lib mesa/lib "*.so*")
|
||||
harvest(mesa/lib64 mesa/lib "*.so*")
|
||||
harvest(mesa/lib mesa/lib "*.so*")
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
@@ -21,42 +21,37 @@ if(WIN32)
|
||||
-DFLEX_EXECUTABLE=${LIBDIR}/flexbison/win_flex.exe
|
||||
-DBISON_EXECUTABLE=${LIBDIR}/flexbison/win_bison.exe
|
||||
-DM4_EXECUTABLE=${DOWNLOAD_DIR}/mingw/mingw64/msys/1.0/bin/m4.exe
|
||||
-DARM_ENABLED=Off
|
||||
)
|
||||
elseif(APPLE)
|
||||
# Use bison and flex installed via Homebrew.
|
||||
# The ones that come with Xcode toolset are too old.
|
||||
# Use bison installed via Homebrew.
|
||||
# The one which comes which Xcode toolset is too old.
|
||||
if("${CMAKE_HOST_SYSTEM_PROCESSOR}" STREQUAL "arm64")
|
||||
set(ISPC_EXTRA_ARGS_APPLE
|
||||
-DBISON_EXECUTABLE=/opt/homebrew/opt/bison/bin/bison
|
||||
-DFLEX_EXECUTABLE=/opt/homebrew/opt/flex/bin/flex
|
||||
-DARM_ENABLED=On
|
||||
)
|
||||
set(HOMEBREW_LOCATION "/opt/homebrew")
|
||||
else()
|
||||
set(ISPC_EXTRA_ARGS_APPLE
|
||||
-DBISON_EXECUTABLE=/usr/local/opt/bison/bin/bison
|
||||
-DARM_ENABLED=Off
|
||||
)
|
||||
set(HOMEBREW_LOCATION "/usr/local")
|
||||
endif()
|
||||
set(ISPC_EXTRA_ARGS_APPLE
|
||||
-DBISON_EXECUTABLE=${HOMEBREW_LOCATION}/opt/bison/bin/bison
|
||||
)
|
||||
elseif(UNIX)
|
||||
set(ISPC_EXTRA_ARGS_UNIX
|
||||
-DCMAKE_C_COMPILER=${LIBDIR}/llvm/bin/clang
|
||||
-DCMAKE_CXX_COMPILER=${LIBDIR}/llvm/bin/clang++
|
||||
-DARM_ENABLED=Off
|
||||
-DCMAKE_C_COMPILER=${LIBDIR}/clang/bin/clang
|
||||
-DCMAKE_CXX_COMPILER=${LIBDIR}/clang/bin/clang++
|
||||
)
|
||||
endif()
|
||||
|
||||
set(ISPC_EXTRA_ARGS
|
||||
-DARM_ENABLED=Off
|
||||
-DISPC_NO_DUMPS=On
|
||||
-DISPC_INCLUDE_EXAMPLES=Off
|
||||
-DISPC_INCLUDE_TESTS=Off
|
||||
-DLLVM_ROOT=${LIBDIR}/llvm/lib/cmake/llvm
|
||||
-DLLVM_LIBRARY_DIR=${LIBDIR}/llvm/lib
|
||||
-DCLANG_EXECUTABLE=${LIBDIR}/llvm/bin/clang
|
||||
-DCLANGPP_EXECUTABLE=${LIBDIR}/llvm/bin/clang++
|
||||
-DCLANG_EXECUTABLE=${LIBDIR}/clang/bin/clang
|
||||
-DCLANGPP_EXECUTABLE=${LIBDIR}/clang/bin/clang++
|
||||
-DISPC_INCLUDE_TESTS=Off
|
||||
-DCLANG_LIBRARY_DIR=${LIBDIR}/llvm/lib
|
||||
-DCLANG_INCLUDE_DIRS=${LIBDIR}/llvm/include
|
||||
-DCLANG_LIBRARY_DIR=${LIBDIR}/clang/lib
|
||||
-DCLANG_INCLUDE_DIRS=${LIBDIR}/clang/include
|
||||
${ISPC_EXTRA_ARGS_WIN}
|
||||
${ISPC_EXTRA_ARGS_APPLE}
|
||||
${ISPC_EXTRA_ARGS_UNIX}
|
||||
@@ -75,6 +70,7 @@ ExternalProject_Add(external_ispc
|
||||
add_dependencies(
|
||||
external_ispc
|
||||
ll
|
||||
external_clang
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
|
@@ -16,20 +16,16 @@
|
||||
#
|
||||
# ***** END GPL LICENSE BLOCK *****
|
||||
|
||||
set(EXPAT_EXTRA_ARGS
|
||||
-DEXPAT_BUILD_DOCS=OFF
|
||||
-DEXPAT_BUILD_EXAMPLES=OFF
|
||||
-DEXPAT_BUILD_TESTS=OFF
|
||||
-DEXPAT_BUILD_TOOLS=OFF
|
||||
-DEXPAT_SHARED_LIBS=OFF
|
||||
set(LCMS_EXTRA_ARGS
|
||||
)
|
||||
|
||||
ExternalProject_Add(external_expat
|
||||
URL ${EXPAT_URI}
|
||||
ExternalProject_Add(external_lcms
|
||||
URL ${LCMS_URI}
|
||||
DOWNLOAD_DIR ${DOWNLOAD_DIR}
|
||||
URL_HASH MD5=${EXPAT_HASH}
|
||||
PREFIX ${BUILD_DIR}/expat
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/expat ${DEFAULT_CMAKE_FLAGS} ${EXPAT_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/expat
|
||||
SOURCE_SUBDIR expat
|
||||
URL_HASH MD5=${LCMS_HASH}
|
||||
PREFIX ${BUILD_DIR}/lcms
|
||||
# Patch taken from ocio.
|
||||
PATCH_COMMAND ${CMAKE_COMMAND} -E copy ${PATCH_DIR}/cmakelists_lcms.txt ${BUILD_DIR}/lcms/src/external_lcms/CMakeLists.txt
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/lcms ${DEFAULT_CMAKE_FLAGS} ${LCMS_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/lcms
|
||||
)
|
@@ -17,20 +17,11 @@
|
||||
# ***** END GPL LICENSE BLOCK *****
|
||||
|
||||
if(APPLE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
|
||||
set(LLVM_TARGETS AArch64$<SEMICOLON>ARM)
|
||||
set(LLVM_TARGETS AArch64)
|
||||
else()
|
||||
set(LLVM_TARGETS X86)
|
||||
endif()
|
||||
|
||||
if(APPLE)
|
||||
set(LLVM_XML2_ARGS
|
||||
-DLIBXML2_LIBRARY=${LIBDIR}/xml2/lib/libxml2.a
|
||||
)
|
||||
set(LLVM_BUILD_CLANG_TOOLS_EXTRA ^^clang-tools-extra)
|
||||
set(BUILD_CLANG_TOOLS ON)
|
||||
endif()
|
||||
|
||||
|
||||
set(LLVM_EXTRA_ARGS
|
||||
-DLLVM_USE_CRT_RELEASE=MD
|
||||
-DLLVM_USE_CRT_DEBUG=MDd
|
||||
@@ -40,8 +31,6 @@ set(LLVM_EXTRA_ARGS
|
||||
-DLLVM_ENABLE_TERMINFO=OFF
|
||||
-DLLVM_BUILD_LLVM_C_DYLIB=OFF
|
||||
-DLLVM_ENABLE_UNWIND_TABLES=OFF
|
||||
-DLLVM_ENABLE_PROJECTS=clang${LLVM_BUILD_CLANG_TOOLS_EXTRA}
|
||||
${LLVM_XML2_ARGS}
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
@@ -56,9 +45,7 @@ ExternalProject_Add(ll
|
||||
DOWNLOAD_DIR ${DOWNLOAD_DIR}
|
||||
URL_HASH MD5=${LLVM_HASH}
|
||||
CMAKE_GENERATOR ${LLVM_GENERATOR}
|
||||
LIST_SEPARATOR ^^
|
||||
PREFIX ${BUILD_DIR}/ll
|
||||
SOURCE_SUBDIR llvm
|
||||
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/ll/src/ll < ${PATCH_DIR}/llvm.diff
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/llvm ${DEFAULT_CMAKE_FLAGS} ${LLVM_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/llvm
|
||||
@@ -78,11 +65,3 @@ if(MSVC)
|
||||
DEPENDEES mkdir update patch download configure build install
|
||||
)
|
||||
endif()
|
||||
|
||||
# We currently do not build libxml2 on Windows.
|
||||
if(APPLE)
|
||||
add_dependencies(
|
||||
ll
|
||||
external_xml2
|
||||
)
|
||||
endif()
|
||||
|
@@ -20,36 +20,19 @@ set(MESA_CFLAGS "-static-libgcc")
|
||||
set(MESA_CXXFLAGS "-static-libgcc -static-libstdc++ -Bstatic -lstdc++ -Bdynamic -l:libstdc++.a")
|
||||
set(MESA_LDFLAGS "-L${LIBDIR}/zlib/lib -pthread -static-libgcc -static-libstdc++ -Bstatic -lstdc++ -Bdynamic -l:libstdc++.a -l:libz_pic.a")
|
||||
|
||||
# The 'native-file', used for overrides with the meson build system.
|
||||
# meson does not provide a way to do this using command line arguments.
|
||||
#
|
||||
# Note that we can't output to "${BUILD_DIR}/mesa/src/external_mesa" as
|
||||
# it doesn't exist when CMake first executes.
|
||||
file(WRITE ${BUILD_DIR}/mesa/tmp/native-file.ini "\
|
||||
[binaries]
|
||||
llvm-config = '${LIBDIR}/llvm/bin/llvm-config'"
|
||||
)
|
||||
|
||||
set(MESA_EXTRA_FLAGS
|
||||
-Dbuildtype=release
|
||||
-Dc_args=${MESA_CFLAGS}
|
||||
-Dcpp_args=${MESA_CXXFLAGS}
|
||||
-Dc_link_args=${MESA_LDFLAGS}
|
||||
-Dcpp_link_args=${MESA_LDFLAGS}
|
||||
-Dglx=gallium-xlib
|
||||
-Dgallium-drivers=swrast
|
||||
-Ddri-drivers=
|
||||
-Dvulkan-drivers=
|
||||
-Dgbm=disabled
|
||||
-Degl=disabled
|
||||
-Dgles1=disabled
|
||||
-Dgles2=disabled
|
||||
-Dshared-llvm=disabled
|
||||
# Without this, the build fails when: `wayland-scanner` is not found.
|
||||
# At some point we will likely want to support Wayland.
|
||||
# Disable for now since it's not officially supported.
|
||||
-Dplatforms=x11
|
||||
--native-file ${BUILD_DIR}/mesa/tmp/native-file.ini
|
||||
CFLAGS=${MESA_CFLAGS}
|
||||
CXXFLAGS=${MESA_CXXFLAGS}
|
||||
LDFLAGS=${MESA_LDFLAGS}
|
||||
--enable-glx=gallium-xlib
|
||||
--with-gallium-drivers=swrast
|
||||
--disable-dri
|
||||
--disable-gbm
|
||||
--disable-egl
|
||||
--disable-gles1
|
||||
--disable-gles2
|
||||
--disable-llvm-shared-libs
|
||||
--with-llvm-prefix=${LIBDIR}/llvm
|
||||
)
|
||||
|
||||
ExternalProject_Add(external_mesa
|
||||
@@ -59,9 +42,9 @@ ExternalProject_Add(external_mesa
|
||||
PREFIX ${BUILD_DIR}/mesa
|
||||
CONFIGURE_COMMAND ${CONFIGURE_ENV} &&
|
||||
cd ${BUILD_DIR}/mesa/src/external_mesa/ &&
|
||||
meson ${BUILD_DIR}/mesa/src/external_mesa-build --prefix=${LIBDIR}/mesa ${MESA_EXTRA_FLAGS}
|
||||
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa-build && ninja -j${MAKE_THREADS}
|
||||
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa-build && ninja install
|
||||
${CONFIGURE_COMMAND_NO_TARGET} --prefix=${LIBDIR}/mesa ${MESA_EXTRA_FLAGS}
|
||||
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa/ && make -j${MAKE_THREADS}
|
||||
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/mesa/src/external_mesa/ && make install
|
||||
INSTALL_DIR ${LIBDIR}/mesa
|
||||
)
|
||||
|
||||
|
@@ -22,17 +22,8 @@ ExternalProject_Add(external_nasm
|
||||
URL_HASH SHA256=${NASM_HASH}
|
||||
PREFIX ${BUILD_DIR}/nasm
|
||||
PATCH_COMMAND ${PATCH_CMD} --verbose -p 1 -N -d ${BUILD_DIR}/nasm/src/external_nasm < ${PATCH_DIR}/nasm.diff
|
||||
CONFIGURE_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && ./autogen.sh && ${CONFIGURE_COMMAND} --prefix=${LIBDIR}/nasm
|
||||
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make -j${MAKE_THREADS} && make manpages
|
||||
CONFIGURE_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && ${CONFIGURE_COMMAND} --prefix=${LIBDIR}/nasm
|
||||
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make -j${MAKE_THREADS}
|
||||
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/nasm/src/external_nasm/ && make install
|
||||
INSTALL_DIR ${LIBDIR}/nasm
|
||||
)
|
||||
|
||||
if(UNIX)
|
||||
# `touch nasm.1 ndisasm.1` helps to create the manual pages files, even when
|
||||
# local `asciidoc` and `xmlto` packages are not installed.
|
||||
ExternalProject_Add_Step(external_nasm after_configure
|
||||
COMMAND ${CMAKE_COMMAND} -E touch ${BUILD_DIR}/nasm/src/external_nasm/nasm.1 ${BUILD_DIR}/nasm/src/external_nasm/ndisasm.1
|
||||
DEPENDEES configure
|
||||
)
|
||||
endif()
|
||||
|
@@ -47,5 +47,4 @@ ExternalProject_Add(external_numpy
|
||||
add_dependencies(
|
||||
external_numpy
|
||||
external_python
|
||||
external_python_site_packages
|
||||
)
|
||||
|
@@ -18,37 +18,50 @@
|
||||
|
||||
set(OPENCOLORIO_EXTRA_ARGS
|
||||
-DOCIO_BUILD_APPS=OFF
|
||||
-DOCIO_BUILD_PYTHON=OFF
|
||||
-DOCIO_BUILD_PYGLUE=OFF
|
||||
-DOCIO_BUILD_NUKE=OFF
|
||||
-DOCIO_BUILD_JAVA=OFF
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
-DOCIO_USE_BOOST_PTR=OFF
|
||||
-DOCIO_BUILD_STATIC=ON
|
||||
-DOCIO_BUILD_SHARED=OFF
|
||||
-DOCIO_BUILD_TRUELIGHT=OFF
|
||||
-DOCIO_BUILD_DOCS=OFF
|
||||
-DOCIO_BUILD_TESTS=OFF
|
||||
-DOCIO_BUILD_GPU_TESTS=OFF
|
||||
-DOCIO_USE_SSE=ON
|
||||
|
||||
# Manually build ext packages except for pystring, which does not have
|
||||
# a CMake or autotools build system that we can easily use.
|
||||
-DOCIO_INSTALL_EXT_PACKAGES=MISSING
|
||||
-DHalf_ROOT=${LIBDIR}/openexr
|
||||
-DHalf_STATIC_LIBRARY=ON
|
||||
-Dexpat_ROOT=${LIBDIR}/expat
|
||||
-Dyaml-cpp_ROOT=${LIBDIR}/yamlcpp
|
||||
-DOCIO_BUILD_PYGLUE=OFF
|
||||
-DOCIO_BUILD_JNIGLUE=OFF
|
||||
-DOCIO_STATIC_JNIGLUE=OFF
|
||||
)
|
||||
|
||||
if(APPLE AND NOT("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
|
||||
set(OPENCOLORIO_EXTRA_ARGS
|
||||
${OPENCOLORIO_EXTRA_ARGS}
|
||||
-DOCIO_USE_SSE=OFF
|
||||
${OPENCOLORIO_EXTRA_ARGS}
|
||||
-DOCIO_USE_SSE=OFF
|
||||
)
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
set(OCIO_PATCH opencolorio_win.diff)
|
||||
set(OPENCOLORIO_EXTRA_ARGS
|
||||
${OPENCOLORIO_EXTRA_ARGS}
|
||||
-DOCIO_BUILD_TESTS=OFF
|
||||
-DOCIO_USE_SSE=ON
|
||||
-DOCIO_INLINES_HIDDEN=OFF
|
||||
-DOCIO_PYGLUE_LINK=OFF
|
||||
-DOCIO_PYGLUE_RESPECT_ABI=OFF
|
||||
-DOCIO_PYGLUE_SONAME=OFF
|
||||
-DOCIO_PYGLUE_LIB_PREFIX=OFF
|
||||
-DUSE_EXTERNAL_TINYXML=ON
|
||||
-DTINYXML_INCLUDE_DIR=${LIBDIR}/tinyxml/include
|
||||
-DTINYXML_LIBRARY=${LIBDIR}/tinyxml/lib/tinyxml${libext}
|
||||
-DUSE_EXTERNAL_YAML=ON
|
||||
-DYAML_CPP_FOUND=ON
|
||||
-DYAML_CPP_VERSION=${YAMLCPP_VERSION}
|
||||
-DUSE_EXTERNAL_LCMS=ON
|
||||
-DINC_1=${LIBDIR}/tinyxml/include
|
||||
-DINC_2=${LIBDIR}/yamlcpp/include
|
||||
# Lie because ocio cmake is demanding boost even though it is not needed.
|
||||
-DYAML_CPP_VERSION=0.5.0
|
||||
)
|
||||
else()
|
||||
set(OCIO_PATCH opencolorio.diff)
|
||||
set(OPENCOLORIO_EXTRA_ARGS
|
||||
${OPENCOLORIO_EXTRA_ARGS}
|
||||
)
|
||||
@@ -59,43 +72,48 @@ ExternalProject_Add(external_opencolorio
|
||||
DOWNLOAD_DIR ${DOWNLOAD_DIR}
|
||||
URL_HASH MD5=${OPENCOLORIO_HASH}
|
||||
PREFIX ${BUILD_DIR}/opencolorio
|
||||
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/opencolorio/src/external_opencolorio < ${PATCH_DIR}/opencolorio.diff
|
||||
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/opencolorio/src/external_opencolorio < ${PATCH_DIR}/${OCIO_PATCH}
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/opencolorio ${DEFAULT_CMAKE_FLAGS} ${OPENCOLORIO_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/opencolorio
|
||||
)
|
||||
|
||||
if(NOT WIN32)
|
||||
add_custom_command(
|
||||
OUTPUT ${LIBDIR}/opencolorio/lib/libtinyxml.a
|
||||
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libtinyxml.a ${LIBDIR}/opencolorio/lib/libtinyxml.a
|
||||
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libyaml-cpp.a ${LIBDIR}/opencolorio/lib/libyaml-cpp.a
|
||||
)
|
||||
add_custom_target(external_opencolorio_extra ALL DEPENDS external_opencolorio ${LIBDIR}/opencolorio/lib/libtinyxml.a)
|
||||
endif()
|
||||
|
||||
add_dependencies(
|
||||
external_opencolorio
|
||||
external_yamlcpp
|
||||
external_expat
|
||||
external_openexr
|
||||
external_boost
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
add_dependencies(
|
||||
external_opencolorio
|
||||
external_tinyxml
|
||||
external_yamlcpp
|
||||
|
||||
)
|
||||
if(BUILD_MODE STREQUAL Release)
|
||||
ExternalProject_Add_Step(external_opencolorio after_install
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/include ${HARVEST_TARGET}/opencolorio/include
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/lib ${HARVEST_TARGET}/opencolorio/lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/opencolorio/lib/static ${HARVEST_TARGET}/opencolorio/lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/yamlcpp/lib/libyaml-cppmd.lib ${HARVEST_TARGET}/opencolorio/lib/libyaml-cpp.lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/expat/lib/libexpatMD.lib ${HARVEST_TARGET}/opencolorio/lib/libexpatMD.lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/pystring.lib ${HARVEST_TARGET}/opencolorio/lib/pystring.lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tinyxml/lib/tinyxml.lib ${HARVEST_TARGET}/opencolorio/lib/tinyxml.lib
|
||||
DEPENDEES install
|
||||
)
|
||||
endif()
|
||||
if(BUILD_MODE STREQUAL Debug)
|
||||
ExternalProject_Add_Step(external_opencolorio after_install
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/opencolorio/lib/Opencolorio.lib ${HARVEST_TARGET}/opencolorio/lib/OpencolorIO_d.lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/opencolorio/lib/static/Opencolorio.lib ${HARVEST_TARGET}/opencolorio/lib/OpencolorIO_d.lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/yamlcpp/lib/libyaml-cppmdd.lib ${HARVEST_TARGET}/opencolorio/lib/libyaml-cpp_d.lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/expat/lib/libexpatdMD.lib ${HARVEST_TARGET}/opencolorio/lib/libexpatdMD.lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/pystring.lib ${HARVEST_TARGET}/opencolorio/lib/pystring_d.lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tinyxml/lib/tinyxml.lib ${HARVEST_TARGET}/opencolorio/lib/tinyxml_d.lib
|
||||
DEPENDEES install
|
||||
)
|
||||
endif()
|
||||
else()
|
||||
ExternalProject_Add_Step(external_opencolorio after_install
|
||||
COMMAND cp ${LIBDIR}/yamlcpp/lib/libyaml-cpp.a ${LIBDIR}/opencolorio/lib/
|
||||
COMMAND cp ${LIBDIR}/expat/lib/libexpat.a ${LIBDIR}/opencolorio/lib/
|
||||
COMMAND cp ${BUILD_DIR}/opencolorio/src/external_opencolorio-build/ext/dist/lib/libpystring.a ${LIBDIR}/opencolorio/lib/
|
||||
DEPENDEES install
|
||||
)
|
||||
|
||||
endif()
|
||||
|
@@ -45,7 +45,6 @@ ExternalProject_Add(external_openimagedenoise
|
||||
DOWNLOAD_DIR ${DOWNLOAD_DIR}
|
||||
URL_HASH MD5=${OIDN_HASH}
|
||||
PREFIX ${BUILD_DIR}/openimagedenoise
|
||||
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/openimagedenoise/src/external_openimagedenoise < ${PATCH_DIR}/oidn.diff
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/openimagedenoise ${DEFAULT_CMAKE_FLAGS} ${OIDN_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/openimagedenoise
|
||||
)
|
||||
|
@@ -30,5 +30,5 @@ ExternalProject_Add(external_openmp

add_dependencies(
external_openmp
ll
external_clang
)

@@ -54,8 +54,6 @@ set(OPENVDB_EXTRA_ARGS
-DOPENVDB_CORE_STATIC=${OPENVDB_STATIC}
-DOPENVDB_BUILD_BINARIES=Off
-DCMAKE_DEBUG_POSTFIX=_d
-DILMBASE_USE_STATIC_LIBS=On
-DOPENEXR_USE_STATIC_LIBS=On
)

if(WIN32)

@@ -19,7 +19,12 @@
|
||||
if(WIN32)
|
||||
set(OSL_CMAKE_CXX_STANDARD_LIBRARIES "kernel32${LIBEXT} user32${LIBEXT} gdi32${LIBEXT} winspool${LIBEXT} shell32${LIBEXT} ole32${LIBEXT} oleaut32${LIBEXT} uuid${LIBEXT} comdlg32${LIBEXT} advapi32${LIBEXT} psapi${LIBEXT}")
|
||||
set(OSL_FLEX_BISON -DFLEX_EXECUTABLE=${LIBDIR}/flexbison/win_flex.exe -DBISON_EXECUTABLE=${LIBDIR}/flexbison/win_bison.exe)
|
||||
set(OSL_SIMD_FLAGS -DOIIO_NOSIMD=1 -DOIIO_SIMD=sse2)
|
||||
set(OSL_OPENIMAGEIO_LIBRARY "${LIBDIR}/openimageio/lib/${LIBPREFIX}OpenImageIO${LIBEXT};${LIBDIR}/openimageio/lib/${LIBPREFIX}OpenImageIO_Util${LIBEXT};${LIBDIR}/png/lib/libpng16${LIBEXT};${LIBDIR}/jpg/lib/${LIBPREFIX}jpeg${LIBEXT};${LIBDIR}/tiff/lib/${LIBPREFIX}tiff${LIBEXT};${LIBDIR}/openexr/lib/${LIBPREFIX}IlmImf${OPENEXR_VERSION_POSTFIX}${LIBEXT}")
|
||||
if("${CMAKE_SIZEOF_VOID_P}" EQUAL "4")
|
||||
set(OSL_SIMD_FLAGS -DOIIO_NOSIMD=1 -DOIIO_SIMD=0)
|
||||
else()
|
||||
set(OSL_SIMD_FLAGS -DOIIO_NOSIMD=1 -DOIIO_SIMD=sse2)
|
||||
endif()
|
||||
SET(OSL_PLATFORM_FLAGS -DLINKSTATIC=ON)
|
||||
else()
|
||||
set(OSL_CMAKE_CXX_STANDARD_LIBRARIES)
|
||||
@@ -29,6 +34,7 @@ else()
|
||||
endif()
|
||||
|
||||
set(OSL_ILMBASE_CUSTOM_LIBRARIES "${LIBDIR}/openexr/lib/Imath${OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/Half{OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/IlmThread${OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/Iex${OPENEXR_VERSION_POSTFIX}.lib")
|
||||
set(OSL_LLVM_LIBRARY "${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMAnalysis${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMAsmParser${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMAsmPrinter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMBitReader${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMBitWriter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMCodeGen${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMCore${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMDebugInfo${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMExecutionEngine${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMInstCombine${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMInstrumentation${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMInterpreter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMJIT${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMLinker${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMC${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMCDisassembler${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMCJIT${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMMCParser${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMObject${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMRuntimeDyld${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMScalarOpts${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMSelectionDAG${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMSupport${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMTableGen${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMTarget${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMTransformUtils${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMVectorize${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86AsmParser${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86AsmPrinter${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86CodeGen${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Desc${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Disassembler${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Info${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMX86Utils${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMipa${LIBEXT};${LIBDIR}/llvm/lib/${LIBPREFIX}LLVMipo${LIBEXT}")
|
||||
|
||||
set(OSL_EXTRA_ARGS
|
||||
-DBoost_COMPILER:STRING=${BOOST_COMPILER_STRING}
|
||||
@@ -39,8 +45,13 @@ set(OSL_EXTRA_ARGS
|
||||
-DBOOST_LIBRARYDIR=${LIBDIR}/boost/lib/
|
||||
-DBoost_NO_SYSTEM_PATHS=ON
|
||||
-DBoost_NO_BOOST_CMAKE=ON
|
||||
-DOpenEXR_ROOT=${LIBDIR}/openexr/
|
||||
-DIlmBase_ROOT=${LIBDIR}/openexr/
|
||||
-DLLVM_DIRECTORY=${LIBDIR}/llvm
|
||||
-DLLVM_INCLUDES=${LIBDIR}/llvm/include
|
||||
-DLLVM_LIB_DIR=${LIBDIR}/llvm/lib
|
||||
-DLLVM_VERSION=3.4
|
||||
-DLLVM_LIBRARY=${OSL_LLVM_LIBRARY}
|
||||
-DOPENEXR_HOME=${LIBDIR}/openexr/
|
||||
-DILMBASE_HOME=${LIBDIR}/openexr/
|
||||
-DILMBASE_INCLUDE_DIR=${LIBDIR}/openexr/include/
|
||||
-DOPENEXR_HALF_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Half${OPENEXR_VERSION_POSTFIX}${LIBEXT}
|
||||
-DOPENEXR_IMATH_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Imath${OPENEXR_VERSION_POSTFIX}${LIBEXT}
|
||||
@@ -48,32 +59,35 @@ set(OSL_EXTRA_ARGS
|
||||
-DOPENEXR_IEX_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}Iex${OPENEXR_VERSION_POSTFIX}${LIBEXT}
|
||||
-DOPENEXR_INCLUDE_DIR=${LIBDIR}/openexr/include/
|
||||
-DOPENEXR_ILMIMF_LIBRARY=${LIBDIR}/openexr/lib/${LIBPREFIX}IlmImf${OPENEXR_VERSION_POSTFIX}${LIBEXT}
|
||||
-DOpenImageIO_ROOT=${LIBDIR}/openimageio/
|
||||
-DOSL_BUILD_TESTS=OFF
|
||||
-DOSL_BUILD_MATERIALX=OFF
|
||||
-DZLIB_LIBRARY=${LIBDIR}/zlib/lib/${ZLIB_LIBRARY}
|
||||
-DZLIB_INCLUDE_DIR=${LIBDIR}/zlib/include/
|
||||
-DOPENIMAGEIOHOME=${LIBDIR}/openimageio/
|
||||
-DOPENIMAGEIO_INCLUDE_DIR=${LIBDIR}/openimageio/include
|
||||
-DOPENIMAGEIO_LIBRARY=${OSL_OPENIMAGEIO_LIBRARY}
|
||||
${OSL_FLEX_BISON}
|
||||
-DCMAKE_CXX_STANDARD_LIBRARIES=${OSL_CMAKE_CXX_STANDARD_LIBRARIES}
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
-DBUILDSTATIC=ON
|
||||
${OSL_PLATFORM_FLAGS}
|
||||
-DOSL_BUILD_PLUGINS=OFF
|
||||
-DOSL_BUILD_PLUGINS=Off
|
||||
-DSTOP_ON_WARNING=OFF
|
||||
-DUSE_LLVM_BITCODE=OFF
|
||||
-DLLVM_ROOT=${LIBDIR}/llvm/
|
||||
-DLLVM_DIRECTORY=${LIBDIR}/llvm/
|
||||
-DUSE_PARTIO=OFF
|
||||
-DUSE_QT=OFF
|
||||
-DUSE_Qt5=OFF
|
||||
-DINSTALL_DOCS=OFF
|
||||
${OSL_SIMD_FLAGS}
|
||||
-Dpugixml_ROOT=${LIBDIR}/pugixml
|
||||
-DUSE_PYTHON=OFF
|
||||
-DPARTIO_LIBRARIES=
|
||||
-DPUGIXML_HOME=${LIBDIR}/pugixml
|
||||
)
|
||||
|
||||
# Apple arm64 uses LLVM 11, LLVM 10+ requires C++14
|
||||
if (APPLE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
|
||||
list(APPEND OSL_EXTRA_ARGS -DCMAKE_CXX_STANDARD=14)
|
||||
if(APPLE)
|
||||
# Make symbol hiding consistent with OIIO which defaults to OFF,
|
||||
# avoids linker warnings on macOS
|
||||
set(OSL_EXTRA_ARGS
|
||||
${OSL_EXTRA_ARGS}
|
||||
-DHIDE_SYMBOLS=OFF
|
||||
)
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(external_osl
|
||||
@@ -91,6 +105,7 @@ add_dependencies(
|
||||
external_osl
|
||||
external_boost
|
||||
ll
|
||||
external_clang
|
||||
external_openexr
|
||||
external_zlib
|
||||
external_flexbison
|
||||
|
@@ -43,7 +43,7 @@ if(WIN32)
|
||||
PREFIX ${BUILD_DIR}/python
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND cd ${BUILD_DIR}/python/src/external_python/pcbuild/ && set IncludeTkinter=false && call build.bat -e -p x64 -c ${BUILD_MODE}
|
||||
INSTALL_COMMAND ${PYTHON_BINARY_INTERNAL} ${PYTHON_SRC}/PC/layout/main.py -b ${PYTHON_SRC}/PCbuild/amd64 -s ${PYTHON_SRC} -t ${PYTHON_SRC}/tmp/ --include-stable --include-pip --include-dev --include-launchers --include-venv --include-symbols ${PYTHON_EXTRA_INSTLAL_FLAGS} --copy ${LIBDIR}/python
|
||||
INSTALL_COMMAND ${PYTHON_BINARY_INTERNAL} ${PYTHON_SRC}/PC/layout/main.py -b ${PYTHON_SRC}/PCbuild/amd64 -s ${PYTHON_SRC} -t ${PYTHON_SRC}/tmp/ --include-underpth --include-stable --include-pip --include-dev --include-launchers --include-venv --include-symbols ${PYTHON_EXTRA_INSTLAL_FLAGS} --copy ${LIBDIR}/python
|
||||
)
|
||||
|
||||
else()
|
||||
@@ -74,15 +74,16 @@ else()
|
||||
endif()
|
||||
set(PYTHON_CONFIGURE_ENV ${CONFIGURE_ENV} && ${PYTHON_FUNC_CONFIGS})
|
||||
set(PYTHON_BINARY ${BUILD_DIR}/python/src/external_python/python.exe)
|
||||
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_macos.diff)
|
||||
else()
|
||||
set(PYTHON_CONFIGURE_ENV ${CONFIGURE_ENV})
|
||||
set(PYTHON_BINARY ${BUILD_DIR}/python/src/external_python/python)
|
||||
endif()
|
||||
# Link against zlib statically (Unix). Avoid rpath issues (macOS).
|
||||
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_unix.diff)
|
||||
set(PYTHON_PATCH ${PATCH_CMD} --verbose -p1 -d ${BUILD_DIR}/python/src/external_python < ${PATCH_DIR}/python_linux.diff)
|
||||
endif()
|
||||
|
||||
set(PYTHON_CONFIGURE_EXTRA_ARGS "--with-openssl=${LIBDIR}/ssl")
|
||||
set(PYTHON_CFLAGS "-I${LIBDIR}/sqlite/include -I${LIBDIR}/bzip2/include -I${LIBDIR}/lzma/include -I${LIBDIR}/zlib/include ${PLATFORM_CFLAGS}")
|
||||
set(PYTHON_LDFLAGS "-L${LIBDIR}/ffi/lib -L${LIBDIR}/sqlite/lib -L${LIBDIR}/bzip2/lib -L${LIBDIR}/lzma/lib -L${LIBDIR}/zlib/lib ${PLATFORM_LDFLAGS}")
|
||||
set(PYTHON_CFLAGS "-I${LIBDIR}/sqlite/include -I${LIBDIR}/bzip2/include -I${LIBDIR}/lzma/include -I${LIBDIR}/zlib/include")
|
||||
set(PYTHON_LDFLAGS "-L${LIBDIR}/ffi/lib -L${LIBDIR}/sqlite/lib -L${LIBDIR}/bzip2/lib -L${LIBDIR}/lzma/lib -L${LIBDIR}/zlib/lib")
|
||||
set(PYTHON_CONFIGURE_EXTRA_ENV
|
||||
export CFLAGS=${PYTHON_CFLAGS} &&
|
||||
export CPPFLAGS=${PYTHON_CFLAGS} &&
|
||||
|
@@ -16,27 +16,14 @@
|
||||
#
|
||||
# ***** END GPL LICENSE BLOCK *****
|
||||
|
||||
if(WIN32 AND BUILD_MODE STREQUAL Debug)
|
||||
set(SITE_PACKAGES_EXTRA --global-option build --global-option --debug)
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(external_python_site_packages
|
||||
DOWNLOAD_COMMAND ""
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
PREFIX ${BUILD_DIR}/site_packages
|
||||
INSTALL_COMMAND ${PYTHON_BINARY} -m pip install ${SITE_PACKAGES_EXTRA} cython==${CYTHON_VERSION} idna==${IDNA_VERSION} chardet==${CHARDET_VERSION} urllib3==${URLLIB3_VERSION} certifi==${CERTIFI_VERSION} requests==${REQUESTS_VERSION} --no-binary :all:
|
||||
INSTALL_COMMAND ${PYTHON_BINARY} -m pip install idna==${IDNA_VERSION} chardet==${CHARDET_VERSION} urllib3==${URLLIB3_VERSION} certifi==${CERTIFI_VERSION} requests==${REQUESTS_VERSION} --no-binary :all:
|
||||
)
|
||||
|
||||
if(USE_PIP_NUMPY)
|
||||
# Use only wheel (and not build from source) to stop NumPy from linking against buggy
|
||||
# Accelerate framework backend on macOS. Official wheels are built with OpenBLAS.
|
||||
ExternalProject_Add_Step(external_python_site_packages after_install
|
||||
COMMAND ${PYTHON_BINARY} -m pip install --no-cache-dir numpy==${NUMPY_VERSION} --only-binary :all:
|
||||
DEPENDEES install
|
||||
)
|
||||
endif()
|
||||
|
||||
add_dependencies(
|
||||
external_python_site_packages
|
||||
external_python
|
||||
|
@@ -42,21 +42,8 @@ if(UNIX)
|
||||
-DSQLITE_MAX_VARIABLE_NUMBER=250000 \
|
||||
-fPIC")
|
||||
set(SQLITE_CONFIGURE_ENV ${SQLITE_CONFIGURE_ENV} && export LDFLAGS=${SQLITE_LDFLAGS} && export CFLAGS=${SQLITE_CFLAGS})
|
||||
set(SQLITE_CONFIGURATION_ARGS
|
||||
${SQLITE_CONFIGURATION_ARGS}
|
||||
--enable-threadsafe
|
||||
--enable-load-extension
|
||||
--enable-json1
|
||||
--enable-fts4
|
||||
--enable-fts5
|
||||
# While building `tcl` is harmless, it causes problems when the install step
|
||||
# tries to copy the files into the system path.
|
||||
# Since this isn't required by Python or Blender this can be disabled.
|
||||
# Note that Debian (for example), splits this off into a separate package,
|
||||
# so it's safe to turn off.
|
||||
--disable-tcl
|
||||
--enable-shared=no
|
||||
)
|
||||
set(SQLITE_CONFIGURATION_ARGS ${SQLITE_CONFIGURATION_ARGS} --enable-threadsafe --enable-load-extension --enable-json1 --enable-fts4 --enable-fts5
|
||||
--enable-shared=no)
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(external_sqlite
|
||||
|
@@ -1,30 +0,0 @@
|
||||
# ***** BEGIN GPL LICENSE BLOCK *****
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ***** END GPL LICENSE BLOCK *****
|
||||
|
||||
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
ExternalProject_Add(external_sse2neon
GIT_REPOSITORY ${SSE2NEON_GIT}
GIT_TAG ${SSE2NEON_GIT_HASH}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
PREFIX ${BUILD_DIR}/sse2neon
CONFIGURE_COMMAND echo sse2neon - Nothing to configure
BUILD_COMMAND echo sse2neon - nothing to build
INSTALL_COMMAND mkdir -p ${LIBDIR}/sse2neon && cp ${BUILD_DIR}/sse2neon/src/external_sse2neon/sse2neon.h ${LIBDIR}/sse2neon
INSTALL_DIR ${LIBDIR}/sse2neon
)
endif()
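# Aside (not part of the commits above): sse2neon is header-only, so once the
# install step has copied sse2neon.h into ${LIBDIR}/sse2neon, a consumer only
# needs that include directory. "my_arm_target" below is a placeholder name,
# not a target defined anywhere in these build files.
if(APPLE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
  target_include_directories(my_arm_target PRIVATE ${LIBDIR}/sse2neon)
endif()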
@@ -20,7 +20,7 @@ set(SSL_CONFIGURE_COMMAND ./Configure)
|
||||
set(SSL_PATCH_CMD echo .)
|
||||
|
||||
if(APPLE)
|
||||
set(SSL_OS_COMPILER "blender-darwin-${CMAKE_OSX_ARCHITECTURES}")
|
||||
set(SSL_OS_COMPILER "blender-darwin-${CMAKE_OSX_ARCHITECTURES}")
|
||||
else()
|
||||
if("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
|
||||
set(SSL_EXTRA_ARGS enable-ec_nistp_64_gcc_128)
|
||||
|
@@ -42,8 +42,7 @@ ExternalProject_Add(external_tbb
|
||||
URL_HASH MD5=${TBB_HASH}
|
||||
PREFIX ${BUILD_DIR}/tbb
|
||||
PATCH_COMMAND COMMAND ${CMAKE_COMMAND} -E copy ${PATCH_DIR}/cmakelists_tbb.txt ${BUILD_DIR}/tbb/src/external_tbb/CMakeLists.txt &&
|
||||
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/tbb/src/external_tbb/build/vs2013/version_string.ver ${BUILD_DIR}/tbb/src/external_tbb/src/tbb/version_string.ver &&
|
||||
${PATCH_CMD} -p 1 -d ${BUILD_DIR}/tbb/src/external_tbb < ${PATCH_DIR}/tbb.diff
|
||||
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/tbb/src/external_tbb/build/vs2013/version_string.ver ${BUILD_DIR}/tbb/src/external_tbb/src/tbb/version_string.ver
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/tbb ${DEFAULT_CMAKE_FLAGS} ${TBB_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/tbb
|
||||
)
|
||||
|
@@ -13,27 +13,19 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ***** END GPL LICENSE BLOCK *****
|
||||
|
||||
set(INC
|
||||
../include
|
||||
../../blenkernel
|
||||
../../blenlib
|
||||
../../makesdna
|
||||
../../makesrna
|
||||
../../windowmanager
|
||||
../../../../intern/guardedalloc
|
||||
set(TINYXML_EXTRA_ARGS
|
||||
)
|
||||
|
||||
set(INC_SYS
|
||||
ExternalProject_Add(external_tinyxml
|
||||
URL ${TINYXML_URI}
|
||||
DOWNLOAD_DIR ${DOWNLOAD_DIR}
|
||||
URL_HASH MD5=${TINYXML_HASH}
|
||||
PREFIX ${BUILD_DIR}/tinyxml
|
||||
# patch taken from ocio
|
||||
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/tinyxml/src/external_tinyxml < ${PATCH_DIR}/tinyxml.diff
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/tinyxml ${DEFAULT_CMAKE_FLAGS} ${TINYXML_EXTRA_ARGS}
|
||||
INSTALL_DIR ${LIBDIR}/tinyxml
|
||||
)
|
||||
|
||||
set(SRC
|
||||
asset_edit.cc
|
||||
asset_ops.cc
|
||||
)
|
||||
|
||||
set(LIB
|
||||
)
|
||||
|
||||
blender_add_lib(bf_editor_asset "${SRC}" "${INC}" "${INC_SYS}" "${LIB}")
|
@@ -32,11 +32,11 @@ set(JPEG_VERSION 2.0.4)
|
||||
set(JPEG_URI https://github.com/libjpeg-turbo/libjpeg-turbo/archive/${JPEG_VERSION}.tar.gz)
|
||||
set(JPEG_HASH 44c43e4a9fb352f47090804529317c88)
|
||||
|
||||
set(BOOST_VERSION 1.73.0)
|
||||
set(BOOST_VERSION_NODOTS 1_73_0)
|
||||
set(BOOST_VERSION_NODOTS_SHORT 1_73)
|
||||
set(BOOST_VERSION 1.70.0)
|
||||
set(BOOST_VERSION_NODOTS 1_70_0)
|
||||
set(BOOST_VERSION_NODOTS_SHORT 1_70)
|
||||
set(BOOST_URI https://dl.bintray.com/boostorg/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz)
|
||||
set(BOOST_HASH 4036cd27ef7548b8d29c30ea10956196)
|
||||
set(BOOST_HASH fea771fe8176828fabf9c09242ee8c26)
|
||||
|
||||
# Using old version as recommended by OpenVDB build documentation.
|
||||
set(BLOSC_VERSION 1.5.0)
|
||||
@@ -47,9 +47,9 @@ set(PTHREADS_VERSION 3.0.0)
|
||||
set(PTHREADS_URI http://sourceforge.mirrorservice.org/p/pt/pthreads4w/pthreads4w-code-v${PTHREADS_VERSION}.zip)
|
||||
set(PTHREADS_HASH f3bf81bb395840b3446197bcf4ecd653)
|
||||
|
||||
set(OPENEXR_VERSION 2.5.5)
|
||||
set(OPENEXR_VERSION 2.4.0)
|
||||
set(OPENEXR_URI https://github.com/AcademySoftwareFoundation/openexr/archive/v${OPENEXR_VERSION}.tar.gz)
|
||||
set(OPENEXR_HASH 85e8a979092c9055d10ed103062d31a0)
|
||||
set(OPENEXR_HASH 9e4d69cf2a12c6fb19b98af7c5e0eaee)
|
||||
if(WIN32)
|
||||
# Openexr started appending _d on its own so now
|
||||
# we need to tell the build the postfix is _s while
|
||||
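# Aside (not part of the diff): the truncated comment above concerns the
# Windows library name postfix for OpenEXR. A minimal sketch of that idea
# follows; the exact postfix values are assumptions, the real logic lives in
# the surrounding versions.cmake.
if(WIN32)
  if(BUILD_MODE STREQUAL Release)
    set(OPENEXR_VERSION_POSTFIX _s)
  else()
    set(OPENEXR_VERSION_POSTFIX _s_d)
  endif()
else()
  set(OPENEXR_VERSION_POSTFIX)
endif()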
@@ -78,9 +78,9 @@ set(FREEGLUT_VERSION 3.0.0)
|
||||
set(FREEGLUT_URI http://pilotfiber.dl.sourceforge.net/project/freeglut/freeglut/${FREEGLUT_VERSION}/freeglut-${FREEGLUT_VERSION}.tar.gz)
|
||||
set(FREEGLUT_HASH 90c3ca4dd9d51cf32276bc5344ec9754)
|
||||
|
||||
set(ALEMBIC_VERSION 1.7.16)
|
||||
set(ALEMBIC_VERSION 1.7.12)
|
||||
set(ALEMBIC_URI https://github.com/alembic/alembic/archive/${ALEMBIC_VERSION}.tar.gz)
|
||||
set(ALEMBIC_MD5 effcc86e42fe6605588e3de57bde6677)
|
||||
set(ALEMBIC_MD5 e2b3777f23c5c09481a008cc6f0f8a40)
|
||||
|
||||
# hash is for 3.1.2
|
||||
set(GLFW_GIT_UID 30306e54705c3adae9fe082c816a3be71963485c)
|
||||
@@ -109,27 +109,19 @@ set(OPENCOLLADA_VERSION v1.6.68)
|
||||
set(OPENCOLLADA_URI https://github.com/KhronosGroup/OpenCOLLADA/archive/${OPENCOLLADA_VERSION}.tar.gz)
|
||||
set(OPENCOLLADA_HASH ee7dae874019fea7be11613d07567493)
|
||||
|
||||
set(OPENCOLORIO_VERSION 2.0.0)
|
||||
set(OPENCOLORIO_VERSION 1.1.1)
|
||||
set(OPENCOLORIO_URI https://github.com/AcademySoftwareFoundation/OpenColorIO/archive/v${OPENCOLORIO_VERSION}.tar.gz)
|
||||
set(OPENCOLORIO_HASH 1a2e3478b6cd9a1549f24e1b2205e3f0)
|
||||
set(OPENCOLORIO_HASH 23d8b9ac81599305539a5a8674b94a3d)
|
||||
|
||||
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
|
||||
# Newer version required by ISPC with arm support.
|
||||
set(LLVM_VERSION 11.0.1)
|
||||
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-project-${LLVM_VERSION}.src.tar.xz)
|
||||
set(LLVM_HASH e700af40ab83463e4e9ab0ba3708312e)
|
||||
set(LLVM_VERSION 9.0.1)
|
||||
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-${LLVM_VERSION}.src.tar.xz)
|
||||
set(LLVM_HASH 31eb9ce73dd2a0f8dcab8319fb03f8fc)
|
||||
|
||||
set(OPENMP_VERSION 9.0.1)
|
||||
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${OPENMP_VERSION}/openmp-${OPENMP_VERSION}.src.tar.xz)
|
||||
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
|
||||
else()
|
||||
set(LLVM_VERSION 9.0.1)
|
||||
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-project-${LLVM_VERSION}.tar.xz)
|
||||
set(LLVM_HASH b4268e733dfe352960140dc07ef2efcb)
|
||||
set(CLANG_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/clang-${LLVM_VERSION}.src.tar.xz)
|
||||
set(CLANG_HASH 13468e4a44940efef1b75e8641752f90)
|
||||
|
||||
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/openmp-${LLVM_VERSION}.src.tar.xz)
|
||||
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
|
||||
endif()
|
||||
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/openmp-${LLVM_VERSION}.src.tar.xz)
|
||||
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
|
||||
|
||||
set(OPENIMAGEIO_VERSION 2.1.15.0)
|
||||
set(OPENIMAGEIO_URI https://github.com/OpenImageIO/oiio/archive/Release-${OPENIMAGEIO_VERSION}.tar.gz)
|
||||
@@ -139,39 +131,38 @@ set(TIFF_VERSION 4.1.0)
|
||||
set(TIFF_URI http://download.osgeo.org/libtiff/tiff-${TIFF_VERSION}.tar.gz)
|
||||
set(TIFF_HASH 2165e7aba557463acc0664e71a3ed424)
|
||||
|
||||
set(OSL_VERSION 1.11.10.0)
|
||||
set(OSL_VERSION 1.10.10)
|
||||
set(OSL_URI https://github.com/imageworks/OpenShadingLanguage/archive/Release-${OSL_VERSION}.tar.gz)
|
||||
set(OSL_HASH dfdc23597aeef083832cbada62211756)
|
||||
set(OSL_HASH 00dec08a93c8084e53848b9ad047889f)
|
||||
|
||||
set(PYTHON_VERSION 3.9.2)
|
||||
set(PYTHON_SHORT_VERSION 3.9)
|
||||
set(PYTHON_SHORT_VERSION_NO_DOTS 39)
|
||||
set(PYTHON_VERSION 3.7.7)
|
||||
set(PYTHON_SHORT_VERSION 3.7)
|
||||
set(PYTHON_SHORT_VERSION_NO_DOTS 37)
|
||||
set(PYTHON_URI https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tar.xz)
|
||||
set(PYTHON_HASH f0dc9000312abeb16de4eccce9a870ab)
|
||||
set(PYTHON_HASH 172c650156f7bea68ce31b2fd01fa766)
|
||||
|
||||
set(TBB_VERSION 2020_U2)
|
||||
set(TBB_VERSION 2019_U9)
|
||||
set(TBB_URI https://github.com/oneapi-src/oneTBB/archive/${TBB_VERSION}.tar.gz)
|
||||
set(TBB_HASH 1b711ae956524855088df3bbf5ec65dc)
|
||||
set(TBB_HASH 26263622e9187212ec240dcf01b66207)
|
||||
|
||||
set(OPENVDB_VERSION 8.0.1)
|
||||
set(OPENVDB_VERSION 7.0.0)
|
||||
set(OPENVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/v${OPENVDB_VERSION}.tar.gz)
|
||||
set(OPENVDB_HASH 01b490be16cc0e15c690f9a153c21461)
|
||||
set(OPENVDB_HASH fd6c4f168282f7e0e494d290cd531fa8)
|
||||
|
||||
set(NANOVDB_GIT_UID e62f7a0bf1e27397223c61ddeaaf57edf111b77f)
|
||||
set(NANOVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/${NANOVDB_GIT_UID}.tar.gz)
|
||||
set(NANOVDB_HASH 90919510bc6ccd630fedc56f748cb199)
|
||||
|
||||
set(IDNA_VERSION 2.10)
|
||||
set(CHARDET_VERSION 4.0.0)
|
||||
set(URLLIB3_VERSION 1.26.3)
|
||||
set(CERTIFI_VERSION 2020.12.5)
|
||||
set(REQUESTS_VERSION 2.25.1)
|
||||
set(CYTHON_VERSION 0.29.21)
|
||||
set(IDNA_VERSION 2.9)
|
||||
set(CHARDET_VERSION 3.0.4)
|
||||
set(URLLIB3_VERSION 1.25.9)
|
||||
set(CERTIFI_VERSION 2020.4.5.2)
|
||||
set(REQUESTS_VERSION 2.23.0)
|
||||
|
||||
set(NUMPY_VERSION 1.19.5)
|
||||
set(NUMPY_SHORT_VERSION 1.19)
|
||||
set(NUMPY_VERSION 1.17.5)
|
||||
set(NUMPY_SHORT_VERSION 1.17)
|
||||
set(NUMPY_URI https://github.com/numpy/numpy/releases/download/v${NUMPY_VERSION}/numpy-${NUMPY_VERSION}.zip)
|
||||
set(NUMPY_HASH f6a1b48717c552bbc18f1adc3cc1fe0e)
|
||||
set(NUMPY_HASH 763a5646fa6eef7a22f4895bca0524f2)
|
||||
|
||||
set(LAME_VERSION 3.100)
|
||||
set(LAME_URI http://downloads.sourceforge.net/project/lame/lame/3.100/lame-${LAME_VERSION}.tar.gz)
|
||||
@@ -262,16 +253,16 @@ set(YAMLCPP_VERSION 0.6.3)
|
||||
set(YAMLCPP_URI https://codeload.github.com/jbeder/yaml-cpp/tar.gz/yaml-cpp-${YAMLCPP_VERSION})
|
||||
set(YAMLCPP_HASH b45bf1089a382e81f6b661062c10d0c2)
|
||||
|
||||
set(EXPAT_VERSION 2_2_10)
|
||||
set(EXPAT_URI https://github.com/libexpat/libexpat/archive/R_${EXPAT_VERSION}.tar.gz)
|
||||
set(EXPAT_HASH 7ca5f09959fcb9a57618368deb627b9f)
|
||||
set(LCMS_VERSION 2.9)
|
||||
set(LCMS_URI https://nchc.dl.sourceforge.net/project/lcms/lcms/${LCMS_VERSION}/lcms2-${LCMS_VERSION}.tar.gz)
|
||||
set(LCMS_HASH 8de1b7724f578d2995c8fdfa35c3ad0e)
|
||||
|
||||
set(PUGIXML_VERSION 1.10)
|
||||
set(PUGIXML_URI https://github.com/zeux/pugixml/archive/v${PUGIXML_VERSION}.tar.gz)
|
||||
set(PUGIXML_HASH 0c208b0664c7fb822bf1b49ad035e8fd)
|
||||
|
||||
set(FLEXBISON_VERSION 2.5.5)
|
||||
set(FLEXBISON_URI http://prdownloads.sourceforge.net/winflexbison/win_flex_bison-2.5.5.zip)
|
||||
set(FLEXBISON_URI http://prdownloads.sourceforge.net/winflexbison//win_flex_bison-2.5.5.zip)
|
||||
set(FLEXBISON_HASH d87a3938194520d904013abef3df10ce)
|
||||
|
||||
# Libraries to keep Python modules static on Linux.
|
||||
@@ -301,41 +292,34 @@ set(SQLITE_HASH fb558c49ee21a837713c4f1e7e413309aabdd9c7)
|
||||
set(EMBREE_VERSION 3.10.0)
|
||||
set(EMBREE_URI https://github.com/embree/embree/archive/v${EMBREE_VERSION}.zip)
|
||||
set(EMBREE_HASH 4bbe29e7eaa46417efc75fc5f1e8eb87)
|
||||
set(EMBREE_ARM_GIT https://github.com/brechtvl/embree.git)
|
||||
|
||||
set(USD_VERSION 21.02)
|
||||
set(USD_VERSION 20.05)
|
||||
set(USD_URI https://github.com/PixarAnimationStudios/USD/archive/v${USD_VERSION}.tar.gz)
|
||||
set(USD_HASH 1dd1e2092d085ed393c1f7c450a4155a)
|
||||
set(USD_HASH 6d679e739e7f65725d9c029e37dda9fc)
|
||||
|
||||
set(OIDN_VERSION 1.3.0)
|
||||
set(OIDN_VERSION 1.2.3)
|
||||
set(OIDN_URI https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz)
|
||||
set(OIDN_HASH 301a5a0958d375a942014df0679b9270)
|
||||
set(OIDN_HASH 1f11466c2c3efc27faba5ec7078d12b2)
|
||||
|
||||
set(LIBGLU_VERSION 9.0.1)
|
||||
set(LIBGLU_URI ftp://ftp.freedesktop.org/pub/mesa/glu/glu-${LIBGLU_VERSION}.tar.xz)
|
||||
set(LIBGLU_HASH 151aef599b8259efe9acd599c96ea2a3)
|
||||
|
||||
set(MESA_VERSION 20.3.4)
|
||||
set(MESA_URI ftp://ftp.freedesktop.org/pub/mesa/mesa-${MESA_VERSION}.tar.xz)
|
||||
set(MESA_HASH 556338446aef8ae947a789b3e0b5e056)
|
||||
set(MESA_VERSION 18.3.1)
|
||||
set(MESA_URI ftp://ftp.freedesktop.org/pub/mesa//mesa-${MESA_VERSION}.tar.xz)
|
||||
set(MESA_HASH d60828056d77bfdbae0970f9b15fb1be)
|
||||
|
||||
set(NASM_VERSION 2.15.02)
|
||||
set(NASM_URI https://github.com/netwide-assembler/nasm/archive/nasm-${NASM_VERSION}.tar.gz)
|
||||
set(NASM_HASH aded8b796c996a486a56e0515c83e414116decc3b184d88043480b32eb0a8589)
|
||||
set(NASM_URI https://www.nasm.us/pub/nasm/releasebuilds/${NASM_VERSION}/nasm-${NASM_VERSION}.tar.xz)
|
||||
set(NASM_HASH f4fd1329b1713e1ccd34b2fc121c4bcd278c9f91cc4cb205ae8fcd2e4728dd14)
|
||||
|
||||
set(XR_OPENXR_SDK_VERSION 1.0.14)
|
||||
set(XR_OPENXR_SDK_VERSION 1.0.8)
|
||||
set(XR_OPENXR_SDK_URI https://github.com/KhronosGroup/OpenXR-SDK/archive/release-${XR_OPENXR_SDK_VERSION}.tar.gz)
|
||||
set(XR_OPENXR_SDK_HASH 0df6b2fd6045423451a77ff6bc3e1a75)
|
||||
set(XR_OPENXR_SDK_HASH c6de63d2e0f9029aa58dfa97cad8ce07)
|
||||
|
||||
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))
|
||||
# Unreleased version with macOS arm support.
|
||||
set(ISPC_URI https://github.com/ispc/ispc/archive/f5949c055eb9eeb93696978a3da4bfb3a6a30b35.zip)
|
||||
set(ISPC_HASH d382fea18d01dbd0cd05d9e1ede36d7d)
|
||||
else()
|
||||
set(ISPC_VERSION v1.14.1)
|
||||
set(ISPC_URI https://github.com/ispc/ispc/archive/${ISPC_VERSION}.tar.gz)
|
||||
set(ISPC_HASH 968fbc8dfd16a60ba4e32d2e0e03ea7a)
|
||||
endif()
|
||||
set(ISPC_VERSION v1.14.1)
|
||||
set(ISPC_URI https://github.com/ispc/ispc/archive/${ISPC_VERSION}.tar.gz)
|
||||
set(ISPC_HASH 968fbc8dfd16a60ba4e32d2e0e03ea7a)
|
||||
|
||||
set(GMP_VERSION 6.2.0)
|
||||
set(GMP_URI https://gmplib.org/download/gmp/gmp-${GMP_VERSION}.tar.xz)
|
||||
@@ -344,10 +328,3 @@ set(GMP_HASH a325e3f09e6d91e62101e59f9bda3ec1)
|
||||
set(POTRACE_VERSION 1.16)
|
||||
set(POTRACE_URI http://potrace.sourceforge.net/download/${POTRACE_VERSION}/potrace-${POTRACE_VERSION}.tar.gz)
|
||||
set(POTRACE_HASH 5f0bd87ddd9a620b0c4e65652ef93d69)
|
||||
|
||||
set(HARU_VERSION 2_3_0)
|
||||
set(HARU_URI https://github.com/libharu/libharu/archive/RELEASE_${HARU_VERSION}.tar.gz)
|
||||
set(HARU_HASH 4f916aa49c3069b3a10850013c507460)
|
||||
|
||||
set(SSE2NEON_GIT https://github.com/DLTcollab/sse2neon.git)
|
||||
set(SSE2NEON_GIT_HASH fe5ff00bb8d19b327714a3c290f3e2ce81ba3525)
|
||||
|
@@ -53,7 +53,7 @@ if(WIN32)
|
||||
endif()
|
||||
if(BUILD_MODE STREQUAL Debug)
|
||||
ExternalProject_Add_Step(external_xr_openxr_sdk after_install
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/xr_openxr_sdk/lib/openxr_loaderd.lib ${HARVEST_TARGET}/xr_openxr_sdk/lib/openxr_loaderd.lib
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/xr_openxr_sdk/lib/openxr_loader.lib ${HARVEST_TARGET}/xr_openxr_sdk/lib/openxr_loader_d.lib
|
||||
DEPENDEES install
|
||||
)
|
||||
endif()
|
||||
|
@@ -17,18 +17,13 @@
# ***** END GPL LICENSE BLOCK *****

set(YAMLCPP_EXTRA_ARGS
-DBUILD_GMOCK=OFF
-DYAML_CPP_BUILD_TESTS=OFF
-DYAML_CPP_BUILD_TOOLS=OFF
-DYAML_CPP_BUILD_CONTRIB=OFF
-DYAML_MSVC_SHARED_RT=ON
)

if(WIN32)
set(YAMLCPP_EXTRA_ARGS
${YAMLCPP_EXTRA_ARGS}
-DBUILD_GMOCK=OFF
-DYAML_MSVC_SHARED_RT=ON)
endif()

ExternalProject_Add(external_yamlcpp
URL ${YAMLCPP_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}

File diff suppressed because it is too large
@@ -20,7 +20,6 @@ if(APPLE)
|
||||
endif()
|
||||
|
||||
file(GLOB tbb_src "${CMAKE_CURRENT_SOURCE_DIR}/src/tbb/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/src/old/*.cpp")
|
||||
list(REMOVE_ITEM tbb_src ${CMAKE_CURRENT_SOURCE_DIR}/src/tbb/tbb_bind.cpp)
|
||||
list(APPEND tbb_src ${CMAKE_CURRENT_SOURCE_DIR}/src/rml/client/rml_tbb.cpp)
|
||||
file(GLOB to_remove "${CMAKE_CURRENT_SOURCE_DIR}/src/old/test*.cpp")
|
||||
list(REMOVE_ITEM tbb_src ${to_remove})
|
||||
|
@@ -1,12 +0,0 @@
|
||||
diff --git a/src/hpdf_image_ccitt.c b/src/hpdf_image_ccitt.c
|
||||
index 8672763..9be531a 100644
|
||||
--- a/src/hpdf_image_ccitt.c
|
||||
+++ b/src/hpdf_image_ccitt.c
|
||||
@@ -21,7 +21,6 @@
|
||||
#include <memory.h>
|
||||
#include <assert.h>
|
||||
|
||||
-#define G3CODES
|
||||
#include "t4.h"
|
||||
|
||||
typedef unsigned int uint32;
|
@@ -1,5 +1,5 @@
|
||||
--- a/llvm/lib/Support/Unix/Path.inc 2020-02-17 09:24:26.000000000 +0100
|
||||
+++ b/llvm/lib/Support/Unix/Path.inc 2020-02-17 09:26:25.000000000 +0100
|
||||
--- a/lib/Support/Unix/Path.inc 2020-02-17 09:24:26.000000000 +0100
|
||||
+++ b/lib/Support/Unix/Path.inc 2020-02-17 09:26:25.000000000 +0100
|
||||
@@ -1200,7 +1200,9 @@
|
||||
/// implementation.
|
||||
std::error_code copy_file(const Twine &From, const Twine &To) {
|
||||
|
@@ -1,40 +0,0 @@
|
||||
diff -Naur oidn-1.3.0/cmake/FindTBB.cmake external_openimagedenoise/cmake/FindTBB.cmake
|
||||
--- oidn-1.3.0/cmake/FindTBB.cmake 2021-02-04 16:20:26 -0700
|
||||
+++ external_openimagedenoise/cmake/FindTBB.cmake 2021-02-12 09:35:53 -0700
|
||||
@@ -332,20 +332,22 @@
|
||||
${TBB_ROOT}/lib/${TBB_ARCH}/${TBB_VCVER}
|
||||
${TBB_ROOT}/lib
|
||||
)
|
||||
-
|
||||
# On Windows, also search the DLL so that the client may install it.
|
||||
file(GLOB DLL_NAMES
|
||||
${TBB_ROOT}/bin/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
|
||||
${TBB_ROOT}/bin/${LIB_NAME}.dll
|
||||
+ ${TBB_ROOT}/lib/${LIB_NAME}.dll
|
||||
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
|
||||
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB1}.dll
|
||||
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB2}.dll
|
||||
${TBB_ROOT}/../redist/${TBB_ARCH}/tbb/${TBB_VCVER}/${LIB_NAME}.dll
|
||||
${TBB_ROOT}/../redist/${TBB_ARCH}_win/tbb/${TBB_VCVER}/${LIB_NAME}.dll
|
||||
)
- list(GET DLL_NAMES 0 DLL_NAME)
- get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
- set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ if (DLL_NAMES)
+ list(GET DLL_NAMES 0 DLL_NAME)
+ get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
+ set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ endif()
elseif(APPLE)
set(LIB_PATHS ${TBB_ROOT}/lib)
else()
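# Aside (not part of the patch): the hunk above only wraps list(GET) in a
# guard so an empty glob result no longer errors out. The bare pattern, with
# placeholder variable names and paths:
file(GLOB DLL_NAMES "${TBB_ROOT}/bin/*.dll")
if(DLL_NAMES)
  list(GET DLL_NAMES 0 DLL_NAME)
  get_filename_component(TBB_BIN_DIR "${DLL_NAME}" DIRECTORY)
endif()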
--- external_openimagedenoise/cmake/oidn_ispc.cmake 2021-02-15 17:29:34.000000000 +0100
|
||||
+++ external_openimagedenoise/cmake/oidn_ispc.cmake2 2021-02-15 17:29:28.000000000 +0100
|
||||
@@ -98,7 +98,7 @@
|
||||
elseif(OIDN_ARCH STREQUAL "ARM64")
|
||||
set(ISPC_ARCHITECTURE "aarch64")
|
||||
if(APPLE)
|
||||
- set(ISPC_TARGET_OS "--target-os=ios")
|
||||
+ set(ISPC_TARGET_OS "--target-os=macos")
|
||||
endif()
|
||||
endif()
|
@@ -1,16 +1,33 @@
|
||||
diff --git a/share/cmake/modules/Findpystring.cmake b/share/cmake/modules/Findpystring.cmake
|
||||
index 7b894a45..92618215 100644
|
||||
--- a/share/cmake/modules/Findpystring.cmake
|
||||
+++ b/share/cmake/modules/Findpystring.cmake
|
||||
@@ -113,6 +113,11 @@ if(NOT pystring_FOUND)
|
||||
-DCMAKE_INSTALL_MESSAGE=${CMAKE_INSTALL_MESSAGE}
|
||||
-DCMAKE_INSTALL_PREFIX=${_EXT_DIST_ROOT}
|
||||
-DCMAKE_OBJECT_PATH_MAX=${CMAKE_OBJECT_PATH_MAX}
|
||||
+ -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES}
|
||||
+ -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}
|
||||
+ -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT}
|
||||
+ -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG}
|
||||
+ -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}
|
||||
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
|
||||
index 1eb691b..cff9bd8 100644
|
||||
--- a/src/core/CMakeLists.txt
|
||||
+++ b/src/core/CMakeLists.txt
|
||||
@@ -23,8 +23,6 @@ if(WIN32)
|
||||
if("${CMAKE_BUILD_TYPE}" STREQUAL "Release")
|
||||
set(EXTERNAL_COMPILE_FLAGS "${EXTERNAL_COMPILE_FLAGS} /WX")
|
||||
endif()
|
||||
-else()
|
||||
- set(EXTERNAL_COMPILE_FLAGS "${EXTERNAL_COMPILE_FLAGS} -Werror")
|
||||
endif()
|
||||
|
||||
# SHARED
|
||||
--- a/CMakeLists.txt 2018-09-10 22:15:29.000000000 +0200
|
||||
+++ b/CMakeLists.txt 2018-09-10 22:17:40.000000000 +0200
|
||||
@@ -229,7 +229,7 @@
|
||||
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-whitespace ${TINYXML_PATCHFILE}
|
||||
BINARY_DIR ext/build/tinyxml
|
||||
INSTALL_DIR ext/dist
|
||||
- CMAKE_ARGS ${TINYXML_CMAKE_ARGS}
|
||||
+ CMAKE_ARGS ${TINYXML_CMAKE_ARGS} -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET} -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} -DCMAKE_C_FLAGS_DEBUG=${CMAKE_C_FLAGS_DEBUG} -DCMAKE_C_FLAGS_RELEASE=${CMAKE_C_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG} -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
|
||||
)
|
||||
if(CMAKE_TOOLCHAIN_FILE)
|
||||
set(pystring_CMAKE_ARGS
|
||||
if(WIN32)
|
||||
set(TINYXML_STATIC_LIBRARIES ${PROJECT_BINARY_DIR}/ext/dist/lib/tinyxml.lib)
|
||||
@@ -343,7 +343,7 @@
|
||||
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-whitespace ${YAML_CPP_PATCHFILE}
|
||||
BINARY_DIR ext/build/yaml-cpp
|
||||
INSTALL_DIR ext/dist
|
||||
- CMAKE_ARGS ${YAML_CPP_CMAKE_ARGS}
|
||||
+ CMAKE_ARGS ${YAML_CPP_CMAKE_ARGS} -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET} -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} -DCMAKE_C_FLAGS_DEBUG=${CMAKE_C_FLAGS_DEBUG} -DCMAKE_C_FLAGS_RELEASE=${CMAKE_C_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG} -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE} -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
|
||||
)
|
||||
set(YAML_CPP_INCLUDE_DIRS ${PROJECT_BINARY_DIR}/ext/dist/include)
|
||||
set(YAML_CPP_LIBRARY_DIRS ${PROJECT_BINARY_DIR}/ext/dist/lib)
|
||||
|
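# Aside (not part of the patch): the opencolorio.diff hunks above all make the
# same kind of change - forwarding the outer build's macOS toolchain settings
# into the nested pystring/tinyxml/yaml-cpp ExternalProject builds. The shared
# argument list, as a sketch (the variable name is illustrative):
set(NESTED_OSX_CMAKE_ARGS
  -DCMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES}
  -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}
  -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT}
)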
51
build_files/build_environment/patches/opencolorio_win.diff
Normal file
@@ -0,0 +1,51 @@
|
||||
diff -Naur external_opencolorio/CMakeLists.txt external_opencolorio.patched/CMakeLists.txt
|
||||
--- external_opencolorio/CMakeLists.txt 2018-01-04 18:38:27 -0700
|
||||
+++ external_opencolorio.patched/CMakeLists.txt 2018-08-15 11:46:53 -0600
|
||||
@@ -251,25 +251,30 @@
|
||||
if(USE_EXTERNAL_YAML)
|
||||
# Set minimum yaml version for non-patched sources.
|
||||
set(YAML_VERSION_MIN "0.3.0")
|
||||
- include(FindPkgConfig)
|
||||
- pkg_check_modules(PC_YAML_CPP REQUIRED QUIET yaml-cpp)
|
||||
- find_path(YAML_CPP_INCLUDE_DIR yaml-cpp/yaml.h
|
||||
- HINTS ${PC_YAML_CPP_INCLUDEDIR} ${PC_YAML_CPP_INCLUDE_DIRS} )
|
||||
- find_library(YAML_CPP_LIBRARY LIBRARY_NAMES yaml-cpp libyaml-cpp
|
||||
- HINTS ${PC_YAML_CPP_LIBRARY_DIRS} )
|
||||
- set(YAML_CPP_LIBRARIES ${YAML_CPP_LIBRARY})
|
||||
- set(YAML_CPP_INCLUDE_DIRS ${YAML_CPP_INCLUDE_DIR})
|
||||
- set(YAML_CPP_VERSION ${PC_YAML_CPP_VERSION})
|
||||
+ if(NOT WIN32)
|
||||
+ include(FindPkgConfig)
|
||||
+ pkg_check_modules(PC_YAML_CPP REQUIRED QUIET yaml-cpp)
|
||||
+ find_path(YAML_CPP_INCLUDE_DIR yaml-cpp/yaml.h
|
||||
+ HINTS ${PC_YAML_CPP_INCLUDEDIR} ${PC_YAML_CPP_INCLUDE_DIRS} )
|
||||
+ find_library(YAML_CPP_LIBRARY LIBRARY_NAMES yaml-cpp libyaml-cpp
|
||||
+ HINTS ${PC_YAML_CPP_LIBRARY_DIRS} )
|
||||
+ set(YAML_CPP_LIBRARIES ${YAML_CPP_LIBRARY})
|
||||
+ set(YAML_CPP_INCLUDE_DIRS ${YAML_CPP_INCLUDE_DIR})
|
||||
+ set(YAML_CPP_VERSION ${PC_YAML_CPP_VERSION})
|
||||
|
||||
- if(YAML_CPP_VERSION VERSION_LESS ${YAML_VERSION_MIN})
|
||||
- message(FATAL_ERROR "ERROR: yaml-cpp ${YAML_VERSION_MIN} or greater is required.")
|
||||
- endif()
|
||||
-
|
||||
- find_package_handle_standard_args(yaml-cpp
|
||||
- REQUIRED_VARS YAML_CPP_LIBRARIES YAML_CPP_INCLUDE_DIRS )
|
||||
- set(YAML_CPP_FOUND ${YAML-CPP_FOUND})
|
||||
- mark_as_advanced(YAML_CPP_INCLUDE_DIR YAML_CPP_LIBRARY YAML-CPP_FOUND)
|
||||
+ if(YAML_CPP_VERSION VERSION_LESS ${YAML_VERSION_MIN})
|
||||
+ message(FATAL_ERROR "ERROR: yaml-cpp ${YAML_VERSION_MIN} or greater is required.")
|
||||
+ endif()
|
||||
|
||||
+ find_package_handle_standard_args(yaml-cpp
|
||||
+ REQUIRED_VARS YAML_CPP_LIBRARIES YAML_CPP_INCLUDE_DIRS )
|
||||
+ set(YAML_CPP_FOUND ${YAML-CPP_FOUND})
|
||||
+ mark_as_advanced(YAML_CPP_INCLUDE_DIR YAML_CPP_LIBRARY YAML-CPP_FOUND)
|
||||
+ else()
|
||||
+ set(EXTERNAL_INCLUDE_DIRS ${EXTERNAL_INCLUDE_DIRS} ${INC_1})
|
||||
+ set(EXTERNAL_INCLUDE_DIRS ${EXTERNAL_INCLUDE_DIRS} ${INC_2})
|
||||
+ message("INCLUDE DIRS = i:${EXTERNAL_INCLUDE_DIRS} |1:${INC_1} |2:${INC_2}")
|
||||
+ endif()
|
||||
if(YAML_CPP_FOUND)
|
||||
if(YAML_CPP_VERSION VERSION_GREATER "0.5.0")
|
||||
# Need to also get the boost headers here, as yaml-cpp 0.5.0+ requires them.
|
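# Aside (not part of the patch): condensed shape of the opencolorio_win.diff
# change above. Off Windows, yaml-cpp is still discovered via pkg-config; on
# Windows the patched CMakeLists trusts the include dirs handed in by the
# superbuild (the INC_1/INC_2 values set in opencolorio.cmake earlier).
if(NOT WIN32)
  include(FindPkgConfig)
  pkg_check_modules(PC_YAML_CPP REQUIRED QUIET yaml-cpp)
else()
  list(APPEND EXTERNAL_INCLUDE_DIRS ${INC_1} ${INC_2})
endif()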
@@ -1,10 +1,10 @@
|
||||
diff -Naur openvdb-8.0.0/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
|
||||
--- openvdb-8.0.0/cmake/FindIlmBase.cmake 2020-12-24 10:13:14 -0700
|
||||
+++ openvdb/cmake/FindIlmBase.cmake 2021-02-05 12:07:49 -0700
|
||||
@@ -217,6 +217,12 @@
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib")
|
||||
endif()
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${_IlmBase_Version_Suffix}.lib")
|
||||
diff -Naur orig/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
|
||||
--- orig/cmake/FindIlmBase.cmake 2019-12-06 12:11:33 -0700
|
||||
+++ openvdb/cmake/FindIlmBase.cmake 2020-08-12 12:48:44 -0600
|
||||
@@ -225,6 +225,12 @@
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
"-${IlmBase_VERSION_MAJOR}_${IlmBase_VERSION_MINOR}.lib"
|
||||
)
|
||||
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
+ "_s.lib"
|
||||
+ )
|
||||
@@ -13,14 +13,14 @@ diff -Naur openvdb-8.0.0/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
|
||||
+ )
|
||||
else()
|
||||
if(ILMBASE_USE_STATIC_LIBS)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
|
||||
diff -Naur openvdb-8.0.0/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
|
||||
--- openvdb-8.0.0/cmake/FindOpenEXR.cmake 2020-12-24 10:13:14 -0700
|
||||
+++ openvdb/cmake/FindOpenEXR.cmake 2021-02-05 12:23:39 -0700
|
||||
@@ -210,6 +210,12 @@
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib")
|
||||
endif()
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${_OpenEXR_Version_Suffix}.lib")
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
diff -Naur orig/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
|
||||
--- orig/cmake/FindOpenEXR.cmake 2019-12-06 12:11:33 -0700
|
||||
+++ openvdb/cmake/FindOpenEXR.cmake 2020-08-12 12:48:44 -0600
|
||||
@@ -218,6 +218,12 @@
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
"-${OpenEXR_VERSION_MAJOR}_${OpenEXR_VERSION_MINOR}.lib"
|
||||
)
|
||||
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
+ "_s.lib"
|
||||
+ )
|
||||
@@ -29,11 +29,11 @@ diff -Naur openvdb-8.0.0/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
|
||||
+ )
|
||||
else()
|
||||
if(OPENEXR_USE_STATIC_LIBS)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
|
||||
diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/CMakeLists.txt
|
||||
--- openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt 2020-12-24 10:13:14 -0700
|
||||
+++ openvdb/openvdb/openvdb/CMakeLists.txt 2021-02-05 11:18:33 -0700
|
||||
@@ -107,7 +107,9 @@
|
||||
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
diff -Naur orig/openvdb/CMakeLists.txt openvdb/openvdb/CMakeLists.txt
|
||||
--- orig/openvdb/CMakeLists.txt 2019-12-06 12:11:33 -0700
|
||||
+++ openvdb/openvdb/CMakeLists.txt 2020-08-12 14:12:26 -0600
|
||||
@@ -105,7 +105,9 @@
|
||||
# http://boost.2283326.n4.nabble.com/CMake-config-scripts-broken-in-1-70-td4708957.html
|
||||
# https://github.com/boostorg/boost_install/commit/160c7cb2b2c720e74463865ef0454d4c4cd9ae7c
|
||||
set(BUILD_SHARED_LIBS ON)
|
||||
@@ -44,15 +44,15 @@ diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/
|
||||
endif()
|
||||
|
||||
find_package(Boost ${MINIMUM_BOOST_VERSION} REQUIRED COMPONENTS iostreams system)
|
||||
@@ -146,6 +148,7 @@
|
||||
Boost::disable_autolinking # add -DBOOST_ALL_NO_LIB
|
||||
)
|
||||
@@ -193,6 +195,7 @@
|
||||
if(OPENVDB_DISABLE_BOOST_IMPLICIT_LINKING)
|
||||
add_definitions(-DBOOST_ALL_NO_LIB)
|
||||
endif()
|
||||
+ add_definitions(-D__TBB_NO_IMPLICIT_LINKAGE -DOPENVDB_OPENEXR_STATICLIB)
|
||||
endif()
|
||||
|
||||
if(USE_EXR)
|
||||
@@ -379,7 +382,12 @@
|
||||
# @todo Should be target definitions
|
||||
@@ -383,7 +386,12 @@
|
||||
# imported targets.
|
||||
|
||||
if(OPENVDB_CORE_SHARED)
|
||||
@@ -66,9 +66,9 @@ diff -Naur openvdb-8.0.0/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/
|
||||
endif()
|
||||
|
||||
if(OPENVDB_CORE_STATIC)
|
||||
diff -Naur openvdb-8.0.0/openvdb/openvdb/version.rc.in openvdb/openvdb/openvdb/version.rc.in
|
||||
--- openvdb-8.0.0/openvdb/openvdb/version.rc.in 1969-12-31 17:00:00 -0700
|
||||
+++ openvdb/openvdb/openvdb/version.rc.in 2021-02-05 11:18:33 -0700
|
||||
diff -Naur orig/openvdb/version.rc.in openvdb/openvdb/version.rc.in
|
||||
--- orig/openvdb/version.rc.in 1969-12-31 17:00:00 -0700
|
||||
+++ openvdb/openvdb/version.rc.in 2020-08-12 14:15:01 -0600
|
||||
@@ -0,0 +1,48 @@
|
||||
+#include <winver.h>
|
||||
+
|
||||
|
@@ -43,23 +43,27 @@ diff -Naur OpenShadingLanguage-Release-1.9.9/src/liboslexec/llvm_util.cpp extern
|
||||
-
|
||||
+void LLVM_Util::Cleanup ()
|
||||
+{
|
||||
+ if(jitmm_hold) jitmm_hold->clear();
|
||||
+ jitmm_hold.clear();
|
||||
+}
|
||||
|
||||
size_t
|
||||
LLVM_Util::total_jit_memory_held ()
|
||||
diff -Naur org/CMakeLists.txt external_osl/CMakeLists.txt
|
||||
--- org/CMakeLists.txt 2020-12-01 12:37:15 -0700
|
||||
+++ external_osl/CMakeLists.txt 2021-01-20 13:26:50 -0700
|
||||
@@ -84,6 +84,11 @@
|
||||
CACHE STRING "Directory where OptiX PTX files will be installed")
|
||||
set (CMAKE_DEBUG_POSTFIX "" CACHE STRING "Library naming postfix for Debug builds (e.g., '_debug')")
|
||||
|
||||
diff -Naur OpenShadingLanguage-Release-1.9.9/CMakeLists.txt external_osl/CMakeLists.txt
|
||||
--- orig/CMakeLists.txt 2020-01-27 16:22:31 -0700
|
||||
+++ external_osl/CMakeLists.txt 2020-05-13 18:04:52 -0600
|
||||
@@ -102,10 +102,11 @@
|
||||
set (OPTIX_EXTRA_LIBS CACHE STRING "Extra lib targets needed for OptiX")
|
||||
set (CUDA_TARGET_ARCH "sm_35" CACHE STRING "CUDA GPU architecture (e.g. sm_35)")
|
||||
|
||||
-# set (USE_OIIO_STATIC ON CACHE BOOL "If OIIO is built static")
|
||||
-# if (USE_OIIO_STATIC)
|
||||
-# add_definitions ("-DOIIO_STATIC_BUILD=1")
|
||||
-# endif ()
|
||||
+set (USE_OIIO_STATIC ON CACHE BOOL "If OIIO is built static")
|
||||
+if (USE_OIIO_STATIC)
|
||||
+ add_definitions ("-DOIIO_STATIC_BUILD=1")
|
||||
+ add_definitions ("-DOIIO_STATIC_DEFINE=1")
|
||||
+endif ()
|
||||
|
||||
|
||||
set (OSL_NO_DEFAULT_TEXTURESYSTEM OFF CACHE BOOL "Do not use create a raw OIIO::TextureSystem")
|
||||
if (OSL_NO_DEFAULT_TEXTURESYSTEM)
|
||||
if (OSL_NO_DEFAULT_TEXTURESYSTEM)
|
@@ -2,23 +2,23 @@ diff --git a/setup.py.orig b/setup.py
|
||||
index a97a755..07ce853 100644
|
||||
--- a/setup.py.orig
|
||||
+++ b/setup.py
|
||||
@@ -1603,13 +1603,13 @@
|
||||
@@ -1422,13 +1422,13 @@ class PyBuildExt(build_ext):
|
||||
version = line.split()[2]
|
||||
break
|
||||
if version >= version_req:
|
||||
- if (self.compiler.find_library_file(self.lib_dirs, 'z')):
|
||||
+ if (self.compiler.find_library_file(self.lib_dirs, 'z_pic')):
|
||||
if MACOS:
|
||||
- if (self.compiler.find_library_file(lib_dirs, 'z')):
|
||||
+ if (self.compiler.find_library_file(lib_dirs, 'z_pic')):
|
||||
if host_platform == "darwin":
|
||||
zlib_extra_link_args = ('-Wl,-search_paths_first',)
|
||||
else:
|
||||
zlib_extra_link_args = ()
|
||||
self.add(Extension('zlib', ['zlibmodule.c'],
|
||||
- libraries=['z'],
|
||||
+ libraries=['z_pic'],
|
||||
extra_link_args=zlib_extra_link_args))
|
||||
exts.append( Extension('zlib', ['zlibmodule.c'],
|
||||
- libraries = ['z'],
|
||||
+ libraries = ['z_pic'],
|
||||
extra_link_args = zlib_extra_link_args))
|
||||
have_zlib = True
|
||||
else:
|
||||
@@ -1623,7 +1623,7 @@
|
||||
@@ -1442,7 +1442,7 @@ class PyBuildExt(build_ext):
|
||||
# crc32 if we have it. Otherwise binascii uses its own.
|
||||
if have_zlib:
|
||||
extra_compile_args = ['-DUSE_ZLIB_CRC32']
|
||||
@@ -27,12 +27,12 @@ index a97a755..07ce853 100644
|
||||
extra_link_args = zlib_extra_link_args
|
||||
else:
|
||||
extra_compile_args = []
|
||||
@@ -2168,7 +2168,7 @@
|
||||
ffi_inc = None
|
||||
@@ -1991,7 +1991,7 @@ class PyBuildExt(build_ext):
|
||||
print('Header file {} does not exist'.format(ffi_h))
|
||||
if ffi_lib is None and ffi_inc:
|
||||
ffi_lib = None
|
||||
if ffi_inc is not None:
|
||||
- for lib_name in ('ffi', 'ffi_pic'):
|
||||
+ for lib_name in ('ffi_pic', ):
|
||||
if (self.compiler.find_library_file(self.lib_dirs, lib_name)):
|
||||
if (self.compiler.find_library_file(lib_dirs, lib_name)):
|
||||
ffi_lib = lib_name
|
||||
break
|
289
build_files/build_environment/patches/python_macos.diff
Normal file
@@ -0,0 +1,289 @@
|
||||
diff -ru a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst
|
||||
--- a/Doc/library/ctypes.rst 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/Doc/library/ctypes.rst 2020-07-14 08:10:10.000000000 +0200
|
||||
@@ -1551,6 +1551,13 @@
|
||||
value usable as argument (integer, string, ctypes instance). This allows
|
||||
defining adapters that can adapt custom objects as function parameters.
|
||||
|
||||
+ .. attribute:: variadic
|
||||
+
|
||||
+ Assign a boolean to specify that the function takes a variable number of
|
||||
+ arguments. This does not matter on most platforms, but for Apple arm64
|
||||
+ platforms variadic functions have a different calling convention than
|
||||
+ normal functions.
|
||||
+
|
||||
.. attribute:: errcheck
|
||||
|
||||
Assign a Python function or another callable to this attribute. The
|
||||
diff -ru a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
|
||||
--- a/Modules/_ctypes/_ctypes.c 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/Modules/_ctypes/_ctypes.c 2020-07-14 08:14:41.000000000 +0200
|
||||
@@ -3175,6 +3175,35 @@
|
||||
}
|
||||
|
||||
static int
|
||||
+PyCFuncPtr_set_variadic(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ignored))
|
||||
+{
|
||||
+ StgDictObject *dict = PyObject_stgdict((PyObject *)self);
|
||||
+ assert(dict);
|
||||
+ int r = PyObject_IsTrue(ob);
|
||||
+ if (r == 1) {
|
||||
+ dict->flags |= FUNCFLAG_VARIADIC;
|
||||
+ return 0;
|
||||
+ } else if (r == 0) {
|
||||
+ dict->flags &= ~FUNCFLAG_VARIADIC;
|
||||
+ return 0;
|
||||
+ } else {
|
||||
+ return -1;
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+static PyObject *
|
||||
+PyCFuncPtr_get_variadic(PyCFuncPtrObject *self, void *Py_UNUSED(ignored))
|
||||
+{
|
||||
+ StgDictObject *dict = PyObject_stgdict((PyObject *)self);
|
||||
+ assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */
|
||||
+ if (dict->flags & FUNCFLAG_VARIADIC)
|
||||
+ Py_RETURN_TRUE;
|
||||
+ else
|
||||
+ Py_RETURN_FALSE;
|
||||
+}
|
||||
+
|
||||
+
|
||||
+static int
|
||||
PyCFuncPtr_set_argtypes(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ignored))
|
||||
{
|
||||
PyObject *converters;
|
||||
@@ -5632,6 +5661,7 @@
|
||||
PyModule_AddObject(m, "FUNCFLAG_USE_ERRNO", PyLong_FromLong(FUNCFLAG_USE_ERRNO));
|
||||
PyModule_AddObject(m, "FUNCFLAG_USE_LASTERROR", PyLong_FromLong(FUNCFLAG_USE_LASTERROR));
|
||||
PyModule_AddObject(m, "FUNCFLAG_PYTHONAPI", PyLong_FromLong(FUNCFLAG_PYTHONAPI));
|
||||
+ PyModule_AddObject(m, "FUNCFLAG_VARIADIC", PyLong_FromLong(FUNCFLAG_VARIADIC));
|
||||
PyModule_AddStringConstant(m, "__version__", "1.1.0");
|
||||
|
||||
PyModule_AddObject(m, "_memmove_addr", PyLong_FromVoidPtr(memmove));
|
||||
diff -ru a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
|
||||
--- a/Modules/_ctypes/callproc.c 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/Modules/_ctypes/callproc.c 2020-07-14 08:18:33.000000000 +0200
|
||||
@@ -767,7 +767,8 @@
|
||||
ffi_type **atypes,
|
||||
ffi_type *restype,
|
||||
void *resmem,
|
||||
- int argcount)
|
||||
+ int argcount,
|
||||
+ int argtypecount)
|
||||
{
|
||||
PyThreadState *_save = NULL; /* For Py_BLOCK_THREADS and Py_UNBLOCK_THREADS */
|
||||
PyObject *error_object = NULL;
|
||||
@@ -793,15 +794,38 @@
|
||||
if ((flags & FUNCFLAG_CDECL) == 0)
|
||||
cc = FFI_STDCALL;
|
||||
#endif
|
||||
- if (FFI_OK != ffi_prep_cif(&cif,
|
||||
- cc,
|
||||
- argcount,
|
||||
- restype,
|
||||
- atypes)) {
|
||||
- PyErr_SetString(PyExc_RuntimeError,
|
||||
- "ffi_prep_cif failed");
|
||||
- return -1;
|
||||
- }
|
||||
+#if HAVE_FFI_PREP_CIF_VAR
|
||||
+ /* Everyone SHOULD set f.variadic=True on variadic function pointers, but
|
||||
+ * lots of existing code will not. If there's at least one arg and more
|
||||
+ * args are passed than are defined in the prototype, then it must be a
|
||||
+ * variadic function. */
|
||||
+ if ((flags & FUNCFLAG_VARIADIC) ||
|
||||
+ (argtypecount != 0 && argcount > argtypecount))
|
||||
+ {
|
||||
+ if (FFI_OK != ffi_prep_cif_var(&cif,
|
||||
+ cc,
|
||||
+ argtypecount,
|
||||
+ argcount,
|
||||
+ restype,
|
||||
+ atypes)) {
|
||||
+ PyErr_SetString(PyExc_RuntimeError,
|
||||
+ "ffi_prep_cif_var failed");
|
||||
+ return -1;
|
||||
+ }
|
||||
+ } else {
|
||||
+#endif
|
||||
+ if (FFI_OK != ffi_prep_cif(&cif,
|
||||
+ cc,
|
||||
+ argcount,
|
||||
+ restype,
|
||||
+ atypes)) {
|
||||
+ PyErr_SetString(PyExc_RuntimeError,
|
||||
+ "ffi_prep_cif failed");
|
||||
+ return -1;
|
||||
+ }
|
||||
+#if HAVE_FFI_PREP_CIF_VAR
|
||||
+ }
|
||||
+#endif
|
||||
|
||||
if (flags & (FUNCFLAG_USE_ERRNO | FUNCFLAG_USE_LASTERROR)) {
|
||||
error_object = _ctypes_get_errobj(&space);
|
||||
@@ -1185,9 +1209,8 @@
|
||||
|
||||
if (-1 == _call_function_pointer(flags, pProc, avalues, atypes,
|
||||
rtype, resbuf,
|
||||
- Py_SAFE_DOWNCAST(argcount,
|
||||
- Py_ssize_t,
|
||||
- int)))
|
||||
+ Py_SAFE_DOWNCAST(argcount, Py_ssize_t, int),
|
||||
+ Py_SAFE_DOWNCAST(argtype_count, Py_ssize_t, int)))
|
||||
goto cleanup;
|
||||
|
||||
#ifdef WORDS_BIGENDIAN
|
||||
diff -ru a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h
|
||||
--- a/Modules/_ctypes/ctypes.h 2020-03-10 07:11:12.000000000 +0100
|
||||
+++ b/Modules/_ctypes/ctypes.h 2020-07-14 08:30:53.000000000 +0200
|
||||
@@ -285,6 +285,7 @@
#define FUNCFLAG_PYTHONAPI 0x4
#define FUNCFLAG_USE_ERRNO 0x8
#define FUNCFLAG_USE_LASTERROR 0x10
+#define FUNCFLAG_VARIADIC 0x20

#define TYPEFLAG_ISPOINTER 0x100
#define TYPEFLAG_HASPOINTER 0x200

diff -ru a/configure b/configure
--- a/configure 2020-03-10 07:11:12.000000000 +0100
+++ b/configure 2020-07-14 08:03:27.000000000 +0200
@@ -3374,7 +3374,7 @@
# has no effect, don't bother defining them
Darwin/[6789].*)
define_xopen_source=no;;
- Darwin/1[0-9].*)
+ Darwin/[12][0-9].*)
define_xopen_source=no;;
# On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but
# used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined
@@ -9251,6 +9251,9 @@
ppc)
MACOSX_DEFAULT_ARCH="ppc64"
;;
+ arm64)
+ MACOSX_DEFAULT_ARCH="arm64"
+ ;;
*)
as_fn_error $? "Unexpected output of 'arch' on OSX" "$LINENO" 5
;;

diff -ru a/configure.ac b/configure.ac
--- a/configure.ac 2020-03-10 07:11:12.000000000 +0100
+++ b/configure.ac 2020-07-14 08:03:27.000000000 +0200
@@ -2456,6 +2456,9 @@
ppc)
MACOSX_DEFAULT_ARCH="ppc64"
;;
+ arm64)
+ MACOSX_DEFAULT_ARCH="arm64"
+ ;;
*)
AC_MSG_ERROR([Unexpected output of 'arch' on OSX])
;;

diff -ru a/setup.py b/setup.py
--- a/setup.py 2020-03-10 07:11:12.000000000 +0100
+++ b/setup.py 2020-07-14 08:28:12.000000000 +0200
@@ -141,6 +141,13 @@
os.unlink(tmpfile)

return MACOS_SDK_ROOT
+
+def is_macosx_at_least(vers):
+ if host_platform == 'darwin':
+ dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+ if dep_target:
+ return tuple(map(int, dep_target.split('.'))) >= vers
+ return False

def is_macosx_sdk_path(path):
"""
@@ -150,6 +157,13 @@
or path.startswith('/System/')
or path.startswith('/Library/') )

+def grep_headers_for(function, headers):
+ for header in headers:
+ with open(header, 'r') as f:
+ if function in f.read():
+ return True
+ return False
+
def find_file(filename, std_dirs, paths):
"""Searches for the directory where a given file is located,
and returns a possibly-empty list of additional directories, or None
@@ -1972,7 +1986,11 @@
return True

def detect_ctypes(self, inc_dirs, lib_dirs):
- self.use_system_libffi = False
+ if not sysconfig.get_config_var("LIBFFI_INCLUDEDIR") and is_macosx_at_least((10,15)):
+ self.use_system_libffi = True
+ else:
+ self.use_system_libffi = '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS")
+
include_dirs = []
extra_compile_args = []
extra_link_args = []
@@ -2016,32 +2034,48 @@
ext_test = Extension('_ctypes_test',
sources=['_ctypes/_ctypes_test.c'],
libraries=['m'])
+ ffi_inc = sysconfig.get_config_var("LIBFFI_INCLUDEDIR")
+ ffi_lib = None
+
self.extensions.extend([ext, ext_test])

if host_platform == 'darwin':
- if '--with-system-ffi' not in sysconfig.get_config_var("CONFIG_ARGS"):
+ if not self.use_system_libffi:
return
- # OS X 10.5 comes with libffi.dylib; the include files are
- # in /usr/include/ffi
- inc_dirs.append('/usr/include/ffi')
-
- ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
- if not ffi_inc or ffi_inc[0] == '':
- ffi_inc = find_file('ffi.h', [], inc_dirs)
- if ffi_inc is not None:
- ffi_h = ffi_inc[0] + '/ffi.h'
+ ffi_in_sdk = os.path.join(macosx_sdk_root(), "usr/include/ffi")
+ if os.path.exists(ffi_in_sdk):
+ ffi_inc = ffi_in_sdk
+ ffi_lib = 'ffi'
+ else:
+ # OS X 10.5 comes with libffi.dylib; the include files are
+ # in /usr/include/ffi
+ ffi_inc_dirs.append('/usr/include/ffi')
+
+ if not ffi_inc:
+ found = find_file('ffi.h', [], ffi_inc_dirs)
+ if found:
+ ffi_inc = found[0]
+ if ffi_inc:
+ ffi_h = ffi_inc + '/ffi.h'
if not os.path.exists(ffi_h):
ffi_inc = None
print('Header file {} does not exist'.format(ffi_h))
- ffi_lib = None
- if ffi_inc is not None:
+ if ffi_lib is None and ffi_inc:
for lib_name in ('ffi', 'ffi_pic'):
if (self.compiler.find_library_file(lib_dirs, lib_name)):
ffi_lib = lib_name
break

if ffi_inc and ffi_lib:
- ext.include_dirs.extend(ffi_inc)
+ ffi_headers = glob(os.path.join(ffi_inc, '*.h'))
+ if grep_headers_for('ffi_closure_alloc', ffi_headers):
+ try:
+ sources.remove('_ctypes/malloc_closure.c')
+ except ValueError:
+ pass
+ if grep_headers_for('ffi_prep_cif_var', ffi_headers):
+ ext.extra_compile_args.append("-DHAVE_FFI_PREP_CIF_VAR=1")
+ ext.include_dirs.append(ffi_inc)
ext.libraries.append(ffi_lib)
self.use_system_libffi = True
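For reference, the deployment-target check introduced above is a plain tuple comparison. A minimal illustrative sketch, not part of the patch (the version strings below are made up):

# Illustrative sketch of how is_macosx_at_least() decides: version strings
# become integer tuples, which Python compares element-wise.
def at_least(dep_target, vers):
    return tuple(map(int, dep_target.split('.'))) >= vers

print(at_least('11.0', (10, 15)))   # True  -> prefer the system libffi
print(at_least('10.14', (10, 15)))  # False -> fall back to CONFIG_ARGS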
@@ -1,13 +0,0 @@
diff --git a/include/tbb/tbb_config.h b/include/tbb/tbb_config.h
index 7a8d06a0..886699d8 100644
--- a/include/tbb/tbb_config.h
+++ b/include/tbb/tbb_config.h
@@ -620,7 +620,7 @@ There are four cases that are supported:
// instantiation site, which is too late for suppression of the corresponding messages for internal
// stuff.
#if !defined(__INTEL_COMPILER) && (!defined(TBB_SUPPRESS_DEPRECATED_MESSAGES) || (TBB_SUPPRESS_DEPRECATED_MESSAGES == 0))
- #if (__cplusplus >= 201402L)
+ #if (__cplusplus >= 201402L && (!defined(_MSC_VER) || _MSC_VER >= 1920))
#define __TBB_DEPRECATED [[deprecated]]
#define __TBB_DEPRECATED_MSG(msg) [[deprecated(msg)]]
#elif _MSC_VER
@@ -26,30 +26,36 @@ diff -Naur external_usd_base/cmake/macros/Public.cmake external_usd/cmake/macros
endforeach()
foreach(lib ${PXR_OBJECT_LIBS})
set(objects "${objects};\$<TARGET_OBJECTS:${lib}>")

diff -ru USD-20.11/pxr/base/tf/pxrLZ4/lz4.cpp external_usd/pxr/base/tf/pxrLZ4/lz4.cpp
--- USD-20.11/pxr/base/tf/pxrLZ4/lz4.cpp 2020-10-14 19:25:19.000000000 +0100
+++ external_usd/pxr/base/tf/pxrLZ4/lz4.cpp 2021-02-09 09:28:51.496190085 +0000
@@ -614,6 +614,15 @@
/*-************************************
* Internal Definitions used in Tests
**************************************/
+
+/*******************************************************************
+ * Disabled in Blender. The BLOSC library also exposes these
+ * functions, and this causes 'duplicate symbol' linker errors.
+ *
+ * This problem has been reported upstream at
+ * https://github.com/PixarAnimationStudios/USD/issues/1447
+ *
+ *******************************************************************
#if defined (__cplusplus)
extern "C" {
#endif
@@ -627,6 +636,7 @@
#if defined (__cplusplus)
}
#endif
+********************************************************************/

diff --git a/pxr/base/arch/align.h b/pxr/base/arch/align.h
index f3cabf4..ebc8a69 100644
--- a/pxr/base/arch/align.h
+++ b/pxr/base/arch/align.h
@@ -77,7 +77,11 @@ ArchAlignMemory(void *base)
/// The size of a CPU cache line on the current processor architecture in bytes.
///
/// \hideinitializer
+#if defined(ARCH_OS_DARWIN) && defined(ARCH_CPU_ARM)
+#define ARCH_CACHE_LINE_SIZE 128
+#else
#define ARCH_CACHE_LINE_SIZE 64
+#endif

/*-******************************
* Compression functions
///@}

diff --git a/pxr/base/arch/math.h b/pxr/base/arch/math.h
index 3e66c37..64a052c 100644
--- a/pxr/base/arch/math.h
+++ b/pxr/base/arch/math.h
@@ -42,7 +42,7 @@ PXR_NAMESPACE_OPEN_SCOPE
/// \addtogroup group_arch_Math
///@{

-#if defined (ARCH_CPU_INTEL) || defined(doxygen)
+#if defined (ARCH_CPU_INTEL) || defined(ARCH_CPU_ARM) || defined(doxygen)

/// This is the smallest value e such that 1+e^2 == 1, using floats.
/// True for all IEEE754 chipsets.
@@ -117,7 +117,7 @@ set path=%BUILD_DIR%\downloads\mingw\mingw64\msys\1.0\bin\;%BUILD_DIR%\downloads
mkdir %STAGING%\%BuildDir%%ARCH%R
cd %Staging%\%BuildDir%%ARCH%R
echo %DATE% %TIME% : Start > %StatusFile%
cmake -G "%CMAKE_BUILDER%" -Thost=x64 %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DBUILD_MODE=Release -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/
cmake -G "%CMAKE_BUILDER%" %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DBUILD_MODE=Release -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/
echo %DATE% %TIME% : Release Configuration done >> %StatusFile%
if "%dobuild%" == "1" (
msbuild /m "ll.vcxproj" /p:Configuration=Release /fl /flp:logfile=BlenderDeps_llvm.log;Verbosity=normal
@@ -130,7 +130,7 @@ if "%NODEBUG%" == "1" goto exit
cd %BUILD_DIR%
mkdir %STAGING%\%BuildDir%%ARCH%D
cd %Staging%\%BuildDir%%ARCH%D
cmake -G "%CMAKE_BUILDER%" -Thost=x64 %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DCMAKE_BUILD_TYPE=Debug -DBUILD_MODE=Debug -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/ %CMAKE_DEBUG_OPTIONS%
cmake -G "%CMAKE_BUILDER%" %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DCMAKE_BUILD_TYPE=Debug -DBUILD_MODE=Debug -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/ %CMAKE_DEBUG_OPTIONS%
echo %DATE% %TIME% : Debug Configuration done >> %StatusFile%
if "%dobuild%" == "1" (
msbuild /m "ll.vcxproj" /p:Configuration=Debug /fl /flp:logfile=BlenderDeps_llvm.log;;Verbosity=normal
@@ -88,6 +88,7 @@ class VersionInfo:
self.short_version = "%d.%02d" % (version_numbers[0], version_numbers[1])
self.version = "%d.%02d.%d" % version_numbers
self.version_cycle = self._parse_header_file(blender_h, 'BLENDER_VERSION_CYCLE')
self.version_cycle_number = self._parse_header_file(blender_h, 'BLENDER_VERSION_CYCLE_NUMBER')
self.hash = self._parse_header_file(buildinfo_h, 'BUILD_HASH')[1:-1]

if self.version_cycle == "release":
@@ -96,7 +97,8 @@ class VersionInfo:
self.is_development_build = False
elif self.version_cycle == "rc":
# Release candidate
self.full_version = self.version + self.version_cycle
version_cycle = self.version_cycle + self.version_cycle_number
self.full_version = self.version + version_cycle
self.is_development_build = False
else:
# Development build
@@ -64,12 +64,7 @@ class WindowsCodeSigner(BaseCodeSigner):

def run_codesign_tool(self, filepath: Path) -> None:
command = self.get_sign_command_prefix() + [filepath]

try:
codesign_output = self.check_output_or_mock(command, util.Platform.WINDOWS)
except subprocess.CalledProcessError as e:
raise SigntoolException(f'Error running signtool {e}')

codesign_output = self.check_output_or_mock(command, util.Platform.WINDOWS)
logger_server.info(f'signtool output:\n{codesign_output}')

got_number_of_success = False
@@ -42,7 +42,7 @@ def get_cmake_options(builder):
elif builder.platform == 'linux':
config_file = "build_files/buildbot/config/blender_linux.cmake"

optix_sdk_dir = os.path.join(builder.blender_dir, '..', '..', 'NVIDIA-Optix-SDK-7.1')
optix_sdk_dir = os.path.join(builder.blender_dir, '..', '..', 'NVIDIA-Optix-SDK')
options.append('-DOPTIX_ROOT_DIR:PATH=' + optix_sdk_dir)

# Workaround to build sm_30 kernels with CUDA 10, since CUDA 11 no longer supports that architecture
@@ -19,7 +19,7 @@
#=============================================================================

IF(NOT AUDASPACE_ROOT_DIR AND NOT $ENV{AUDASPACE_ROOT_DIR} STREQUAL "")
SET(AUDASPACE_ROOT_DIR $ENV{AUDASPACE_ROOT_DIR})
SET(AUDASPACE_ROOT_DIR $ENV{AUDASPACE_ROOT_DIR})
ENDIF()

SET(_audaspace_SEARCH_DIRS
@@ -1,111 +0,0 @@
|
||||
# - Find Clang library
|
||||
# Find the native Clang includes and library
|
||||
# This module defines
|
||||
# CLANG_INCLUDE_DIRS, where to find AST/AST.h, Set when
|
||||
# CLANG_INCLUDE_DIR is found.
|
||||
# CLANG_LIBRARIES, libraries to link against to use Clang.
|
||||
# CLANG_ROOT_DIR, The base directory to search for Clang.
|
||||
# This can also be an environment variable.
|
||||
# CLANG_FOUND, If false, do not try to use Clang.
|
||||
|
||||
#=============================================================================
|
||||
# Copyright 2021 Blender Foundation.
|
||||
#
|
||||
# Distributed under the OSI-approved BSD 3-Clause License,
|
||||
# see accompanying file BSD-3-Clause-license.txt for details.
|
||||
#=============================================================================
|
||||
|
||||
# If CLANG_ROOT_DIR was defined in the environment, use it.
|
||||
if(NOT CLANG_ROOT_DIR AND NOT $ENV{CLANG_ROOT_DIR} STREQUAL "")
|
||||
set(CLANG_ROOT_DIR $ENV{CLANG_ROOT_DIR})
|
||||
endif()
|
||||
|
||||
set(_CLANG_SEARCH_DIRS
|
||||
${CLANG_ROOT_DIR}
|
||||
/opt/lib/clang
|
||||
)
|
||||
|
||||
find_path(CLANG_INCLUDE_DIR
|
||||
NAMES
|
||||
AST/AST.h
|
||||
HINTS
|
||||
${_CLANG_SEARCH_DIRS}
|
||||
PATH_SUFFIXES
|
||||
include
|
||||
include/clang
|
||||
)
|
||||
|
||||
|
||||
set(_CLANG_FIND_COMPONENTS
|
||||
clangDependencyScanning
|
||||
clangDynamicASTMatchers
|
||||
clangFrontendTool
|
||||
clangStaticAnalyzerFrontend
|
||||
clangHandleCXX
|
||||
clangStaticAnalyzerCheckers
|
||||
clangStaticAnalyzerCore
|
||||
clangToolingASTDiff
|
||||
clangToolingRefactoring
|
||||
clangToolingSyntax
|
||||
clangARCMigrate
|
||||
clangCodeGen
|
||||
clangCrossTU
|
||||
clangIndex
|
||||
clangTooling
|
||||
clangFormat
|
||||
clangToolingInclusions
|
||||
clangRewriteFrontend
|
||||
clangFrontend
|
||||
clangSerialization
|
||||
clangDriver
|
||||
clangToolingCore
|
||||
clangParse
|
||||
clangRewrite
|
||||
clangSema
|
||||
clangEdit
|
||||
clangAnalysis
|
||||
clangASTMatchers
|
||||
clangAST
|
||||
clangLex
|
||||
clangBasic
|
||||
)
|
||||
|
||||
set(_CLANG_LIBRARIES)
|
||||
foreach(COMPONENT ${_CLANG_FIND_COMPONENTS})
|
||||
string(TOUPPER ${COMPONENT} UPPERCOMPONENT)
|
||||
|
||||
find_library(CLANG_${UPPERCOMPONENT}_LIBRARY
|
||||
NAMES
|
||||
${COMPONENT}
|
||||
HINTS
|
||||
${_CLANG_SEARCH_DIRS}
|
||||
PATH_SUFFIXES
|
||||
lib64 lib
|
||||
)
|
||||
list(APPEND _CLANG_LIBRARIES "${CLANG_${UPPERCOMPONENT}_LIBRARY}")
|
||||
endforeach()
|
||||
|
||||
|
||||
# Handle the QUIETLY and REQUIRED arguments and set CLANG_FOUND to TRUE if
|
||||
# all listed variables are TRUE.
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(Clang DEFAULT_MSG
|
||||
_CLANG_LIBRARIES CLANG_INCLUDE_DIR)
|
||||
|
||||
if(CLANG_FOUND)
|
||||
set(CLANG_LIBRARIES ${_CLANG_LIBRARIES})
|
||||
set(CLANG_INCLUDE_DIRS ${CLANG_INCLUDE_DIR})
|
||||
endif()
|
||||
|
||||
mark_as_advanced(
|
||||
CLANG_INCLUDE_DIR
|
||||
)
|
||||
|
||||
foreach(COMPONENT ${_CLANG_FIND_COMPONENTS})
|
||||
string(TOUPPER ${COMPONENT} UPPERCOMPONENT)
|
||||
mark_as_advanced(CLANG_${UPPERCOMPONENT}_LIBRARY)
|
||||
endforeach()
|
||||
|
||||
unset(_CLANG_SEARCH_DIRS)
|
||||
unset(_CLANG_FIND_COMPONENTS)
|
||||
unset(_CLANG_LIBRARIES)
|
@@ -34,8 +34,6 @@ set(_clang_tidy_SEARCH_DIRS
# TODO(sergey): Find more reliable way of finding the latest clang-tidy.
find_program(CLANG_TIDY_EXECUTABLE
NAMES
clang-tidy-12
clang-tidy-11
clang-tidy-10
clang-tidy-9
clang-tidy-8
@@ -34,17 +34,12 @@ FIND_PATH(EMBREE_INCLUDE_DIR
|
||||
include
|
||||
)
|
||||
|
||||
IF(NOT (APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")))
|
||||
SET(_embree_SIMD_COMPONENTS
|
||||
embree_sse42
|
||||
embree_avx
|
||||
embree_avx2
|
||||
)
|
||||
ENDIF()
|
||||
|
||||
SET(_embree_FIND_COMPONENTS
|
||||
embree3
|
||||
${_embree_SIMD_COMPONENTS}
|
||||
embree_sse42
|
||||
embree_avx
|
||||
embree_avx2
|
||||
lexers
|
||||
math
|
||||
simd
|
||||
@@ -64,14 +59,14 @@ FOREACH(COMPONENT ${_embree_FIND_COMPONENTS})
|
||||
PATH_SUFFIXES
|
||||
lib64 lib
|
||||
)
|
||||
IF(NOT EMBREE_${UPPERCOMPONENT}_LIBRARY)
|
||||
IF(EMBREE_EMBREE3_LIBRARY)
|
||||
IF (NOT EMBREE_${UPPERCOMPONENT}_LIBRARY)
|
||||
IF (EMBREE_EMBREE3_LIBRARY)
|
||||
# If we can't find all the static libraries, try to fall back to the shared library if found.
|
||||
# This allows building with a shared embree library
|
||||
SET(_embree_LIBRARIES ${EMBREE_EMBREE3_LIBRARY})
|
||||
BREAK()
|
||||
ENDIF()
|
||||
ENDIF()
|
||||
ENDIF ()
|
||||
ENDIF ()
|
||||
LIST(APPEND _embree_LIBRARIES "${EMBREE_${UPPERCOMPONENT}_LIBRARY}")
|
||||
ENDFOREACH()
|
||||
|
||||
|
@@ -1,66 +0,0 @@
|
||||
# - Find HARU library
|
||||
# Find the native Haru includes and library
|
||||
# This module defines
|
||||
# HARU_INCLUDE_DIRS, where to find hpdf.h, set when
|
||||
# HARU_INCLUDE_DIR is found.
|
||||
# HARU_LIBRARIES, libraries to link against to use Haru.
|
||||
# HARU_ROOT_DIR, The base directory to search for Haru.
|
||||
# This can also be an environment variable.
|
||||
# HARU_FOUND, If false, do not try to use Haru.
|
||||
#
|
||||
# also defined, but not for general use are
|
||||
# HARU_LIBRARY, where to find the Haru library.
|
||||
|
||||
#=============================================================================
|
||||
# Copyright 2021 Blender Foundation.
|
||||
#
|
||||
# Distributed under the OSI-approved BSD 3-Clause License,
|
||||
# see accompanying file BSD-3-Clause-license.txt for details.
|
||||
#=============================================================================
|
||||
|
||||
# If HARU_ROOT_DIR was defined in the environment, use it.
|
||||
if(NOT HARU_ROOT_DIR AND NOT $ENV{HARU_ROOT_DIR} STREQUAL "")
|
||||
set(HARU_ROOT_DIR $ENV{HARU_ROOT_DIR})
|
||||
endif()
|
||||
|
||||
set(_haru_SEARCH_DIRS
|
||||
${HARU_ROOT_DIR}
|
||||
/opt/lib/haru
|
||||
)
|
||||
|
||||
find_path(HARU_INCLUDE_DIR
|
||||
NAMES
|
||||
hpdf.h
|
||||
HINTS
|
||||
${_haru_SEARCH_DIRS}
|
||||
PATH_SUFFIXES
|
||||
include/haru
|
||||
include
|
||||
)
|
||||
|
||||
find_library(HARU_LIBRARY
|
||||
NAMES
|
||||
hpdfs
|
||||
hpdf
|
||||
HINTS
|
||||
${_haru_SEARCH_DIRS}
|
||||
PATH_SUFFIXES
|
||||
lib64 lib
|
||||
)
|
||||
|
||||
# Handle the QUIETLY and REQUIRED arguments and set HARU_FOUND to TRUE if
|
||||
# all listed variables are TRUE.
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(Haru DEFAULT_MSG HARU_LIBRARY HARU_INCLUDE_DIR)
|
||||
|
||||
if(HARU_FOUND)
|
||||
set(HARU_LIBRARIES ${HARU_LIBRARY})
|
||||
set(HARU_INCLUDE_DIRS ${HARU_INCLUDE_DIR})
|
||||
endif()
|
||||
|
||||
mark_as_advanced(
|
||||
HARU_INCLUDE_DIR
|
||||
HARU_LIBRARY
|
||||
)
|
||||
|
||||
unset(_haru_SEARCH_DIRS)
|
@@ -26,8 +26,7 @@ ENDIF()
|
||||
SET(_opencolorio_FIND_COMPONENTS
|
||||
OpenColorIO
|
||||
yaml-cpp
|
||||
expat
|
||||
pystring
|
||||
tinyxml
|
||||
)
|
||||
|
||||
SET(_opencolorio_SEARCH_DIRS
|
||||
@@ -61,23 +60,12 @@ FOREACH(COMPONENT ${_opencolorio_FIND_COMPONENTS})
|
||||
ENDIF()
|
||||
ENDFOREACH()
|
||||
|
||||
IF(EXISTS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h")
|
||||
# Search twice, because this symbol changed between OCIO 1.x and 2.x
|
||||
FILE(STRINGS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h" _opencolorio_version
|
||||
REGEX "^#define OCIO_VERSION_STR[ \t].*$")
|
||||
IF(NOT _opencolorio_version)
|
||||
file(STRINGS "${OPENCOLORIO_INCLUDE_DIR}/OpenColorIO/OpenColorABI.h" _opencolorio_version
|
||||
REGEX "^#define OCIO_VERSION[ \t].*$")
|
||||
ENDIF()
|
||||
STRING(REGEX MATCHALL "[0-9]+[.0-9]+" OPENCOLORIO_VERSION ${_opencolorio_version})
|
||||
ENDIF()
|
||||
|
||||
# handle the QUIETLY and REQUIRED arguments and set OPENCOLORIO_FOUND to TRUE if
|
||||
# all listed variables are TRUE
|
||||
INCLUDE(FindPackageHandleStandardArgs)
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenColorIO
|
||||
REQUIRED_VARS _opencolorio_LIBRARIES OPENCOLORIO_INCLUDE_DIR
|
||||
VERSION_VAR OPENCOLORIO_VERSION)
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenColorIO DEFAULT_MSG
|
||||
_opencolorio_LIBRARIES OPENCOLORIO_INCLUDE_DIR)
|
||||
|
||||
IF(OPENCOLORIO_FOUND)
|
||||
SET(OPENCOLORIO_LIBRARIES ${_opencolorio_LIBRARIES})
|
||||
@@ -90,7 +78,6 @@ MARK_AS_ADVANCED(
|
||||
OPENCOLORIO_OPENCOLORIO_LIBRARY
|
||||
OPENCOLORIO_TINYXML_LIBRARY
|
||||
OPENCOLORIO_YAML-CPP_LIBRARY
|
||||
OPENCOLORIO_VERSION
|
||||
)
|
||||
|
||||
UNSET(COMPONENT)
|
||||
|
@@ -49,7 +49,7 @@ FIND_LIBRARY(PUGIXML_LIBRARY
# handle the QUIETLY and REQUIRED arguments and set PUGIXML_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(PugiXML DEFAULT_MSG
FIND_PACKAGE_HANDLE_STANDARD_ARGS(PUGIXML DEFAULT_MSG
PUGIXML_LIBRARY PUGIXML_INCLUDE_DIR)

IF(PUGIXML_FOUND)
@@ -34,7 +34,7 @@ IF(NOT PYTHON_ROOT_DIR AND NOT $ENV{PYTHON_ROOT_DIR} STREQUAL "")
|
||||
SET(PYTHON_ROOT_DIR $ENV{PYTHON_ROOT_DIR})
|
||||
ENDIF()
|
||||
|
||||
SET(PYTHON_VERSION 3.9 CACHE STRING "Python Version (major and minor only)")
|
||||
SET(PYTHON_VERSION 3.7 CACHE STRING "Python Version (major and minor only)")
|
||||
MARK_AS_ADVANCED(PYTHON_VERSION)
|
||||
|
||||
|
||||
@@ -73,8 +73,8 @@ SET(_python_SEARCH_DIRS
|
||||
# only search for the dirs if we haven't already
|
||||
IF((NOT _IS_INC_DEF) OR (NOT _IS_INC_CONF_DEF) OR (NOT _IS_LIB_DEF) OR (NOT _IS_LIB_PATH_DEF))
|
||||
SET(_PYTHON_ABI_FLAGS_TEST
|
||||
"u; " # release
|
||||
"du;d" # debug
|
||||
"m;mu;u; " # release
|
||||
"dm;dmu;du;d" # debug
|
||||
)
|
||||
|
||||
FOREACH(_CURRENT_ABI_FLAGS ${_PYTHON_ABI_FLAGS_TEST})
|
||||
|
@@ -1,49 +0,0 @@
|
||||
# - Find sse2neon library
|
||||
# Find the native sse2neon includes and library
|
||||
# This module defines
|
||||
# SSE2NEON_INCLUDE_DIRS, where to find sse2neon.h, Set when
|
||||
# SSE2NEON_INCLUDE_DIR is found.
|
||||
# SSE2NEON_ROOT_DIR, The base directory to search for sse2neon.
|
||||
# This can also be an environment variable.
|
||||
# SSE2NEON_FOUND, If false, do not try to use sse2neon.
|
||||
|
||||
#=============================================================================
|
||||
# Copyright 2020 Blender Foundation.
|
||||
#
|
||||
# Distributed under the OSI-approved BSD 3-Clause License,
|
||||
# see accompanying file BSD-3-Clause-license.txt for details.
|
||||
#=============================================================================
|
||||
|
||||
# If SSE2NEON_ROOT_DIR was defined in the environment, use it.
|
||||
IF(NOT SSE2NEON_ROOT_DIR AND NOT $ENV{SSE2NEON_ROOT_DIR} STREQUAL "")
|
||||
SET(SSE2NEON_ROOT_DIR $ENV{SSE2NEON_ROOT_DIR})
|
||||
ENDIF()
|
||||
|
||||
SET(_sse2neon_SEARCH_DIRS
|
||||
${SSE2NEON_ROOT_DIR}
|
||||
)
|
||||
|
||||
FIND_PATH(SSE2NEON_INCLUDE_DIR
|
||||
NAMES
|
||||
sse2neon.h
|
||||
HINTS
|
||||
${_sse2neon_SEARCH_DIRS}
|
||||
PATH_SUFFIXES
|
||||
include
|
||||
)
|
||||
|
||||
# handle the QUIETLY and REQUIRED arguments and set SSE2NEON_FOUND to TRUE if
|
||||
# all listed variables are TRUE
|
||||
INCLUDE(FindPackageHandleStandardArgs)
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS(sse2neon DEFAULT_MSG
|
||||
SSE2NEON_INCLUDE_DIR)
|
||||
|
||||
IF(SSE2NEON_FOUND)
|
||||
SET(SSE2NEON_INCLUDE_DIRS ${SSE2NEON_INCLUDE_DIR})
|
||||
ENDIF(SSE2NEON_FOUND)
|
||||
|
||||
MARK_AS_ADVANCED(
|
||||
SSE2NEON_INCLUDE_DIR
|
||||
)
|
||||
|
||||
UNSET(_sse2neon_SEARCH_DIRS)
|
@@ -272,7 +272,7 @@ cmake_policy(SET CMP0057 NEW) # if IN_LIST
|
||||
#------------------------------------------------------------------------------
|
||||
function(gtest_add_tests)
|
||||
|
||||
if(ARGC LESS 1)
|
||||
if (ARGC LESS 1)
|
||||
message(FATAL_ERROR "No arguments supplied to gtest_add_tests()")
|
||||
endif()
|
||||
|
||||
@@ -298,7 +298,7 @@ function(gtest_add_tests)
|
||||
set(autoAddSources YES)
|
||||
else()
|
||||
# Non-keyword syntax, convert to keyword form
|
||||
if(ARGC LESS 3)
|
||||
if (ARGC LESS 3)
|
||||
message(FATAL_ERROR "gtest_add_tests() without keyword options requires at least 3 arguments")
|
||||
endif()
|
||||
set(ARGS_TARGET "${ARGV0}")
|
||||
@@ -330,9 +330,6 @@ function(gtest_add_tests)
|
||||
set(gtest_case_name_regex ".*\\( *([A-Za-z_0-9]+) *, *([A-Za-z_0-9]+) *\\).*")
|
||||
set(gtest_test_type_regex "(TYPED_TEST|TEST_?[FP]?)")
|
||||
|
||||
# This will get a filter for each test suite.
|
||||
set(test_filters "")
|
||||
|
||||
foreach(source IN LISTS ARGS_SOURCES)
|
||||
if(NOT ARGS_SKIP_DEPENDENCY)
|
||||
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS ${source})
|
||||
@@ -379,32 +376,175 @@ function(gtest_add_tests)
|
||||
list(APPEND testList ${ctest_test_name})
|
||||
endif()
|
||||
else()
|
||||
# BLENDER: collect tests named "suite.testcase" as list of "suite.*" filters.
|
||||
string(REGEX REPLACE "\\..*$" "" gtest_suite_name ${gtest_test_name})
|
||||
list(APPEND test_filters "${gtest_suite_name}.*")
|
||||
set(ctest_test_name ${ARGS_TEST_PREFIX}${gtest_test_name}${ARGS_TEST_SUFFIX})
|
||||
add_test(NAME ${ctest_test_name}
|
||||
${workDir}
|
||||
COMMAND ${ARGS_TARGET}
|
||||
--gtest_filter=${gtest_test_name}
|
||||
${ARGS_EXTRA_ARGS}
|
||||
)
|
||||
list(APPEND testList ${ctest_test_name})
|
||||
endif()
|
||||
endforeach()
|
||||
endforeach()
|
||||
|
||||
# Join all found GTest suite names into one big filter.
|
||||
list(REMOVE_DUPLICATES test_filters)
|
||||
list(JOIN test_filters ":" gtest_filter)
|
||||
add_test(NAME ${ARGS_TEST_PREFIX}
|
||||
${workDir}
|
||||
COMMAND ${ARGS_TARGET}
|
||||
--gtest_filter=${gtest_filter}
|
||||
${ARGS_EXTRA_ARGS}
|
||||
)
|
||||
list(APPEND testList ${ARGS_TEST_PREFIX})
|
||||
|
||||
if(ARGS_TEST_LIST)
|
||||
set(${ARGS_TEST_LIST} ${testList} PARENT_SCOPE)
|
||||
endif()
|
||||
|
||||
endfunction()
|
||||
|
||||
# BLENDER: remove the discovery function gtest_discover_tests(). It's not used,
|
||||
# as it generates too many test invocations.
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
function(gtest_discover_tests TARGET)
|
||||
cmake_parse_arguments(
|
||||
""
|
||||
"NO_PRETTY_TYPES;NO_PRETTY_VALUES"
|
||||
"TEST_PREFIX;TEST_SUFFIX;WORKING_DIRECTORY;TEST_LIST;DISCOVERY_TIMEOUT;XML_OUTPUT_DIR;DISCOVERY_MODE"
|
||||
"EXTRA_ARGS;PROPERTIES"
|
||||
${ARGN}
|
||||
)
|
||||
|
||||
if(NOT _WORKING_DIRECTORY)
|
||||
set(_WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}")
|
||||
endif()
|
||||
if(NOT _TEST_LIST)
|
||||
set(_TEST_LIST ${TARGET}_TESTS)
|
||||
endif()
|
||||
if(NOT _DISCOVERY_TIMEOUT)
|
||||
set(_DISCOVERY_TIMEOUT 5)
|
||||
endif()
|
||||
if(NOT _DISCOVERY_MODE)
|
||||
if(NOT CMAKE_GTEST_DISCOVER_TESTS_DISCOVERY_MODE)
|
||||
set(CMAKE_GTEST_DISCOVER_TESTS_DISCOVERY_MODE "POST_BUILD")
|
||||
endif()
|
||||
set(_DISCOVERY_MODE ${CMAKE_GTEST_DISCOVER_TESTS_DISCOVERY_MODE})
|
||||
endif()
|
||||
|
||||
get_property(
|
||||
has_counter
|
||||
TARGET ${TARGET}
|
||||
PROPERTY CTEST_DISCOVERED_TEST_COUNTER
|
||||
SET
|
||||
)
|
||||
if(has_counter)
|
||||
get_property(
|
||||
counter
|
||||
TARGET ${TARGET}
|
||||
PROPERTY CTEST_DISCOVERED_TEST_COUNTER
|
||||
)
|
||||
math(EXPR counter "${counter} + 1")
|
||||
else()
|
||||
set(counter 1)
|
||||
endif()
|
||||
set_property(
|
||||
TARGET ${TARGET}
|
||||
PROPERTY CTEST_DISCOVERED_TEST_COUNTER
|
||||
${counter}
|
||||
)
|
||||
|
||||
# Define rule to generate test list for aforementioned test executable
|
||||
# Blender: use _ instead of [] to avoid problems with zsh regex.
|
||||
set(ctest_file_base "${CMAKE_CURRENT_BINARY_DIR}/${TARGET}_${counter}_")
|
||||
set(ctest_include_file "${ctest_file_base}_include.cmake")
|
||||
set(ctest_tests_file "${ctest_file_base}_tests.cmake")
|
||||
get_property(crosscompiling_emulator
|
||||
TARGET ${TARGET}
|
||||
PROPERTY CROSSCOMPILING_EMULATOR
|
||||
)
|
||||
|
||||
if(_DISCOVERY_MODE STREQUAL "POST_BUILD")
|
||||
add_custom_command(
|
||||
TARGET ${TARGET} POST_BUILD
|
||||
BYPRODUCTS "${ctest_tests_file}"
|
||||
COMMAND "${CMAKE_COMMAND}"
|
||||
-D "TEST_TARGET=${TARGET}"
|
||||
-D "TEST_EXECUTABLE=$<TARGET_FILE:${TARGET}>"
|
||||
-D "TEST_EXECUTOR=${crosscompiling_emulator}"
|
||||
-D "TEST_WORKING_DIR=${_WORKING_DIRECTORY}"
|
||||
-D "TEST_EXTRA_ARGS=${_EXTRA_ARGS}"
|
||||
-D "TEST_PROPERTIES=${_PROPERTIES}"
|
||||
-D "TEST_PREFIX=${_TEST_PREFIX}"
|
||||
-D "TEST_SUFFIX=${_TEST_SUFFIX}"
|
||||
-D "NO_PRETTY_TYPES=${_NO_PRETTY_TYPES}"
|
||||
-D "NO_PRETTY_VALUES=${_NO_PRETTY_VALUES}"
|
||||
-D "TEST_LIST=${_TEST_LIST}"
|
||||
-D "CTEST_FILE=${ctest_tests_file}"
|
||||
-D "TEST_DISCOVERY_TIMEOUT=${_DISCOVERY_TIMEOUT}"
|
||||
-D "TEST_XML_OUTPUT_DIR=${_XML_OUTPUT_DIR}"
|
||||
-P "${_GOOGLETEST_DISCOVER_TESTS_SCRIPT}"
|
||||
VERBATIM
|
||||
)
|
||||
|
||||
file(WRITE "${ctest_include_file}"
|
||||
"if(EXISTS \"${ctest_tests_file}\")\n"
|
||||
" include(\"${ctest_tests_file}\")\n"
|
||||
"else()\n"
|
||||
" add_test(${TARGET}_NOT_BUILT ${TARGET}_NOT_BUILT)\n"
|
||||
"endif()\n"
|
||||
)
|
||||
elseif(_DISCOVERY_MODE STREQUAL "PRE_TEST")
|
||||
|
||||
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL
|
||||
PROPERTY GENERATOR_IS_MULTI_CONFIG
|
||||
)
|
||||
|
||||
if(GENERATOR_IS_MULTI_CONFIG)
|
||||
set(ctest_tests_file "${ctest_file_base}_tests-$<CONFIG>.cmake")
|
||||
endif()
|
||||
|
||||
string(CONCAT ctest_include_content
|
||||
"if(EXISTS \"$<TARGET_FILE:${TARGET}>\")" "\n"
|
||||
" if(\"$<TARGET_FILE:${TARGET}>\" IS_NEWER_THAN \"${ctest_tests_file}\")" "\n"
|
||||
" include(\"${_GOOGLETEST_DISCOVER_TESTS_SCRIPT}\")" "\n"
|
||||
" gtest_discover_tests_impl(" "\n"
|
||||
" TEST_EXECUTABLE" " [==[" "$<TARGET_FILE:${TARGET}>" "]==]" "\n"
|
||||
" TEST_EXECUTOR" " [==[" "${crosscompiling_emulator}" "]==]" "\n"
|
||||
" TEST_WORKING_DIR" " [==[" "${_WORKING_DIRECTORY}" "]==]" "\n"
|
||||
" TEST_EXTRA_ARGS" " [==[" "${_EXTRA_ARGS}" "]==]" "\n"
|
||||
" TEST_PROPERTIES" " [==[" "${_PROPERTIES}" "]==]" "\n"
|
||||
" TEST_PREFIX" " [==[" "${_TEST_PREFIX}" "]==]" "\n"
|
||||
" TEST_SUFFIX" " [==[" "${_TEST_SUFFIX}" "]==]" "\n"
|
||||
" NO_PRETTY_TYPES" " [==[" "${_NO_PRETTY_TYPES}" "]==]" "\n"
|
||||
" NO_PRETTY_VALUES" " [==[" "${_NO_PRETTY_VALUES}" "]==]" "\n"
|
||||
" TEST_LIST" " [==[" "${_TEST_LIST}" "]==]" "\n"
|
||||
" CTEST_FILE" " [==[" "${ctest_tests_file}" "]==]" "\n"
|
||||
" TEST_DISCOVERY_TIMEOUT" " [==[" "${_DISCOVERY_TIMEOUT}" "]==]" "\n"
|
||||
" TEST_XML_OUTPUT_DIR" " [==[" "${_XML_OUTPUT_DIR}" "]==]" "\n"
|
||||
" )" "\n"
|
||||
" endif()" "\n"
|
||||
" include(\"${ctest_tests_file}\")" "\n"
|
||||
"else()" "\n"
|
||||
" add_test(${TARGET}_NOT_BUILT ${TARGET}_NOT_BUILT)" "\n"
|
||||
"endif()" "\n"
|
||||
)
|
||||
|
||||
if(GENERATOR_IS_MULTI_CONFIG)
|
||||
foreach(_config ${CMAKE_CONFIGURATION_TYPES})
|
||||
file(GENERATE OUTPUT "${ctest_file_base}_include-${_config}.cmake" CONTENT "${ctest_include_content}" CONDITION $<CONFIG:${_config}>)
|
||||
endforeach()
|
||||
file(WRITE "${ctest_include_file}" "include(\"${ctest_file_base}_include-\${CTEST_CONFIGURATION_TYPE}.cmake\")")
|
||||
else()
|
||||
file(GENERATE OUTPUT "${ctest_file_base}_include.cmake" CONTENT "${ctest_include_content}")
|
||||
file(WRITE "${ctest_include_file}" "include(\"${ctest_file_base}_include.cmake\")")
|
||||
endif()
|
||||
|
||||
else()
|
||||
message(FATAL_ERROR "Unknown DISCOVERY_MODE: ${_DISCOVERY_MODE}")
|
||||
endif()
|
||||
|
||||
# Add discovered tests to directory TEST_INCLUDE_FILES
|
||||
set_property(DIRECTORY
|
||||
APPEND PROPERTY TEST_INCLUDE_FILES "${ctest_include_file}"
|
||||
)
|
||||
|
||||
endfunction()
|
||||
|
||||
###############################################################################
|
||||
|
||||
set(_GOOGLETEST_DISCOVER_TESTS_SCRIPT
|
||||
${CMAKE_CURRENT_LIST_DIR}/GTestAddTests.cmake
|
||||
)
|
||||
|
||||
# Restore project's policies
|
||||
cmake_policy(POP)
|
||||
|
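A note on the BLENDER-modified gtest_add_tests() above: rather than registering one ctest entry per test case, it collects a "suite.*" filter for each GTest suite and joins the de-duplicated filters with ":" into a single --gtest_filter argument. A minimal Python sketch of that grouping, illustrative only (the actual logic is the CMake shown above):

# Sketch of the suite-filter grouping done by the modified gtest_add_tests().
def build_gtest_filter(test_names):
    # "MySuite.MyCase" -> "MySuite.*", de-duplicated, joined with ":".
    filters = []
    for name in test_names:
        pattern = name.split('.', 1)[0] + '.*'
        if pattern not in filters:
            filters.append(pattern)
    return ':'.join(filters)

print(build_gtest_filter(['math.add', 'math.sub', 'string.join']))
# -> math.*:string.*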
194
build_files/cmake/Modules/GTestAddTests.cmake
Normal file
@@ -0,0 +1,194 @@
|
||||
# Distributed under the OSI-approved BSD 3-Clause License,
|
||||
# see accompanying file BSD-3-Clause-license.txt for details.
|
||||
|
||||
# Changes made to this script have been marked with "BLENDER".
|
||||
|
||||
|
||||
# BLENDER: disable ASAN leak detection when trying to discover tests.
|
||||
set(ENV{ASAN_OPTIONS} "detect_leaks=0")
|
||||
|
||||
cmake_minimum_required(VERSION ${CMAKE_VERSION})
|
||||
|
||||
# Overwrite possibly existing ${_CTEST_FILE} with empty file
|
||||
set(flush_tests_MODE WRITE)
|
||||
|
||||
# Flushes script to ${_CTEST_FILE}
|
||||
macro(flush_script)
|
||||
file(${flush_tests_MODE} "${_CTEST_FILE}" "${script}")
|
||||
set(flush_tests_MODE APPEND)
|
||||
|
||||
set(script "")
|
||||
endmacro()
|
||||
|
||||
# Flushes tests_buffer to tests
|
||||
macro(flush_tests_buffer)
|
||||
list(APPEND tests "${tests_buffer}")
|
||||
set(tests_buffer "")
|
||||
endmacro()
|
||||
|
||||
macro(add_command NAME)
|
||||
set(_args "")
|
||||
foreach(_arg ${ARGN})
|
||||
if(_arg MATCHES "[^-./:a-zA-Z0-9_]")
|
||||
string(APPEND _args " [==[${_arg}]==]")
|
||||
else()
|
||||
string(APPEND _args " ${_arg}")
|
||||
endif()
|
||||
endforeach()
|
||||
string(APPEND script "${NAME}(${_args})\n")
|
||||
string(LENGTH "${script}" _script_len)
|
||||
if(${_script_len} GREATER "50000")
|
||||
flush_script()
|
||||
endif()
|
||||
# Unsets macro local variables to prevent leakage outside of this macro.
|
||||
unset(_args)
|
||||
unset(_script_len)
|
||||
endmacro()
|
||||
|
||||
function(gtest_discover_tests_impl)
|
||||
|
||||
cmake_parse_arguments(
|
||||
""
|
||||
""
|
||||
"NO_PRETTY_TYPES;NO_PRETTY_VALUES;TEST_EXECUTABLE;TEST_EXECUTOR;TEST_WORKING_DIR;TEST_PREFIX;TEST_SUFFIX;TEST_LIST;CTEST_FILE;TEST_DISCOVERY_TIMEOUT;TEST_XML_OUTPUT_DIR"
|
||||
"TEST_EXTRA_ARGS;TEST_PROPERTIES"
|
||||
${ARGN}
|
||||
)
|
||||
|
||||
set(prefix "${_TEST_PREFIX}")
|
||||
set(suffix "${_TEST_SUFFIX}")
|
||||
set(extra_args ${_TEST_EXTRA_ARGS})
|
||||
set(properties ${_TEST_PROPERTIES})
|
||||
set(script)
|
||||
set(suite)
|
||||
set(tests)
|
||||
set(tests_buffer)
|
||||
|
||||
# Run test executable to get list of available tests
|
||||
if(NOT EXISTS "${_TEST_EXECUTABLE}")
|
||||
message(FATAL_ERROR
|
||||
"Specified test executable does not exist.\n"
|
||||
" Path: '${_TEST_EXECUTABLE}'"
|
||||
)
|
||||
endif()
|
||||
execute_process(
|
||||
COMMAND ${_TEST_EXECUTOR} "${_TEST_EXECUTABLE}" --gtest_list_tests
|
||||
WORKING_DIRECTORY "${_TEST_WORKING_DIR}"
|
||||
TIMEOUT ${_TEST_DISCOVERY_TIMEOUT}
|
||||
OUTPUT_VARIABLE output
|
||||
RESULT_VARIABLE result
|
||||
)
|
||||
if(NOT ${result} EQUAL 0)
|
||||
string(REPLACE "\n" "\n " output "${output}")
|
||||
message(FATAL_ERROR
|
||||
"Error running test executable.\n"
|
||||
" Path: '${_TEST_EXECUTABLE}'\n"
|
||||
" Result: ${result}\n"
|
||||
" Output:\n"
|
||||
" ${output}\n"
|
||||
)
|
||||
endif()
|
||||
|
||||
# Preserve semicolon in test-parameters
|
||||
string(REPLACE [[;]] [[\;]] output "${output}")
|
||||
string(REPLACE "\n" ";" output "${output}")
|
||||
|
||||
# Parse output
|
||||
foreach(line ${output})
|
||||
# Skip header
|
||||
if(NOT line MATCHES "gtest_main\\.cc")
|
||||
# Do we have a module name or a test name?
|
||||
if(NOT line MATCHES "^ ")
|
||||
# Module; remove trailing '.' to get just the name...
|
||||
string(REGEX REPLACE "\\.( *#.*)?" "" suite "${line}")
|
||||
if(line MATCHES "#" AND NOT _NO_PRETTY_TYPES)
|
||||
string(REGEX REPLACE "/[0-9]\\.+ +#.*= +" "/" pretty_suite "${line}")
|
||||
else()
|
||||
set(pretty_suite "${suite}")
|
||||
endif()
|
||||
string(REGEX REPLACE "^DISABLED_" "" pretty_suite "${pretty_suite}")
|
||||
else()
|
||||
# Test name; strip spaces and comments to get just the name...
|
||||
string(REGEX REPLACE " +" "" test "${line}")
|
||||
if(test MATCHES "#" AND NOT _NO_PRETTY_VALUES)
|
||||
string(REGEX REPLACE "/[0-9]+#GetParam..=" "/" pretty_test "${test}")
|
||||
else()
|
||||
string(REGEX REPLACE "#.*" "" pretty_test "${test}")
|
||||
endif()
|
||||
string(REGEX REPLACE "^DISABLED_" "" pretty_test "${pretty_test}")
|
||||
string(REGEX REPLACE "#.*" "" test "${test}")
|
||||
if(NOT "${_TEST_XML_OUTPUT_DIR}" STREQUAL "")
|
||||
set(TEST_XML_OUTPUT_PARAM "--gtest_output=xml:${_TEST_XML_OUTPUT_DIR}/${prefix}${suite}.${test}${suffix}.xml")
|
||||
else()
|
||||
unset(TEST_XML_OUTPUT_PARAM)
|
||||
endif()
|
||||
|
||||
# sanitize test name for further processing downstream
|
||||
set(testname "${prefix}${pretty_suite}.${pretty_test}${suffix}")
|
||||
# escape \
|
||||
string(REPLACE [[\]] [[\\]] testname "${testname}")
|
||||
# escape ;
|
||||
string(REPLACE [[;]] [[\;]] testname "${testname}")
|
||||
# escape $
|
||||
string(REPLACE [[$]] [[\$]] testname "${testname}")
|
||||
|
||||
# ...and add to script
|
||||
add_command(add_test
|
||||
"${testname}"
|
||||
${_TEST_EXECUTOR}
|
||||
"${_TEST_EXECUTABLE}"
|
||||
"--gtest_filter=${suite}.${test}"
|
||||
"--gtest_also_run_disabled_tests"
|
||||
${TEST_XML_OUTPUT_PARAM}
|
||||
${extra_args}
|
||||
)
|
||||
if(suite MATCHES "^DISABLED" OR test MATCHES "^DISABLED")
|
||||
add_command(set_tests_properties
|
||||
"${testname}"
|
||||
PROPERTIES DISABLED TRUE
|
||||
)
|
||||
endif()
|
||||
add_command(set_tests_properties
|
||||
"${testname}"
|
||||
PROPERTIES
|
||||
WORKING_DIRECTORY "${_TEST_WORKING_DIR}"
|
||||
SKIP_REGULAR_EXPRESSION "\\\\[ SKIPPED \\\\]"
|
||||
${properties}
|
||||
)
|
||||
list(APPEND tests_buffer "${testname}")
|
||||
list(LENGTH tests_buffer tests_buffer_length)
|
||||
if(${tests_buffer_length} GREATER "250")
|
||||
flush_tests_buffer()
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
|
||||
# Create a list of all discovered tests, which users may use to e.g. set
|
||||
# properties on the tests
|
||||
flush_tests_buffer()
|
||||
add_command(set ${_TEST_LIST} ${tests})
|
||||
|
||||
# Write CTest script
|
||||
flush_script()
|
||||
|
||||
endfunction()
|
||||
|
||||
if(CMAKE_SCRIPT_MODE_FILE)
|
||||
gtest_discover_tests_impl(
|
||||
NO_PRETTY_TYPES ${NO_PRETTY_TYPES}
|
||||
NO_PRETTY_VALUES ${NO_PRETTY_VALUES}
|
||||
TEST_EXECUTABLE ${TEST_EXECUTABLE}
|
||||
TEST_EXECUTOR ${TEST_EXECUTOR}
|
||||
TEST_WORKING_DIR ${TEST_WORKING_DIR}
|
||||
TEST_PREFIX ${TEST_PREFIX}
|
||||
TEST_SUFFIX ${TEST_SUFFIX}
|
||||
TEST_LIST ${TEST_LIST}
|
||||
CTEST_FILE ${CTEST_FILE}
|
||||
TEST_DISCOVERY_TIMEOUT ${TEST_DISCOVERY_TIMEOUT}
|
||||
TEST_XML_OUTPUT_DIR ${TEST_XML_OUTPUT_DIR}
|
||||
TEST_EXTRA_ARGS ${TEST_EXTRA_ARGS}
|
||||
TEST_PROPERTIES ${TEST_PROPERTIES}
|
||||
)
|
||||
endif()
|
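For reference, the parsing loop in GTestAddTests.cmake above treats unindented lines of the --gtest_list_tests output as suite names (trailing "." stripped) and indented lines as test names within the current suite. A rough Python equivalent, assuming the standard listing format and leaving out the DISABLED_/pretty-name handling:

# Rough Python equivalent of the suite/test parsing in GTestAddTests.cmake.
def parse_gtest_list(output):
    tests = []
    suite = None
    for line in output.splitlines():
        if not line.strip() or 'gtest_main.cc' in line:
            continue  # skip blanks and the header line
        if not line.startswith(' '):
            # Suite line, e.g. "MathTest." (optional "# ..." comment follows)
            suite = line.split('#')[0].strip().rstrip('.')
        else:
            # Test line, e.g. "  Add  # GetParam() = ..."
            test = line.split('#')[0].strip()
            if suite:
                tests.append(suite + '.' + test)
    return tests

sample = "MathTest.\n  Add\n  Sub\nStringTest.\n  Join\n"
print(parse_gtest_list(sample))
# ['MathTest.Add', 'MathTest.Sub', 'StringTest.Join']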
@@ -8,17 +8,6 @@
|
||||
#
|
||||
#=============================================================================
|
||||
|
||||
function(GET_BLENDER_TEST_INSTALL_DIR VARIABLE_NAME)
|
||||
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
|
||||
if(GENERATOR_IS_MULTI_CONFIG)
|
||||
string(REPLACE "\${BUILD_TYPE}" "$<CONFIG>" TEST_INSTALL_DIR ${CMAKE_INSTALL_PREFIX})
|
||||
else()
|
||||
string(REPLACE "\${BUILD_TYPE}" "" TEST_INSTALL_DIR ${CMAKE_INSTALL_PREFIX})
|
||||
endif()
|
||||
set(${VARIABLE_NAME} "${TEST_INSTALL_DIR}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
|
||||
macro(BLENDER_SRC_GTEST_EX)
|
||||
if(WITH_GTESTS)
|
||||
set(options SKIP_ADD_TEST)
|
||||
@@ -86,7 +75,13 @@ macro(BLENDER_SRC_GTEST_EX)
|
||||
target_link_libraries(${TARGET_NAME} ${GMP_LIBRARIES})
|
||||
endif()
|
||||
|
||||
GET_BLENDER_TEST_INSTALL_DIR(TEST_INSTALL_DIR)
|
||||
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
|
||||
if(GENERATOR_IS_MULTI_CONFIG)
|
||||
string(REPLACE "\${BUILD_TYPE}" "$<CONFIG>" TEST_INSTALL_DIR ${CMAKE_INSTALL_PREFIX})
|
||||
else()
|
||||
string(REPLACE "\${BUILD_TYPE}" "" TEST_INSTALL_DIR ${CMAKE_INSTALL_PREFIX})
|
||||
endif()
|
||||
|
||||
set_target_properties(${TARGET_NAME} PROPERTIES
|
||||
RUNTIME_OUTPUT_DIRECTORY "${TESTS_OUTPUT_DIR}"
|
||||
RUNTIME_OUTPUT_DIRECTORY_RELEASE "${TESTS_OUTPUT_DIR}"
|
||||
@@ -99,9 +94,7 @@ macro(BLENDER_SRC_GTEST_EX)
|
||||
|
||||
# Don't fail tests on leaks since these often happen in external libraries
|
||||
# that we can't fix.
|
||||
set_tests_properties(${TARGET_NAME} PROPERTIES
|
||||
ENVIRONMENT LSAN_OPTIONS=exitcode=0:$ENV{LSAN_OPTIONS}
|
||||
)
|
||||
set_tests_properties(${TARGET_NAME} PROPERTIES ENVIRONMENT LSAN_OPTIONS=exitcode=0)
|
||||
endif()
|
||||
if(WIN32)
|
||||
set_target_properties(${TARGET_NAME} PROPERTIES VS_GLOBAL_VcpkgEnabled "false")
|
||||
|
@@ -13,7 +13,7 @@ Invocation:
export CLANG_BIND_DIR="/dsk/src/llvm/tools/clang/bindings/python"
export CLANG_LIB_DIR="/opt/llvm/lib"

python clang_array_check.py somefile.c -DSOME_DEFINE -I/some/include
python2 clang_array_check.py somefile.c -DSOME_DEFINE -I/some/include

... defines and includes are optional

@@ -76,32 +76,6 @@ defs_precalc = {
|
||||
"glNormal3bv": {0: 3},
|
||||
"glNormal3iv": {0: 3},
|
||||
"glNormal3sv": {0: 3},
|
||||
|
||||
# GPU immediate mode.
|
||||
"immVertex2iv": {1: 2},
|
||||
|
||||
"immVertex2fv": {1: 2},
|
||||
"immVertex3fv": {1: 3},
|
||||
|
||||
"immAttr2fv": {1: 2},
|
||||
"immAttr3fv": {1: 3},
|
||||
"immAttr4fv": {1: 4},
|
||||
|
||||
"immAttr3ubv": {1: 3},
|
||||
"immAttr4ubv": {1: 4},
|
||||
|
||||
"immUniform2fv": {1: 2},
|
||||
"immUniform3fv": {1: 3},
|
||||
"immUniform4fv": {1: 4},
|
||||
|
||||
"immUniformColor3fv": {0: 3},
|
||||
"immUniformColor4fv": {0: 4},
|
||||
|
||||
"immUniformColor3ubv": {1: 3},
|
||||
"immUniformColor4ubv": {1: 4},
|
||||
|
||||
"immUniformColor3fvAlpha": {0: 3},
|
||||
"immUniformColor4fvAlpha": {0: 4},
|
||||
}
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
@@ -126,8 +100,7 @@ else:
|
||||
if CLANG_LIB_DIR is None:
|
||||
print("$CLANG_LIB_DIR clang lib dir not set")
|
||||
|
||||
if CLANG_BIND_DIR:
|
||||
sys.path.append(CLANG_BIND_DIR)
|
||||
sys.path.append(CLANG_BIND_DIR)
|
||||
|
||||
import clang
|
||||
import clang.cindex
|
||||
@@ -135,8 +108,7 @@ from clang.cindex import (CursorKind,
|
||||
TypeKind,
|
||||
TokenKind)
|
||||
|
||||
if CLANG_LIB_DIR:
|
||||
clang.cindex.Config.set_library_path(CLANG_LIB_DIR)
|
||||
clang.cindex.Config.set_library_path(CLANG_LIB_DIR)
|
||||
|
||||
index = clang.cindex.Index.create()
|
||||
|
||||
|
@@ -20,8 +20,6 @@
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Note: this code should be cleaned up / refactored.
|
||||
|
||||
import sys
|
||||
if sys.version_info.major < 3:
|
||||
print("\nPython3.x needed, found %s.\nAborting!\n" %
|
||||
@@ -39,23 +37,12 @@ from cmake_consistency_check_config import (
|
||||
|
||||
|
||||
import os
|
||||
from os.path import (
|
||||
dirname,
|
||||
join,
|
||||
normpath,
|
||||
splitext,
|
||||
)
|
||||
from os.path import join, dirname, normpath, splitext
|
||||
|
||||
global_h = set()
|
||||
global_c = set()
|
||||
global_refs = {}
|
||||
|
||||
# Flatten `IGNORE_SOURCE_MISSING` to avoid nested looping.
|
||||
IGNORE_SOURCE_MISSING = [
|
||||
(k, ignore_path) for k, ig_list in IGNORE_SOURCE_MISSING
|
||||
for ignore_path in ig_list
|
||||
]
|
||||
|
||||
# Ignore cmake file, path pairs.
|
||||
global_ignore_source_missing = {}
|
||||
for k, v in IGNORE_SOURCE_MISSING:
|
||||
@@ -191,8 +178,6 @@ def cmake_get_src(f):
|
||||
|
||||
if not l:
|
||||
pass
|
||||
elif l in local_ignore_source_missing:
|
||||
local_ignore_source_missing.remove(l)
|
||||
elif l.startswith("$"):
|
||||
if context_name == "SRC":
|
||||
# assume if it ends with context_name we know about it
|
||||
@@ -242,7 +227,10 @@ def cmake_get_src(f):
|
||||
# replace_line(f, i - 1, new_path_rel)
|
||||
|
||||
else:
|
||||
raise Exception("non existent include %s:%d -> %s" % (f, i, new_file))
|
||||
if l in local_ignore_source_missing:
|
||||
local_ignore_source_missing.remove(l)
|
||||
else:
|
||||
raise Exception("non existent include %s:%d -> %s" % (f, i, new_file))
|
||||
|
||||
# print(new_file)
|
||||
|
||||
@@ -270,16 +258,16 @@ def cmake_get_src(f):
|
||||
|
||||
|
||||
def is_ignore_source(f, ignore_used):
|
||||
for index, ignore_path in enumerate(IGNORE_SOURCE):
|
||||
if ignore_path in f:
|
||||
for index, ig in enumerate(IGNORE_SOURCE):
|
||||
if ig in f:
|
||||
ignore_used[index] = True
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def is_ignore_cmake(f, ignore_used):
|
||||
for index, ignore_path in enumerate(IGNORE_CMAKE):
|
||||
if ignore_path in f:
|
||||
for index, ig in enumerate(IGNORE_CMAKE):
|
||||
if ig in f:
|
||||
ignore_used[index] = True
|
||||
return True
|
||||
return False
|
||||
@@ -310,7 +298,7 @@ def main():
|
||||
for cf, i in refs:
|
||||
errs.append((cf, i))
|
||||
else:
|
||||
raise Exception("CMake references missing, internal error, aborting!")
|
||||
raise Exception("CMake referenecs missing, internal error, aborting!")
|
||||
is_err = True
|
||||
|
||||
errs.sort()
|
||||
@@ -321,7 +309,7 @@ def main():
|
||||
# print("sed '%dd' '%s' > '%s.tmp' ; mv '%s.tmp' '%s'" % (i, cf, cf, cf, cf))
|
||||
|
||||
if is_err:
|
||||
raise Exception("CMake references missing files, aborting!")
|
||||
raise Exception("CMake referenecs missing files, aborting!")
|
||||
del is_err
|
||||
del errs
|
||||
|
||||
@@ -332,7 +320,7 @@ def main():
|
||||
if cf not in global_c:
|
||||
print("missing_c: ", cf)
|
||||
|
||||
# Check if automake builds a corresponding .o file.
|
||||
# check if automake builds a corrasponding .o file.
|
||||
'''
|
||||
if cf in global_c:
|
||||
out1 = os.path.splitext(cf)[0] + ".o"
|
||||
@@ -368,21 +356,21 @@ def main():
|
||||
|
||||
# Check ignores aren't stale
|
||||
print("\nCheck for unused 'IGNORE_SOURCE' paths...")
|
||||
for index, ignore_path in enumerate(IGNORE_SOURCE):
|
||||
for index, ig in enumerate(IGNORE_SOURCE):
|
||||
if not ignore_used_source[index]:
|
||||
print("unused ignore: %r" % ignore_path)
|
||||
print("unused ignore: %r" % ig)
|
||||
|
||||
# Check ignores aren't stale
|
||||
print("\nCheck for unused 'IGNORE_SOURCE_MISSING' paths...")
|
||||
for k, v in sorted(global_ignore_source_missing.items()):
|
||||
for ignore_path in v:
|
||||
print("unused ignore: %r -> %r" % (ignore_path, k))
|
||||
for ig in v:
|
||||
print("unused ignore: %r -> %r" % (ig, k))
|
||||
|
||||
# Check ignores aren't stale
|
||||
print("\nCheck for unused 'IGNORE_CMAKE' paths...")
|
||||
for index, ignore_path in enumerate(IGNORE_CMAKE):
|
||||
for index, ig in enumerate(IGNORE_CMAKE):
|
||||
if not ignore_used_cmake[index]:
|
||||
print("unused ignore: %r" % ignore_path)
|
||||
print("unused ignore: %r" % ig)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
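The consistency checker above flattens the nested IGNORE_SOURCE_MISSING table into plain (cmake_file, path) pairs before looping over it. A minimal sketch of that flattening, using made-up paths:

# Sketch of the IGNORE_SOURCE_MISSING flattening (paths are illustrative).
IGNORE_SOURCE_MISSING = (
    ("a/CMakeLists.txt", ("x.h", "y.h")),
    ("b/CMakeLists.txt", ("z.h",)),
)

flattened = [
    (k, ignore_path)
    for k, ig_list in IGNORE_SOURCE_MISSING
    for ignore_path in ig_list
]
print(flattened)
# [('a/CMakeLists.txt', 'x.h'), ('a/CMakeLists.txt', 'y.h'), ('b/CMakeLists.txt', 'z.h')]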
@@ -34,18 +34,8 @@ IGNORE_SOURCE = (

# Ignore cmake file, path pairs.
IGNORE_SOURCE_MISSING = (
( # Use for cycles stand-alone.
"intern/cycles/util/CMakeLists.txt", (
"../../third_party/numaapi/include",
)),
( # Use for `WITH_NANOVDB`.
"intern/cycles/kernel/CMakeLists.txt", (
"nanovdb/util/CSampleFromVoxels.h",
"nanovdb/util/SampleFromVoxels.h",
"nanovdb/NanoVDB.h",
"nanovdb/CNanoVDB.h",
),
),
# Use for cycles stand-alone.
("intern/cycles/util/CMakeLists.txt", "../../third_party/numaapi/include"),
)

IGNORE_CMAKE = (
@@ -32,7 +32,7 @@ CHECKER_IGNORE_PREFIX = [
"intern/moto",
]

CHECKER_BIN = "python3"
CHECKER_BIN = "python2"

CHECKER_ARGS = [
os.path.join(os.path.dirname(__file__), "clang_array_check.py"),
@@ -19,7 +19,6 @@ set(WITH_DRACO ON CACHE BOOL "" FORCE)
|
||||
set(WITH_FFTW3 ON CACHE BOOL "" FORCE)
|
||||
set(WITH_FREESTYLE ON CACHE BOOL "" FORCE)
|
||||
set(WITH_GMP ON CACHE BOOL "" FORCE)
|
||||
set(WITH_HARU ON CACHE BOOL "" FORCE)
|
||||
set(WITH_IK_ITASC ON CACHE BOOL "" FORCE)
|
||||
set(WITH_IK_SOLVER ON CACHE BOOL "" FORCE)
|
||||
set(WITH_IMAGE_CINEON ON CACHE BOOL "" FORCE)
|
||||
@@ -45,8 +44,6 @@ set(WITH_OPENMP ON CACHE BOOL "" FORCE)
|
||||
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
|
||||
set(WITH_OPENVDB ON CACHE BOOL "" FORCE)
|
||||
set(WITH_OPENVDB_BLOSC ON CACHE BOOL "" FORCE)
|
||||
set(WITH_POTRACE ON CACHE BOOL "" FORCE)
|
||||
set(WITH_PUGIXML ON CACHE BOOL "" FORCE)
|
||||
set(WITH_NANOVDB ON CACHE BOOL "" FORCE)
|
||||
set(WITH_POTRACE ON CACHE BOOL "" FORCE)
|
||||
set(WITH_PYTHON_INSTALL ON CACHE BOOL "" FORCE)
|
||||
|
@@ -24,7 +24,6 @@ set(WITH_DRACO OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_FFTW3 OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_FREESTYLE OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_GMP OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_HARU OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_IK_ITASC OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_IK_SOLVER OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_IMAGE_CINEON OFF CACHE BOOL "" FORCE)
|
||||
@@ -52,8 +51,6 @@ set(WITH_OPENIMAGEIO OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_OPENMP OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_OPENSUBDIV OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_OPENVDB OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_POTRACE OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_PUGIXML OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_NANOVDB OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_QUADRIFLOW OFF CACHE BOOL "" FORCE)
|
||||
set(WITH_SDL OFF CACHE BOOL "" FORCE)
|
||||
|
@@ -20,7 +20,6 @@ set(WITH_DRACO ON CACHE BOOL "" FORCE)
|
||||
set(WITH_FFTW3 ON CACHE BOOL "" FORCE)
|
||||
set(WITH_FREESTYLE ON CACHE BOOL "" FORCE)
|
||||
set(WITH_GMP ON CACHE BOOL "" FORCE)
|
||||
set(WITH_HARU ON CACHE BOOL "" FORCE)
|
||||
set(WITH_IK_SOLVER ON CACHE BOOL "" FORCE)
|
||||
set(WITH_IK_ITASC ON CACHE BOOL "" FORCE)
|
||||
set(WITH_IMAGE_CINEON ON CACHE BOOL "" FORCE)
|
||||
@@ -46,8 +45,6 @@ set(WITH_OPENMP ON CACHE BOOL "" FORCE)
|
||||
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
|
||||
set(WITH_OPENVDB ON CACHE BOOL "" FORCE)
|
||||
set(WITH_OPENVDB_BLOSC ON CACHE BOOL "" FORCE)
|
||||
set(WITH_POTRACE ON CACHE BOOL "" FORCE)
|
||||
set(WITH_PUGIXML ON CACHE BOOL "" FORCE)
|
||||
set(WITH_NANOVDB ON CACHE BOOL "" FORCE)
|
||||
set(WITH_POTRACE ON CACHE BOOL "" FORCE)
|
||||
set(WITH_PYTHON_INSTALL ON CACHE BOOL "" FORCE)
|
||||
|
@@ -388,43 +388,6 @@ function(blender_add_lib
|
||||
set_property(GLOBAL APPEND PROPERTY BLENDER_LINK_LIBS ${name})
|
||||
endfunction()
|
||||
|
||||
function(blender_add_test_suite)
|
||||
if(ARGC LESS 1)
|
||||
message(FATAL_ERROR "No arguments supplied to blender_add_test_suite()")
|
||||
endif()
|
||||
|
||||
# Parse the arguments
|
||||
set(oneValueArgs TARGET SUITE_NAME)
|
||||
set(multiValueArgs SOURCES)
|
||||
cmake_parse_arguments(ARGS "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
|
||||
|
||||
# Figure out the release dir, as some tests need files from there.
|
||||
GET_BLENDER_TEST_INSTALL_DIR(TEST_INSTALL_DIR)
|
||||
if(APPLE)
|
||||
set(_test_release_dir ${TEST_INSTALL_DIR}/Blender.app/Contents/Resources/${BLENDER_VERSION})
|
||||
else()
|
||||
if(WIN32 OR WITH_INSTALL_PORTABLE)
|
||||
set(_test_release_dir ${TEST_INSTALL_DIR}/${BLENDER_VERSION})
|
||||
else()
|
||||
set(_test_release_dir ${TEST_INSTALL_DIR}/share/blender/${BLENDER_VERSION})
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Define a test case with our custom gtest_add_tests() command.
|
||||
include(GTest)
|
||||
gtest_add_tests(
|
||||
TARGET ${ARGS_TARGET}
|
||||
SOURCES "${ARGS_SOURCES}"
|
||||
TEST_PREFIX ${ARGS_SUITE_NAME}
|
||||
WORKING_DIRECTORY "${TEST_INSTALL_DIR}"
|
||||
EXTRA_ARGS
|
||||
--test-assets-dir "${CMAKE_SOURCE_DIR}/../lib/tests"
|
||||
--test-release-dir "${_test_release_dir}"
|
||||
)
|
||||
|
||||
unset(_test_release_dir)
|
||||
endfunction()
|
||||
|
||||
# Add tests for a Blender library, to be called in tandem with blender_add_lib().
|
||||
# The tests will be part of the blender_test executable (see tests/gtests/runner).
|
||||
function(blender_add_test_lib
|
||||
@@ -458,12 +421,6 @@ function(blender_add_test_lib
|
||||
blender_add_lib__impl(${name} "${sources}" "${includes}" "${includes_sys}" "${library_deps}")
|
||||
|
||||
set_property(GLOBAL APPEND PROPERTY BLENDER_TEST_LIBS ${name})
|
||||
|
||||
blender_add_test_suite(
|
||||
TARGET blender_test
|
||||
SUITE_NAME ${name}
|
||||
SOURCES "${sources}"
|
||||
)
|
||||
endfunction()
|
||||
|
||||
|
||||
@@ -497,10 +454,14 @@ function(blender_add_test_executable
|
||||
SKIP_ADD_TEST
|
||||
)
|
||||
|
||||
blender_add_test_suite(
|
||||
TARGET ${name}_test
|
||||
SUITE_NAME ${name}
|
||||
SOURCES "${sources}"
|
||||
include(GTest)
|
||||
set(_GOOGLETEST_DISCOVER_TESTS_SCRIPT
|
||||
${CMAKE_SOURCE_DIR}/build_files/cmake/Modules/GTestAddTests.cmake
|
||||
)
|
||||
|
||||
gtest_discover_tests(${name}_test
|
||||
DISCOVERY_MODE PRE_TEST
|
||||
WORKING_DIRECTORY "${TEST_INSTALL_DIR}"
|
||||
)
|
||||
endfunction()
|
||||
|
||||
@@ -668,6 +629,12 @@ macro(TEST_SSE_SUPPORT
|
||||
#include <xmmintrin.h>
|
||||
int main(void) { __m128 v = _mm_setzero_ps(); return 0; }"
|
||||
SUPPORT_SSE_BUILD)
|
||||
|
||||
if(SUPPORT_SSE_BUILD)
|
||||
message(STATUS "SSE Support: detected.")
|
||||
else()
|
||||
message(STATUS "SSE Support: missing.")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(NOT DEFINED SUPPORT_SSE2_BUILD)
|
||||
@@ -676,21 +643,17 @@ macro(TEST_SSE_SUPPORT
|
||||
#include <emmintrin.h>
|
||||
int main(void) { __m128d v = _mm_setzero_pd(); return 0; }"
|
||||
SUPPORT_SSE2_BUILD)
|
||||
|
||||
if(SUPPORT_SSE2_BUILD)
|
||||
message(STATUS "SSE2 Support: detected.")
|
||||
else()
|
||||
message(STATUS "SSE2 Support: missing.")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
unset(CMAKE_REQUIRED_FLAGS)
|
||||
endmacro()
|
||||
|
||||
macro(TEST_NEON_SUPPORT)
|
||||
if(NOT DEFINED SUPPORT_NEON_BUILD)
|
||||
include(CheckCXXSourceCompiles)
|
||||
check_cxx_source_compiles(
|
||||
"#include <arm_neon.h>
|
||||
int main() {return vaddvq_s32(vdupq_n_s32(1));}"
|
||||
SUPPORT_NEON_BUILD)
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
# Only print message if running CMake first time
|
||||
macro(message_first_run)
|
||||
if(FIRST_RUN)
|
||||
@@ -1176,7 +1139,6 @@ endfunction()
|
||||
|
||||
function(find_python_package
|
||||
package
|
||||
relative_include_dir
|
||||
)
|
||||
|
||||
string(TOUPPER ${package} _upper_package)
|
||||
@@ -1207,11 +1169,8 @@ function(find_python_package
|
||||
site-packages
|
||||
dist-packages
|
||||
vendor-packages
|
||||
NO_DEFAULT_PATH
|
||||
DOC
|
||||
"Path to python site-packages or dist-packages containing '${package}' module"
|
||||
NO_DEFAULT_PATH
|
||||
)
|
||||
mark_as_advanced(PYTHON_${_upper_package}_PATH)
|
||||
|
||||
if(NOT EXISTS "${PYTHON_${_upper_package}_PATH}")
|
||||
message(WARNING
|
||||
@@ -1229,50 +1188,6 @@ function(find_python_package
|
||||
set(WITH_PYTHON_INSTALL_${_upper_package} OFF PARENT_SCOPE)
|
||||
else()
|
||||
message(STATUS "${package} found at '${PYTHON_${_upper_package}_PATH}'")
|
||||
|
||||
if(NOT "${relative_include_dir}" STREQUAL "")
|
||||
set(_relative_include_dir "${package}/${relative_include_dir}")
|
||||
unset(PYTHON_${_upper_package}_INCLUDE_DIRS CACHE)
|
||||
find_path(PYTHON_${_upper_package}_INCLUDE_DIRS
|
||||
NAMES
|
||||
"${_relative_include_dir}"
|
||||
HINTS
|
||||
"${PYTHON_LIBPATH}/"
|
||||
"${PYTHON_LIBPATH}/python${PYTHON_VERSION}/"
|
||||
"${PYTHON_LIBPATH}/python${_PY_VER_MAJOR}/"
|
||||
PATH_SUFFIXES
|
||||
"site-packages/"
|
||||
"dist-packages/"
|
||||
"vendor-packages/"
|
||||
NO_DEFAULT_PATH
|
||||
DOC
|
||||
"Path to python site-packages or dist-packages containing '${package}' module header files"
|
||||
)
|
||||
mark_as_advanced(PYTHON_${_upper_package}_INCLUDE_DIRS)
|
||||
|
||||
if(NOT EXISTS "${PYTHON_${_upper_package}_INCLUDE_DIRS}")
|
||||
message(WARNING
|
||||
"Python package '${package}' include dir path could not be found in:\n"
|
||||
"'${PYTHON_LIBPATH}/python${PYTHON_VERSION}/site-packages/${_relative_include_dir}', "
|
||||
"'${PYTHON_LIBPATH}/python${_PY_VER_MAJOR}/site-packages/${_relative_include_dir}', "
|
||||
"'${PYTHON_LIBPATH}/python${PYTHON_VERSION}/dist-packages/${_relative_include_dir}', "
|
||||
"'${PYTHON_LIBPATH}/python${_PY_VER_MAJOR}/dist-packages/${_relative_include_dir}', "
|
||||
"'${PYTHON_LIBPATH}/python${PYTHON_VERSION}/vendor-packages/${_relative_include_dir}', "
|
||||
"'${PYTHON_LIBPATH}/python${_PY_VER_MAJOR}/vendor-packages/${_relative_include_dir}', "
|
||||
"\n"
|
||||
"The 'WITH_PYTHON_${_upper_package}' option will be disabled.\n"
|
||||
"The build will be usable, only add-ons that depend on this package won't be functional."
|
||||
)
|
||||
set(WITH_PYTHON_${_upper_package} OFF PARENT_SCOPE)
|
||||
else()
|
||||
set(_temp "${PYTHON_${_upper_package}_INCLUDE_DIRS}/${package}/${relative_include_dir}")
|
||||
unset(PYTHON_${_upper_package}_INCLUDE_DIRS CACHE)
|
||||
set(PYTHON_${_upper_package}_INCLUDE_DIRS "${_temp}"
|
||||
CACHE PATH "Path to the include directory of the ${package} module")
|
||||
|
||||
message(STATUS "${package} include files found at '${PYTHON_${_upper_package}_INCLUDE_DIRS}'")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
endfunction()
|
||||
|
@@ -72,11 +72,7 @@ if(WITH_JACK)
|
||||
endif()
|
||||
|
||||
if(NOT DEFINED LIBDIR)
|
||||
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64")
|
||||
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin)
|
||||
else()
|
||||
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin_${CMAKE_OSX_ARCHITECTURES})
|
||||
endif()
|
||||
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin)
|
||||
else()
|
||||
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
|
||||
endif()
|
||||
@@ -131,22 +127,22 @@ if(WITH_CODEC_SNDFILE)
|
||||
endif()
|
||||
|
||||
if(WITH_PYTHON)
|
||||
# we use precompiled libraries for py 3.9 and up by default
|
||||
set(PYTHON_VERSION 3.9)
|
||||
# we use precompiled libraries for py 3.7 and up by default
|
||||
set(PYTHON_VERSION 3.7)
|
||||
if(NOT WITH_PYTHON_MODULE AND NOT WITH_PYTHON_FRAMEWORK)
|
||||
# normally cached but not since we include them with blender
|
||||
set(PYTHON_INCLUDE_DIR "${LIBDIR}/python/include/python${PYTHON_VERSION}")
|
||||
set(PYTHON_EXECUTABLE "${LIBDIR}/python/bin/python${PYTHON_VERSION}")
|
||||
set(PYTHON_LIBRARY ${LIBDIR}/python/lib/libpython${PYTHON_VERSION}.a)
|
||||
set(PYTHON_INCLUDE_DIR "${LIBDIR}/python/include/python${PYTHON_VERSION}m")
|
||||
set(PYTHON_EXECUTABLE "${LIBDIR}/python/bin/python${PYTHON_VERSION}m")
|
||||
set(PYTHON_LIBRARY ${LIBDIR}/python/lib/libpython${PYTHON_VERSION}m.a)
|
||||
set(PYTHON_LIBPATH "${LIBDIR}/python/lib/python${PYTHON_VERSION}")
|
||||
# set(PYTHON_LINKFLAGS "-u _PyMac_Error") # won't build with this enabled
|
||||
else()
|
||||
# module must be compiled against Python framework
|
||||
set(_py_framework "/Library/Frameworks/Python.framework/Versions/${PYTHON_VERSION}")
|
||||
|
||||
set(PYTHON_INCLUDE_DIR "${_py_framework}/include/python${PYTHON_VERSION}")
|
||||
set(PYTHON_EXECUTABLE "${_py_framework}/bin/python${PYTHON_VERSION}")
|
||||
set(PYTHON_LIBPATH "${_py_framework}/lib/python${PYTHON_VERSION}/config-${PYTHON_VERSION}")
|
||||
set(PYTHON_INCLUDE_DIR "${_py_framework}/include/python${PYTHON_VERSION}m")
|
||||
set(PYTHON_EXECUTABLE "${_py_framework}/bin/python${PYTHON_VERSION}m")
|
||||
set(PYTHON_LIBPATH "${_py_framework}/lib/python${PYTHON_VERSION}/config-${PYTHON_VERSION}m")
|
||||
# set(PYTHON_LIBRARY python${PYTHON_VERSION})
|
||||
# set(PYTHON_LINKFLAGS "-u _PyMac_Error -framework Python") # won't build with this enabled
|
||||
|
||||
@@ -206,13 +202,6 @@ set(PLATFORM_LINKFLAGS
|
||||
|
||||
list(APPEND PLATFORM_LINKLIBS c++)
|
||||
|
||||
if(WITH_OPENIMAGEDENOISE)
|
||||
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
|
||||
# OpenImageDenoise uses BNNS from the Accelerate framework.
|
||||
string(APPEND PLATFORM_LINKFLAGS " -framework Accelerate")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WITH_JACK)
|
||||
string(APPEND PLATFORM_LINKFLAGS " -F/Library/Frameworks -weak_framework jackmp")
|
||||
endif()
|
||||
@@ -281,14 +270,6 @@ if(WITH_INTERNATIONAL OR WITH_CODEC_FFMPEG)
|
||||
string(APPEND PLATFORM_LINKFLAGS " -liconv") # boost_locale and ffmpeg needs it !
|
||||
endif()
|
||||
|
||||
if(WITH_PUGIXML)
|
||||
find_package(PugiXML)
|
||||
if(NOT PUGIXML_FOUND)
|
||||
message(WARNING "PugiXML not found, disabling WITH_PUGIXML")
|
||||
set(WITH_PUGIXML OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WITH_OPENIMAGEIO)
|
||||
find_package(OpenImageIO)
|
||||
list(APPEND OPENIMAGEIO_LIBRARIES
|
||||
@@ -304,12 +285,7 @@ if(WITH_OPENIMAGEIO)
|
||||
endif()
|
||||
|
||||
if(WITH_OPENCOLORIO)
|
||||
find_package(OpenColorIO 2.0.0)
|
||||
|
||||
if(NOT OPENCOLORIO_FOUND)
|
||||
set(WITH_OPENCOLORIO OFF)
|
||||
message(STATUS "OpenColorIO not found")
|
||||
endif()
|
||||
find_package(OpenColorIO)
|
||||
endif()
|
||||
|
||||
if(WITH_OPENVDB)
|
||||
@@ -321,11 +297,8 @@ if(WITH_OPENVDB)
|
||||
endif()
|
||||
|
||||
if(WITH_NANOVDB)
|
||||
find_package(NanoVDB)
|
||||
endif()
|
||||
|
||||
if(WITH_CPU_SIMD AND SUPPORT_NEON_BUILD)
|
||||
find_package(sse2neon)
|
||||
set(NANOVDB ${LIBDIR}/nanovdb)
|
||||
set(NANOVDB_INCLUDE_DIR ${NANOVDB}/include)
|
||||
endif()
|
||||
|
||||
if(WITH_LLVM)
|
||||
@@ -333,13 +306,6 @@ if(WITH_LLVM)
|
||||
if(NOT LLVM_FOUND)
|
||||
message(FATAL_ERROR "LLVM not found.")
|
||||
endif()
|
||||
if(WITH_CLANG)
|
||||
find_package(Clang)
|
||||
if(NOT CLANG_FOUND)
|
||||
message(FATAL_ERROR "Clang not found.")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
||||
if(WITH_CYCLES_OSL)
|
||||
@@ -352,7 +318,7 @@ if(WITH_CYCLES_OSL)
|
||||
list(APPEND OSL_LIBRARIES ${OSL_LIB_COMP} -force_load ${OSL_LIB_EXEC} ${OSL_LIB_QUERY})
|
||||
find_path(OSL_INCLUDE_DIR OSL/oslclosure.h PATHS ${CYCLES_OSL}/include)
|
||||
find_program(OSL_COMPILER NAMES oslc PATHS ${CYCLES_OSL}/bin)
|
||||
find_path(OSL_SHADER_DIR NAMES stdosl.h PATHS ${CYCLES_OSL}/share/OSL/shaders)
|
||||
find_path(OSL_SHADER_DIR NAMES stdosl.h PATHS ${CYCLES_OSL}/shaders)
|
||||
|
||||
if(OSL_INCLUDE_DIR AND OSL_LIBRARIES AND OSL_COMPILER AND OSL_SHADER_DIR)
|
||||
set(OSL_FOUND TRUE)
|
||||
@@ -362,6 +328,12 @@ if(WITH_CYCLES_OSL)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
|
||||
set(WITH_CYCLES_EMBREE OFF)
|
||||
set(WITH_OPENIMAGEDENOISE OFF)
|
||||
set(WITH_CPU_SSE OFF)
|
||||
endif()
|
||||
|
||||
if(WITH_CYCLES_EMBREE)
|
||||
find_package(Embree 3.8.0 REQUIRED)
|
||||
# Increase stack size for Embree, only works for executables.
|
||||
@@ -410,7 +382,7 @@ if(WITH_OPENMP)
|
||||
set(OPENMP_FOUND ON)
|
||||
set(OpenMP_C_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'")
|
||||
set(OpenMP_CXX_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'")
|
||||
set(OpenMP_LINKER_FLAGS "-L'${LIBDIR}/openmp/lib' -lomp")
|
||||
string(APPEND CMAKE_EXE_LINKER_FLAGS " -L'${LIBDIR}/openmp/lib' -lomp")
|
||||
|
||||
# Copy libomp.dylib to allow executables like datatoc and tests to work.
|
||||
# `@executable_path/../Resources/lib/` is a default dylib search path.
|
||||
@@ -448,14 +420,6 @@ if(WITH_GMP)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WITH_HARU)
|
||||
find_package(Haru)
|
||||
if(NOT HARU_FOUND)
|
||||
message(WARNING "Haru not found, disabling WITH_HARU")
|
||||
set(WITH_HARU OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(EXISTS ${LIBDIR})
|
||||
without_system_libs_end()
|
||||
endif()
|
||||
@@ -498,17 +462,3 @@ set(CMAKE_C_ARCHIVE_CREATE "<CMAKE_AR> Scr <TARGET> <LINK_FLAGS> <OBJECTS>")
|
||||
set(CMAKE_CXX_ARCHIVE_CREATE "<CMAKE_AR> Scr <TARGET> <LINK_FLAGS> <OBJECTS>")
|
||||
set(CMAKE_C_ARCHIVE_FINISH "<CMAKE_RANLIB> -no_warning_for_no_symbols -c <TARGET>")
|
||||
set(CMAKE_CXX_ARCHIVE_FINISH "<CMAKE_RANLIB> -no_warning_for_no_symbols -c <TARGET>")
|
||||
|
||||
if(WITH_COMPILER_CCACHE)
|
||||
if(NOT CMAKE_GENERATOR STREQUAL "Xcode")
|
||||
find_program(CCACHE_PROGRAM ccache)
|
||||
if(CCACHE_PROGRAM)
|
||||
# Makefiles and ninja
|
||||
set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_PROGRAM}" CACHE STRING "" FORCE)
|
||||
set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PROGRAM}" CACHE STRING "" FORCE)
|
||||
else()
|
||||
message(WARNING "Ccache NOT found, disabling WITH_COMPILER_CCACHE")
|
||||
set(WITH_COMPILER_CCACHE OFF)
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
@@ -20,7 +20,6 @@
|
||||
|
||||
# Xcode and system configuration for Apple.
|
||||
|
||||
# Detect processor architecture.
|
||||
if(NOT CMAKE_OSX_ARCHITECTURES)
|
||||
execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCHITECTURE OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
message(STATUS "Detected native architecture ${ARCHITECTURE}.")
|
||||
@@ -29,93 +28,56 @@ if(NOT CMAKE_OSX_ARCHITECTURES)
|
||||
FORCE)
|
||||
endif()
|
||||
|
||||
# Detect developer directory. Depending on configuration this may be either
|
||||
# an Xcode or Command Line Tools installation.
|
||||
if(NOT DEFINED OSX_SYSTEM)
|
||||
execute_process(
|
||||
COMMAND xcodebuild -version -sdk macosx SDKVersion
|
||||
OUTPUT_VARIABLE OSX_SYSTEM
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
endif()
|
||||
|
||||
# workaround for incorrect cmake xcode lookup for developer previews - XCODE_VERSION does not
|
||||
# take xcode-select path into account but would always look into /Applications/Xcode.app
|
||||
# while dev versions are named Xcode<version>-DP<preview_number>
|
||||
execute_process(
|
||||
COMMAND xcode-select --print-path
|
||||
OUTPUT_VARIABLE XCODE_DEVELOPER_DIR OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
OUTPUT_VARIABLE XCODE_CHECK OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
string(REPLACE "/Contents/Developer" "" XCODE_BUNDLE ${XCODE_CHECK}) # truncate to bundlepath in any case
|
||||
|
||||
# Detect Xcode version. It is provided by the Xcode generator but not
|
||||
# Unix Makefiles or Ninja.
|
||||
if(NOT ${CMAKE_GENERATOR} MATCHES "Xcode")
|
||||
# Unix makefile generator does not fill XCODE_VERSION var, so we get it with a command.
|
||||
# Note that `xcodebuild -version` gives output in two lines: first line will include
|
||||
# Xcode version, second one will include build number. We are only interested in the
|
||||
# first line. Here is an example of the output:
|
||||
# former one. Here is an example of the output:
|
||||
# Xcode 11.4
|
||||
# Build version 11E146
|
||||
# The expected XCODE_VERSION in this case is 11.4.
|
||||
execute_process(
|
||||
COMMAND xcodebuild -version
|
||||
OUTPUT_VARIABLE _xcode_vers_build_nr
|
||||
RESULT_VARIABLE _xcode_vers_result
|
||||
ERROR_QUIET)
|
||||
|
||||
if(_xcode_vers_result EQUAL 0)
|
||||
# Convert output to a single line by replacing newlines with spaces.
|
||||
# This is needed because regex replace can not operate through the newline character
|
||||
# and applies substitutions for each individual lines.
|
||||
string(REPLACE "\n" " " _xcode_vers_build_nr_single_line "${_xcode_vers_build_nr}")
|
||||
string(REGEX REPLACE "(.*)Xcode ([0-9\\.]+).*" "\\2" XCODE_VERSION "${_xcode_vers_build_nr_single_line}")
|
||||
unset(_xcode_vers_build_nr_single_line)
|
||||
endif()
|
||||
execute_process(COMMAND xcodebuild -version OUTPUT_VARIABLE XCODE_VERS_BUILD_NR)
|
||||
|
||||
unset(_xcode_vers_build_nr)
|
||||
unset(_xcode_vers_result)
|
||||
# Convert output to a single line by replacling newlines with spaces.
|
||||
# This is needed because regex replace can not operate through the newline character
|
||||
# and applies substitutions for each individual lines.
|
||||
string(REPLACE "\n" " " XCODE_VERS_BUILD_NR_SINGLE_LINE "${XCODE_VERS_BUILD_NR}")
|
||||
|
||||
string(REGEX REPLACE "(.*)Xcode ([0-9\\.]+).*" "\\2" XCODE_VERSION "${XCODE_VERS_BUILD_NR_SINGLE_LINE}")
|
||||
|
||||
unset(XCODE_VERS_BUILD_NR)
|
||||
unset(XCODE_VERS_BUILD_NR_SINGLE_LINE)
|
||||
endif()
|
||||
|
||||
if(XCODE_VERSION)
|
||||
# Construct SDKs path ourselves, because xcode-select path could be ambiguous.
|
||||
# Both /Applications/Xcode.app/Contents/Developer or /Applications/Xcode.app would be allowed.
|
||||
set(XCODE_SDK_DIR ${XCODE_DEVELOPER_DIR}/Platforms/MacOSX.platform//Developer/SDKs)
|
||||
|
||||
# Detect SDK version to use
|
||||
if(NOT DEFINED OSX_SYSTEM)
|
||||
execute_process(
|
||||
COMMAND xcodebuild -version -sdk macosx SDKVersion
|
||||
OUTPUT_VARIABLE OSX_SYSTEM
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
endif()
|
||||
|
||||
message(STATUS "Detected OS X ${OSX_SYSTEM} and Xcode ${XCODE_VERSION} at ${XCODE_DEVELOPER_DIR}")
|
||||
message(STATUS "SDKs Directory: " ${XCODE_SDK_DIR})
|
||||
else()
|
||||
# If no Xcode version found, try detecting command line tools.
|
||||
execute_process(
|
||||
COMMAND pkgutil --pkg-info=com.apple.pkg.CLTools_Executables
|
||||
OUTPUT_VARIABLE _cltools_pkg_info
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
RESULT_VARIABLE _cltools_pkg_info_result
|
||||
ERROR_QUIET)
|
||||
|
||||
if(_cltools_pkg_info_result EQUAL 0)
|
||||
# Extract version.
|
||||
string(REGEX REPLACE ".*version: ([0-9]+)\\.([0-9]+).*" "\\1.\\2" XCODE_VERSION "${_cltools_pkg_info}")
|
||||
# SDK directory.
|
||||
set(XCODE_SDK_DIR "${XCODE_DEVELOPER_DIR}/SDKs")
|
||||
|
||||
# Detect SDK version to use.
|
||||
if(NOT DEFINED OSX_SYSTEM)
|
||||
execute_process(
|
||||
COMMAND xcrun --show-sdk-version
|
||||
OUTPUT_VARIABLE OSX_SYSTEM
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
endif()
|
||||
|
||||
message(STATUS "Detected OS X ${OSX_SYSTEM} and Command Line Tools ${XCODE_VERSION} at ${XCODE_DEVELOPER_DIR}")
|
||||
message(STATUS "SDKs Directory: " ${XCODE_SDK_DIR})
|
||||
else()
|
||||
message(FATAL_ERROR "No Xcode or Command Line Tools detected")
|
||||
endif()
|
||||
|
||||
unset(_cltools_pkg_info)
|
||||
unset(_cltools_pkg_info_result)
|
||||
endif()
|
||||
message(STATUS "Detected OS X ${OSX_SYSTEM} and Xcode ${XCODE_VERSION} at ${XCODE_BUNDLE}")
|
||||
|
||||
# Require a relatively recent Xcode version.
|
||||
if(${XCODE_VERSION} VERSION_LESS 10.0)
|
||||
message(FATAL_ERROR "Only Xcode version 10.0 and newer is supported")
|
||||
endif()
|
||||
|
||||
# note: xcode-select path could be ambiguous,
|
||||
# cause /Applications/Xcode.app/Contents/Developer or /Applications/Xcode.app would be allowed
|
||||
# so i use a selfcomposed bundlepath here
|
||||
set(OSX_SYSROOT_PREFIX ${XCODE_BUNDLE}/Contents/Developer/Platforms/MacOSX.platform)
|
||||
message(STATUS "OSX_SYSROOT_PREFIX: " ${OSX_SYSROOT_PREFIX})
|
||||
|
||||
# Collect list of OSX system versions which will be used to detect path to corresponding SDK.
|
||||
# Start with macOS SDK version reported by xcodebuild and include possible extra ones.
|
||||
#
|
||||
@@ -139,9 +101,10 @@ endif()
|
||||
# Loop through all possible versions and pick the first one which resolves to a valid SDK path.
|
||||
set(OSX_SDK_PATH)
|
||||
set(OSX_SDK_FOUND FALSE)
|
||||
set(OSX_SDK_PREFIX ${OSX_SYSROOT_PREFIX}/Developer/SDKs)
|
||||
set(OSX_SDKROOT)
|
||||
foreach(OSX_SDK_VERSION ${OSX_SDK_TEST_VERSIONS})
|
||||
set(CURRENT_OSX_SDK_PATH "${XCODE_SDK_DIR}/MacOSX${OSX_SDK_VERSION}.sdk")
|
||||
set(CURRENT_OSX_SDK_PATH "${OSX_SDK_PREFIX}/MacOSX${OSX_SDK_VERSION}.sdk")
|
||||
if(EXISTS ${CURRENT_OSX_SDK_PATH})
|
||||
set(OSX_SDK_PATH "${CURRENT_OSX_SDK_PATH}")
|
||||
set(OSX_SDKROOT macosx${OSX_SDK_VERSION})
|
||||
@@ -149,6 +112,7 @@ foreach(OSX_SDK_VERSION ${OSX_SDK_TEST_VERSIONS})
|
||||
break()
|
||||
endif()
|
||||
endforeach()
|
||||
unset(OSX_SDK_PREFIX)
|
||||
unset(OSX_SDK_TEST_VERSIONS)
|
||||
|
||||
if(NOT OSX_SDK_FOUND)
|
||||
@@ -190,32 +154,3 @@ if(NOT ${CMAKE_GENERATOR} MATCHES "Xcode")
|
||||
string(APPEND CMAKE_CXX_FLAGS " -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET}")
|
||||
add_definitions("-DMACOSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}")
|
||||
endif()
|
||||
|
||||
if(WITH_COMPILER_CCACHE)
|
||||
if(CMAKE_GENERATOR STREQUAL "Xcode")
|
||||
find_program(CCACHE_PROGRAM ccache)
|
||||
if(CCACHE_PROGRAM)
|
||||
get_filename_component(ccompiler "${CMAKE_C_COMPILER}" NAME)
|
||||
get_filename_component(cxxcompiler "${CMAKE_CXX_COMPILER}" NAME)
|
||||
# Ccache can figure out which compiler to use if it's invoked from
|
||||
# a symlink with the name of the compiler.
|
||||
# https://ccache.dev/manual/4.1.html#_run_modes
|
||||
set(_fake_compiler_dir "${CMAKE_BINARY_DIR}/ccache")
|
||||
execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory ${_fake_compiler_dir})
|
||||
set(_fake_C_COMPILER "${_fake_compiler_dir}/${ccompiler}")
|
||||
set(_fake_CXX_COMPILER "${_fake_compiler_dir}/${cxxcompiler}")
|
||||
execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink "${CCACHE_PROGRAM}" ${_fake_C_COMPILER})
|
||||
execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink "${CCACHE_PROGRAM}" ${_fake_CXX_COMPILER})
|
||||
set(CMAKE_XCODE_ATTRIBUTE_CC ${_fake_C_COMPILER} CACHE STRING "" FORCE)
|
||||
set(CMAKE_XCODE_ATTRIBUTE_CXX ${_fake_CXX_COMPILER} CACHE STRING "" FORCE)
|
||||
set(CMAKE_XCODE_ATTRIBUTE_LD ${_fake_C_COMPILER} CACHE STRING "" FORCE)
|
||||
set(CMAKE_XCODE_ATTRIBUTE_LDPLUSPLUS ${_fake_CXX_COMPILER} CACHE STRING "" FORCE)
|
||||
unset(_fake_compiler_dir)
|
||||
unset(_fake_C_COMPILER)
|
||||
unset(_fake_CXX_COMPILER)
|
||||
else()
|
||||
message(WARNING "Ccache NOT found, disabling WITH_COMPILER_CCACHE")
|
||||
set(WITH_COMPILER_CCACHE OFF)
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
@@ -70,7 +70,6 @@ if(EXISTS ${LIBDIR})
|
||||
set(BOOST_LIBRARYDIR ${LIBDIR}/boost/lib)
|
||||
set(Boost_NO_SYSTEM_PATHS ON)
|
||||
set(OPENEXR_ROOT_DIR ${LIBDIR}/openexr)
|
||||
set(CLANG_ROOT_DIR ${LIBDIR}/llvm)
|
||||
endif()
|
||||
|
||||
if(WITH_STATIC_LIBS)
|
||||
@@ -285,10 +284,6 @@ if(WITH_NANOVDB)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WITH_CPU_SIMD AND SUPPORT_NEON_BUILD)
|
||||
find_package_wrapper(sse2neon)
|
||||
endif()
|
||||
|
||||
if(WITH_ALEMBIC)
|
||||
find_package_wrapper(Alembic)
|
||||
|
||||
@@ -355,17 +350,15 @@ if(WITH_BOOST)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WITH_PUGIXML)
|
||||
find_package_wrapper(PugiXML)
|
||||
|
||||
if(NOT PUGIXML_FOUND)
|
||||
set(WITH_PUGIXML OFF)
|
||||
message(STATUS "PugiXML not found, disabling WITH_PUGIXML")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WITH_OPENIMAGEIO)
|
||||
find_package_wrapper(OpenImageIO)
|
||||
if(NOT OPENIMAGEIO_PUGIXML_FOUND AND WITH_CYCLES_STANDALONE)
|
||||
find_package_wrapper(PugiXML)
|
||||
else()
|
||||
set(PUGIXML_INCLUDE_DIR "${OPENIMAGEIO_INCLUDE_DIR}/OpenImageIO")
|
||||
set(PUGIXML_LIBRARIES "")
|
||||
endif()
|
||||
|
||||
set(OPENIMAGEIO_LIBRARIES
|
||||
${OPENIMAGEIO_LIBRARIES}
|
||||
${PNG_LIBRARIES}
|
||||
@@ -390,7 +383,7 @@ if(WITH_OPENIMAGEIO)
|
||||
endif()
|
||||
|
||||
if(WITH_OPENCOLORIO)
|
||||
find_package_wrapper(OpenColorIO 2.0.0)
|
||||
find_package_wrapper(OpenColorIO)
|
||||
|
||||
set(OPENCOLORIO_LIBRARIES ${OPENCOLORIO_LIBRARIES})
|
||||
set(OPENCOLORIO_LIBPATH) # TODO, remove and reference the absolute path everywhere
|
||||
@@ -421,9 +414,7 @@ if(WITH_LLVM)
|
||||
endif()
|
||||
|
||||
find_package_wrapper(LLVM)
|
||||
if(WITH_CLANG)
|
||||
find_package_wrapper(Clang)
|
||||
endif()
|
||||
|
||||
# Symbol conflicts with same UTF library used by OpenCollada
|
||||
if(EXISTS ${LIBDIR})
|
||||
if(WITH_OPENCOLLADA AND (${LLVM_VERSION} VERSION_LESS "4.0.0"))
|
||||
@@ -433,13 +424,7 @@ if(WITH_LLVM)
|
||||
|
||||
if(NOT LLVM_FOUND)
|
||||
set(WITH_LLVM OFF)
|
||||
set(WITH_CLANG OFF)
|
||||
message(STATUS "LLVM not found")
|
||||
else()
|
||||
if(NOT CLANG_FOUND)
|
||||
set(WITH_CLANG OFF)
|
||||
message(STATUS "Clang not found")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
@@ -483,14 +468,6 @@ if(WITH_POTRACE)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WITH_HARU)
|
||||
find_package_wrapper(Haru)
|
||||
if(NOT HARU_FOUND)
|
||||
message(WARNING "Haru not found, disabling WITH_HARU")
|
||||
set(WITH_HARU OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(EXISTS ${LIBDIR})
|
||||
without_system_libs_end()
|
||||
endif()
|
||||
@@ -636,13 +613,7 @@ endif()
|
||||
|
||||
# GNU Compiler
|
||||
if(CMAKE_COMPILER_IS_GNUCC)
|
||||
# ffp-contract=off:
|
||||
# Automatically turned on when building with "-march=native". This is
|
||||
# explicitly turned off here as it will make floating point math give a bit
|
||||
# different results. This will lead to automated test failures. So disable
|
||||
# this until we support it. Seems to default to off in clang and the intel
|
||||
# compiler.
|
||||
set(PLATFORM_CFLAGS "-pipe -fPIC -funsigned-char -fno-strict-aliasing -ffp-contract=off")
|
||||
set(PLATFORM_CFLAGS "-pipe -fPIC -funsigned-char -fno-strict-aliasing")
|
||||
|
||||
# `maybe-uninitialized` is unreliable in release builds, but fine in debug builds.
|
||||
set(GCC_EXTRA_FLAGS_RELEASE "-Wno-maybe-uninitialized")
|
||||
@@ -716,15 +687,3 @@ set(PLATFORM_LINKFLAGS
|
||||
if(WITH_INSTALL_PORTABLE)
|
||||
string(APPEND CMAKE_EXE_LINKER_FLAGS " -no-pie")
|
||||
endif()
|
||||
|
||||
if(WITH_COMPILER_CCACHE)
|
||||
find_program(CCACHE_PROGRAM ccache)
|
||||
if(CCACHE_PROGRAM)
|
||||
# Makefiles and ninja
|
||||
set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_PROGRAM}" CACHE STRING "" FORCE)
|
||||
set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PROGRAM}" CACHE STRING "" FORCE)
|
||||
else()
|
||||
message(WARNING "Ccache NOT found, disabling WITH_COMPILER_CCACHE")
|
||||
set(WITH_COMPILER_CCACHE OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
@@ -49,7 +49,7 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
if(NOT EXISTS "${CLANG_OPENMP_DLL}")
|
||||
message(FATAL_ERROR "Clang OpenMP library (${CLANG_OPENMP_DLL}) not found.")
|
||||
endif()
|
||||
set(OpenMP_LINKER_FLAGS "\"${CLANG_OPENMP_LIB}\"")
|
||||
string(APPEND CMAKE_EXE_LINKER_FLAGS " \"${CLANG_OPENMP_LIB}\"")
|
||||
endif()
|
||||
if(WITH_WINDOWS_STRIPPED_PDB)
|
||||
message(WARNING "stripped pdb not supported with clang, disabling..")
|
||||
@@ -119,7 +119,6 @@ string(APPEND CMAKE_MODULE_LINKER_FLAGS " /SAFESEH:NO /ignore:4099")
|
||||
list(APPEND PLATFORM_LINKLIBS
|
||||
ws2_32 vfw32 winmm kernel32 user32 gdi32 comdlg32 Comctl32 version
|
||||
advapi32 shfolder shell32 ole32 oleaut32 uuid psapi Dbghelp Shlwapi
|
||||
pathcch
|
||||
)
|
||||
|
||||
if(WITH_INPUT_IME)
|
||||
@@ -240,24 +239,9 @@ if(NOT EXISTS "${LIBDIR}/")
|
||||
message(FATAL_ERROR "\n\nWindows requires pre-compiled libs at: '${LIBDIR}'. Please run `make update` in the blender source folder to obtain them.")
|
||||
endif()
|
||||
|
||||
if(CMAKE_GENERATOR MATCHES "^Visual Studio.+" AND # Only supported in the VS IDE
|
||||
MSVC_VERSION GREATER_EQUAL 1924 AND # Supported for 16.4+
|
||||
WITH_CLANG_TIDY # And Clang Tidy needs to be on
|
||||
)
|
||||
set(CMAKE_VS_GLOBALS
|
||||
"RunCodeAnalysis=false"
|
||||
"EnableMicrosoftCodeAnalysis=false"
|
||||
"EnableClangTidyCodeAnalysis=true"
|
||||
)
|
||||
set(VS_CLANG_TIDY On)
|
||||
endif()
|
||||
|
||||
# Mark libdir as system headers with a lower warn level, to resolve some warnings
|
||||
# that we have very little control over
|
||||
if(MSVC_VERSION GREATER_EQUAL 1914 AND # Available with 15.7+
|
||||
NOT MSVC_CLANG AND # But not for clang
|
||||
NOT WITH_WINDOWS_SCCACHE AND # And not when sccache is enabled
|
||||
NOT VS_CLANG_TIDY) # Clang-tidy does not like these options
|
||||
if(MSVC_VERSION GREATER_EQUAL 1914 AND NOT MSVC_CLANG AND NOT WITH_WINDOWS_SCCACHE)
|
||||
add_compile_options(/experimental:external /external:templates- /external:I "${LIBDIR}" /external:W0)
|
||||
endif()
|
||||
|
||||
@@ -422,7 +406,7 @@ if(WITH_JACK)
|
||||
endif()
|
||||
|
||||
if(WITH_PYTHON)
|
||||
set(PYTHON_VERSION 3.9) # CACHE STRING)
|
||||
set(PYTHON_VERSION 3.7) # CACHE STRING)
|
||||
|
||||
string(REPLACE "." "" _PYTHON_VERSION_NO_DOTS ${PYTHON_VERSION})
|
||||
set(PYTHON_LIBRARY ${LIBDIR}/python/${_PYTHON_VERSION_NO_DOTS}/libs/python${_PYTHON_VERSION_NO_DOTS}.lib)
|
||||
@@ -455,18 +439,10 @@ if(WITH_BOOST)
|
||||
set(BOOST ${LIBDIR}/boost)
|
||||
set(BOOST_INCLUDE_DIR ${BOOST}/include)
|
||||
set(BOOST_LIBPATH ${BOOST}/lib)
|
||||
set(BOOST_VERSION_HEADER ${BOOST_INCLUDE_DIR}/boost/version.hpp)
|
||||
if(EXISTS ${BOOST_VERSION_HEADER})
|
||||
file(STRINGS "${BOOST_VERSION_HEADER}" BOOST_LIB_VERSION REGEX "#define BOOST_LIB_VERSION ")
|
||||
if(BOOST_LIB_VERSION MATCHES "#define BOOST_LIB_VERSION \"([0-9_]+)\"")
|
||||
set(BOOST_VERSION "${CMAKE_MATCH_1}")
|
||||
endif()
|
||||
if(CMAKE_CL_64)
|
||||
set(BOOST_POSTFIX "vc141-mt-x64-1_70.lib")
|
||||
set(BOOST_DEBUG_POSTFIX "vc141-mt-gd-x64-1_70.lib")
|
||||
endif()
|
||||
if(NOT BOOST_VERSION)
|
||||
message(FATAL_ERROR "Unable to determine Boost version")
|
||||
endif()
|
||||
set(BOOST_POSTFIX "vc141-mt-x64-${BOOST_VERSION}.lib")
|
||||
set(BOOST_DEBUG_POSTFIX "vc141-mt-gd-x64-${BOOST_VERSION}.lib")
|
||||
set(BOOST_LIBRARIES
|
||||
optimized ${BOOST_LIBPATH}/libboost_date_time-${BOOST_POSTFIX}
|
||||
optimized ${BOOST_LIBPATH}/libboost_filesystem-${BOOST_POSTFIX}
|
||||
@@ -509,7 +485,7 @@ if(WITH_OPENIMAGEIO)
|
||||
set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG})
|
||||
|
||||
set(OPENIMAGEIO_DEFINITIONS "-DUSE_TBB=0")
|
||||
set(OPENCOLORIO_DEFINITIONS "-DDOpenColorIO_SKIP_IMPORTS")
|
||||
set(OPENCOLORIO_DEFINITIONS "-DOCIO_STATIC_BUILD")
|
||||
set(OPENIMAGEIO_IDIFF "${OPENIMAGEIO}/bin/idiff.exe")
|
||||
add_definitions(-DOIIO_STATIC_DEFINE)
|
||||
add_definitions(-DOIIO_NO_SSE=1)
|
||||
@@ -547,13 +523,11 @@ if(WITH_OPENCOLORIO)
|
||||
set(OPENCOLORIO_LIBPATH ${OPENCOLORIO}/lib)
|
||||
set(OPENCOLORIO_LIBRARIES
|
||||
optimized ${OPENCOLORIO_LIBPATH}/OpenColorIO.lib
|
||||
optimized ${OPENCOLORIO_LIBPATH}/tinyxml.lib
|
||||
optimized ${OPENCOLORIO_LIBPATH}/libyaml-cpp.lib
|
||||
optimized ${OPENCOLORIO_LIBPATH}/libexpatMD.lib
|
||||
optimized ${OPENCOLORIO_LIBPATH}/pystring.lib
|
||||
debug ${OPENCOLORIO_LIBPATH}/OpencolorIO_d.lib
|
||||
debug ${OPENCOLORIO_LIBPATH}/tinyxml_d.lib
|
||||
debug ${OPENCOLORIO_LIBPATH}/libyaml-cpp_d.lib
|
||||
debug ${OPENCOLORIO_LIBPATH}/libexpatdMD.lib
|
||||
debug ${OPENCOLORIO_LIBPATH}/pystring_d.lib
|
||||
)
|
||||
set(OPENCOLORIO_DEFINITIONS)
|
||||
endif()
|
||||
@@ -672,10 +646,6 @@ endif()
|
||||
if(WITH_CYCLES_OSL)
|
||||
set(CYCLES_OSL ${LIBDIR}/osl CACHE PATH "Path to OpenShadingLanguage installation")
|
||||
set(OSL_SHADER_DIR ${CYCLES_OSL}/shaders)
|
||||
# Shaders have moved around a bit between OSL versions, check multiple locations
|
||||
if(NOT EXISTS "${OSL_SHADER_DIR}")
|
||||
set(OSL_SHADER_DIR ${CYCLES_OSL}/share/OSL/shaders)
|
||||
endif()
|
||||
find_library(OSL_LIB_EXEC NAMES oslexec PATHS ${CYCLES_OSL}/lib)
|
||||
find_library(OSL_LIB_COMP NAMES oslcomp PATHS ${CYCLES_OSL}/lib)
|
||||
find_library(OSL_LIB_QUERY NAMES oslquery PATHS ${CYCLES_OSL}/lib)
|
||||
@@ -754,7 +724,7 @@ if(WINDOWS_PYTHON_DEBUG)
|
||||
string(REPLACE "/" "\\" _group_path "${_source_path}")
|
||||
source_group("${_group_path}" FILES "${_source}")
|
||||
endforeach()
|
||||
|
||||
|
||||
# If the user scripts env var is set, include scripts from there otherwise
|
||||
# include user scripts in the profile folder.
|
||||
if(DEFINED ENV{BLENDER_USER_SCRIPTS})
|
||||
@@ -765,7 +735,7 @@ if(WINDOWS_PYTHON_DEBUG)
|
||||
# Include the user scripts from the profile folder in the blender_python_user_scripts project.
|
||||
set(USER_SCRIPTS_ROOT "$ENV{appdata}/blender foundation/blender/${BLENDER_VERSION}/scripts")
|
||||
endif()
|
||||
|
||||
|
||||
file(TO_CMAKE_PATH ${USER_SCRIPTS_ROOT} USER_SCRIPTS_ROOT)
|
||||
FILE(GLOB_RECURSE inFiles "${USER_SCRIPTS_ROOT}/*.*" )
|
||||
ADD_CUSTOM_TARGET(blender_python_user_scripts SOURCES ${inFiles})
|
||||
@@ -796,14 +766,7 @@ if(WITH_XR_OPENXR)
|
||||
set(XR_OPENXR_SDK ${LIBDIR}/xr_openxr_sdk)
|
||||
set(XR_OPENXR_SDK_LIBPATH ${LIBDIR}/xr_openxr_sdk/lib)
|
||||
set(XR_OPENXR_SDK_INCLUDE_DIR ${XR_OPENXR_SDK}/include)
|
||||
# This is the old name of this library, it is checked to
|
||||
# support the transition between the old and new lib versions
|
||||
# this can be removed after the next lib update.
|
||||
if(EXISTS ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
|
||||
set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
|
||||
else()
|
||||
set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loaderd.lib)
|
||||
endif()
|
||||
set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
|
||||
else()
|
||||
message(WARNING "OpenXR-SDK was not found, disabling WITH_XR_OPENXR")
|
||||
set(WITH_XR_OPENXR OFF)
|
||||
@@ -822,15 +785,3 @@ if(WITH_POTRACE)
|
||||
set(POTRACE_LIBRARIES ${LIBDIR}/potrace/lib/potrace.lib)
|
||||
set(POTRACE_FOUND On)
|
||||
endif()
|
||||
|
||||
if(WITH_HARU)
|
||||
if(EXISTS ${LIBDIR}/haru)
|
||||
set(HARU_FOUND On)
|
||||
set(HARU_ROOT_DIR ${LIBDIR}/haru)
|
||||
set(HARU_INCLUDE_DIRS ${HARU_ROOT_DIR}/include)
|
||||
set(HARU_LIBRARIES ${HARU_ROOT_DIR}/lib/libhpdfs.lib)
|
||||
else()
|
||||
message(WARNING "Haru was not found, disabling WITH_HARU")
|
||||
set(WITH_HARU OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
@@ -220,12 +220,14 @@ def cmake_advanced_info():
|
||||
|
||||
|
||||
def cmake_cache_var(var):
|
||||
with open(os.path.join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8') as cache_file:
|
||||
lines = [
|
||||
l_strip for l in cache_file
|
||||
if (l_strip := l.strip())
|
||||
if not l_strip.startswith(("//", "#"))
|
||||
]
|
||||
cache_file = open(join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8')
|
||||
lines = [
|
||||
l_strip for l in cache_file
|
||||
for l_strip in (l.strip(),)
|
||||
if l_strip
|
||||
if not l_strip.startswith(("//", "#"))
|
||||
]
|
||||
cache_file.close()
|
||||
|
||||
for l in lines:
|
||||
if l.split(":")[0] == var:
|
||||
|
@@ -8,7 +8,6 @@
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
@@ -50,12 +49,7 @@ def svn_update(args, release_version):
|
||||
|
||||
# Checkout precompiled libraries
|
||||
if sys.platform == 'darwin':
|
||||
if platform.machine() == 'x86_64':
|
||||
lib_platform = "darwin"
|
||||
elif platform.machine() == 'arm64':
|
||||
lib_platform = "darwin_arm64"
|
||||
else:
|
||||
lib_platform = None
|
||||
lib_platform = "darwin"
|
||||
elif sys.platform == 'win32':
|
||||
# Windows checkout is usually handled by bat scripts since python3 to run
|
||||
# this script is bundled as part of the precompiled libraries. However it
|
||||
|
@@ -38,7 +38,7 @@ PROJECT_NAME = Blender
|
||||
# could be handy for archiving the generated documentation or if some version
|
||||
# control system is used.
|
||||
|
||||
PROJECT_NUMBER = "V2.93"
|
||||
PROJECT_NUMBER = "V2.92"
|
||||
|
||||
# Using the PROJECT_BRIEF tag one can provide an optional one line description
|
||||
# for a project that appears at the top of each page and should give viewer a
|
||||
@@ -453,7 +453,7 @@ TYPEDEF_HIDES_STRUCT = NO
|
||||
# the optimal cache size from a speed point of view.
|
||||
# Minimum value: 0, maximum value: 9, default value: 0.
|
||||
|
||||
LOOKUP_CACHE_SIZE = 3
|
||||
LOOKUP_CACHE_SIZE = 0
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Build related configuration options
|
||||
@@ -1321,7 +1321,7 @@ DOCSET_PUBLISHER_NAME = Publisher
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
GENERATE_HTMLHELP = NO
|
||||
GENERATE_HTMLHELP = YES
|
||||
|
||||
# The CHM_FILE tag can be used to specify the file name of the resulting .chm
|
||||
# file. You can add a path in front of the file if the result should not be
|
||||
|
@@ -121,10 +121,6 @@
|
||||
* \ingroup editors
|
||||
*/
|
||||
|
||||
/** \defgroup edasset asset
|
||||
* \ingroup editors
|
||||
*/
|
||||
|
||||
/** \defgroup edcurve curve
|
||||
* \ingroup editors
|
||||
*/
|
||||
|
@@ -52,11 +52,10 @@ outfilename = sys.argv[2]
|
||||
|
||||
cmd = [blender_bin, "--help"]
|
||||
print(" executing:", " ".join(cmd))
|
||||
ASAN_OPTIONS = "exitcode=0:" + os.environ.get("ASAN_OPTIONS", "")
|
||||
blender_help = subprocess.run(
|
||||
cmd, env={"ASAN_OPTIONS": ASAN_OPTIONS}, check=True, stdout=subprocess.PIPE).stdout.decode(encoding="utf-8")
|
||||
cmd, env={"ASAN_OPTIONS": "exitcode=0"}, check=True, stdout=subprocess.PIPE).stdout.decode(encoding="utf-8")
|
||||
blender_version = subprocess.run(
|
||||
[blender_bin, "--version"], env={"ASAN_OPTIONS": ASAN_OPTIONS}, check=True, stdout=subprocess.PIPE).stdout.decode(encoding="utf-8").strip()
|
||||
[blender_bin, "--version"], env={"ASAN_OPTIONS": "exitcode=0"}, check=True, stdout=subprocess.PIPE).stdout.decode(encoding="utf-8").strip()
|
||||
blender_version, blender_date = (blender_version.split("build") + [None, None])[0:2]
|
||||
blender_version = blender_version.rstrip().partition(" ")[2] # remove 'Blender' prefix.
|
||||
if blender_date is None:
|
||||
|
@@ -1,64 +0,0 @@
|
||||
"""
|
||||
Dependency graph: Object.to_curve()
|
||||
+++++++++++++++++++++++++++++++++++
|
||||
|
||||
Function to get a curve from text and curve objects. It is typically used by exporters, render
|
||||
engines, and tools that need to access the curve representing the object.
|
||||
|
||||
The function takes the evaluated dependency graph as a required parameter and optionally a boolean
|
||||
apply_modifiers which defaults to false. If apply_modifiers is true and the object is a curve object,
|
||||
the spline deform modifiers are applied on the control points. Note that constructive modifiers and
|
||||
modifiers that are not spline-enabled will not be applied. So modifiers like Array will not be applied
|
||||
and deform modifiers that have Apply On Spline disabled will not be applied.
|
||||
|
||||
If the object is a text object. The text will be converted into a 3D curve and returned. Modifiers are
|
||||
never applied on text objects and apply_modifiers will be ignored. If the object is neither a curve nor
|
||||
a text object, an error will be reported.
|
||||
|
||||
.. note:: The resulting curve is owned by the object. It can be freed by calling `object.to_curve_clear()`.
|
||||
.. note::
|
||||
The resulting curve must be treated as temporary, and can not be referenced from objects in the main
|
||||
database.
|
||||
"""
|
||||
import bpy
|
||||
|
||||
|
||||
class OBJECT_OT_object_to_curve(bpy.types.Operator):
|
||||
"""Convert selected object to curve and show number of splines"""
|
||||
bl_label = "DEG Object to Curve"
|
||||
bl_idname = "object.object_to_curve"
|
||||
|
||||
def execute(self, context):
|
||||
# Access input original object.
|
||||
obj = context.object
|
||||
if obj is None:
|
||||
self.report({'INFO'}, "No active object to convert to curve")
|
||||
return {'CANCELLED'}
|
||||
if obj.type not in {'CURVE', 'FONT'}:
|
||||
self.report({'INFO'}, "Object can not be converted to curve")
|
||||
return {'CANCELLED'}
|
||||
depsgraph = context.evaluated_depsgraph_get()
|
||||
# Invoke to_curve() without applying modifiers.
|
||||
curve_without_modifiers = obj.to_curve(depsgraph)
|
||||
self.report({'INFO'}, f"{len(curve_without_modifiers.splines)} splines in a new curve without modifiers.")
|
||||
# Remove temporary curve.
|
||||
obj.to_curve_clear()
|
||||
# Invoke to_curve() with applying modifiers.
|
||||
curve_with_modifiers = obj.to_curve(depsgraph, apply_modifiers = True)
|
||||
self.report({'INFO'}, f"{len(curve_with_modifiers.splines)} splines in new curve with modifiers.")
|
||||
# Remove temporary curve.
|
||||
obj.to_curve_clear()
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
def register():
|
||||
bpy.utils.register_class(OBJECT_OT_object_to_curve)
|
||||
|
||||
|
||||
def unregister():
|
||||
bpy.utils.unregister_class(OBJECT_OT_object_to_curve)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
||||
|
@@ -1,30 +0,0 @@
"""
.. note::

Properties defined at run-time store the values of the properties as custom-properties.

This method checks if the underlying data exists, causing the property to be considered *set*.

A common pattern for operators is to calculate a value for the properties
that have not had their values explicitly set by the caller
(where the caller could be a key-binding, menu-items or Python script for example).

In the case of executing operators multiple times, values are re-used from the previous execution.

For example: subdividing a mesh with a smooth value of 1.0 will keep using
that value on subsequent calls to subdivision, unless the operator is called with
that property set to a different value.

This behavior can be disabled using the ``SKIP_SAVE`` option when the property is declared (see: :mod:`bpy.props`).

The ``ghost`` argument allows detecting how a value from a previous execution is handled.

- When true: The property is considered unset even if the value from a previous call is used.
- When false: The existence of any values causes ``is_property_set`` to return true.

While this argument should typically be omitted, there are times when
it's important to know if a value is anything besides the default.

For example, the previous value may have been scaled by the scene's unit scale.
In this case scaling the value multiple times would cause problems, so the ``ghost`` argument should be false.
"""
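A minimal sketch of how the ``ghost`` argument described above could be used from an operator. The operator, its ``value`` property, and the unit-scale logic are hypothetical illustrations; only ``is_property_set`` and its ``ghost`` keyword come from the documentation above.

import bpy


class OBJECT_OT_ghost_example(bpy.types.Operator):
    """Hypothetical operator: apply the scene unit scale to a value only once"""
    bl_idname = "object.ghost_example"
    bl_label = "Ghost Argument Example"

    value: bpy.props.FloatProperty(name="Value", default=1.0)

    def invoke(self, context, event):
        # With ghost=False a value re-used from a previous execution counts as
        # "set", so the unit scale is not applied to it a second time.
        if not self.properties.is_property_set("value", ghost=False):
            self.value = 1.0 * context.scene.unit_settings.scale_length
        return self.execute(context)

    def execute(self, context):
        self.report({'INFO'}, "value: %.3f" % self.value)
        return {'FINISHED'}


bpy.utils.register_class(OBJECT_OT_ghost_example)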
@@ -1,2 +1,2 @@
|
||||
Sphinx==3.5.1
|
||||
sphinx_rtd_theme==0.5.1
|
||||
Sphinx==3.1.1
|
||||
sphinx_rtd_theme==0.5.0
|
||||
|
@@ -163,13 +163,13 @@ Now in the button's context menu select *Copy Data Path*, then paste the result
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
bpy.context.active_object.modifiers["Subdivision"].levels
|
||||
bpy.context.active_object.modifiers["Subsurf"].levels
|
||||
|
||||
Press :kbd:`Return` and you'll get the current value of 1. Now try changing the value to 2:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
bpy.context.active_object.modifiers["Subdivision"].levels = 2
|
||||
bpy.context.active_object.modifiers["Subsurf"].levels = 2
|
||||
|
||||
You can see the value update in the Subdivision Surface modifier's UI as well as the cube.
|
||||
|
||||
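The same data path can also be used from a script rather than typed into the console. A rough sketch, assuming the default cube with a Subdivision Surface modifier added as in the steps above:

import bpy

# Set the viewport level of any Subdivision Surface modifier on the active
# object, using the property shown by Copy Data Path above.
obj = bpy.context.active_object
for mod in obj.modifiers:
    if mod.type == 'SUBSURF':
        mod.levels = 2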
@@ -185,31 +185,43 @@ For example, if you want to access the texture of a brush via Python to adjust i
|
||||
#. Start in the default scene and enable Sculpt Mode from the 3D Viewport header.
|
||||
#. From the Sidebar expand the Brush Settings panel's *Texture* subpanel and add a new texture.
|
||||
*Notice the texture data-block menu itself doesn't have very useful links (you can check the tooltips).*
|
||||
#. The contrast setting isn't exposed in the Sidebar, so view the texture in the
|
||||
:ref:`Properties Editor <blender_manual:bpy.types.Texture.contrast>`.
|
||||
#. The contrast setting isn't exposed in the Sidebar, so view the texture in the properties editor:
|
||||
|
||||
- In the properties editor select the Texture tab.
|
||||
- Select brush texture.
|
||||
- Expand the *Colors* panel to locate the *Contrast* number field.
|
||||
#. Open the context menu of the contrast field and select *Online Python Reference*.
|
||||
This takes you to ``bpy.types.Texture.contrast``. Now you can see that ``contrast`` is a property of texture.
|
||||
#. To find out how to access the texture from the brush check on the references at the bottom of the page.
|
||||
Sometimes there are many references, and it may take some guesswork to find the right one,
|
||||
but in this case it's ``tool_settings.sculpt.brush.texture``.
|
||||
but in this case it's ``Brush.texture``.
|
||||
|
||||
#. Now you know that the texture can be accessed from ``bpy.data.brushes["BrushName"].texture``
|
||||
but normally you *won't* want to access the brush by name, instead you want to access the active brush.
|
||||
So the next step is to check on where brushes are accessed from via the references.
|
||||
In this case there it is simply ``bpy.context.brush``.
|
||||
|
||||
Now you can use the Python console to form the nested properties needed to access brush textures contrast:
|
||||
:menuselection:`Context --> Tool Settings --> Sculpt --> Brush --> Texture --> Contrast`.
|
||||
*Context -> Brush -> Texture -> Contrast*.
|
||||
|
||||
Since the attribute for each is given along the way you can compose the data path in the Python console:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
bpy.context.tool_settings.sculpt.brush.texture.contrast
|
||||
bpy.context.brush.texture.contrast
|
||||
|
||||
There can be multiple ways to access the same data, which you choose often depends on the task.
|
||||
An alternate path to access the same setting is:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
bpy.context.sculpt.brush.texture.contrast
|
||||
|
||||
Or access the brush directly:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
bpy.data.textures["Texture"].contrast
|
||||
bpy.data.brushes["BrushName"].texture.contrast
|
||||
|
||||
|
||||
If you are writing a user tool normally you want to use the :mod:`bpy.context` since the user normally expects
|
||||
|
@@ -35,13 +35,12 @@ but not to fully cover each topic.
|
||||
|
||||
A quick list of helpful things to know before starting:
|
||||
|
||||
- Enable :ref:`Developer Extra <blender_manual:prefs-interface-dev-extras>`
|
||||
and :ref:`Python Tooltips <blender_manual:prefs-interface-tooltips-python>`.
|
||||
- The :ref:`Python Console <blender_manual:bpy.types.SpaceConsole>`
|
||||
is great for testing one-liners; it has autocompletion so you can inspect the API quickly.
|
||||
- Button tooltips show Python attributes and operator names (when enabled see above).
|
||||
- The context menu of buttons directly links to this API documentation (when enabled see above).
|
||||
- Many python examples can be found in the text editor's template menu.
|
||||
- Blender uses Python 3.x; some online documentation still assumes version 2.x.
|
||||
- The interactive console is great for testing one-liners.
|
||||
It also has autocompletion so you can inspect the API quickly.
|
||||
- Button tooltips show Python attributes and operator names.
|
||||
- The context menu of buttons directly links to this API documentation.
|
||||
- More operator examples can be found in the text editor's template menu.
|
||||
- To examine further scripts distributed with Blender, see:
|
||||
|
||||
- ``scripts/startup/bl_ui`` for the user interface.
|
||||
@@ -238,7 +237,7 @@ Examples:
|
||||
{'FINISHED'}
|
||||
>>> bpy.ops.mesh.hide(unselected=False)
|
||||
{'FINISHED'}
|
||||
>>> bpy.ops.object.transform_apply()
|
||||
>>> bpy.ops.object.scale_apply()
|
||||
{'FINISHED'}
|
||||
|
||||
.. tip::
|
||||
|
@@ -24,9 +24,10 @@ The three main use cases for the terminal are:
|
||||
- If the script runs for too long or you accidentally enter an infinite loop,
|
||||
:kbd:`Ctrl-C` in the terminal (:kbd:`Ctrl-Break` on Windows) will quit the script early.
|
||||
|
||||
.. seealso::
|
||||
.. note::
|
||||
|
||||
:ref:`blender_manual:command_line-launch-index`.
|
||||
For Linux and macOS users this means starting the terminal first, then running Blender from within it.
|
||||
On Windows the terminal can be enabled from the Help menu.
|
||||
|
||||
|
||||
Interface Tricks
|
||||
|
@@ -75,12 +75,12 @@ def rna_info_BuildRNAInfo_cache():
|
||||
rna_info_BuildRNAInfo_cache.ret = None
|
||||
# --- end rna_info cache
|
||||
|
||||
# import rpdb2; rpdb2.start_embedded_debugger('test')
|
||||
import os
|
||||
import sys
|
||||
import inspect
|
||||
import shutil
|
||||
import logging
|
||||
import warnings
|
||||
|
||||
from textwrap import indent
|
||||
|
||||
@@ -227,7 +227,6 @@ else:
|
||||
"blf",
|
||||
"bl_math",
|
||||
"imbuf",
|
||||
"imbuf.types",
|
||||
"bmesh",
|
||||
"bmesh.ops",
|
||||
"bmesh.types",
|
||||
@@ -251,9 +250,6 @@ else:
|
||||
"gpu.types",
|
||||
"gpu.matrix",
|
||||
"gpu.select",
|
||||
"gpu.shader",
|
||||
"gpu.state",
|
||||
"gpu.texture",
|
||||
"gpu_extras",
|
||||
"idprop.types",
|
||||
"mathutils",
|
||||
@@ -555,7 +551,7 @@ def example_extract_docstring(filepath):
|
||||
file.close()
|
||||
return "", 0
|
||||
|
||||
for line in file:
|
||||
for line in file.readlines():
|
||||
line_no += 1
|
||||
if line.startswith('"""'):
|
||||
break
|
||||
@@ -563,13 +559,6 @@ def example_extract_docstring(filepath):
|
||||
text.append(line.rstrip())
|
||||
|
||||
line_no += 1
|
||||
|
||||
# Skip over blank lines so the Python code doesn't have blank lines at the top.
|
||||
for line in file:
|
||||
if line.strip():
|
||||
break
|
||||
line_no += 1
|
||||
|
||||
file.close()
|
||||
return "\n".join(text), line_no
|
||||
|
||||
@@ -1208,7 +1197,7 @@ def pycontext2sphinx(basepath):
|
||||
# for member in sorted(unique):
|
||||
# print(' "%s": ("", False),' % member)
|
||||
if len(context_type_map) > len(unique):
|
||||
warnings.warn(
|
||||
raise Exception(
|
||||
"Some types are not used: %s" %
|
||||
str([member for member in context_type_map if member not in unique]))
|
||||
else:
|
||||
@@ -1718,6 +1707,7 @@ except ModuleNotFoundError:
|
||||
|
||||
fw("if html_theme == 'sphinx_rtd_theme':\n")
|
||||
fw(" html_theme_options = {\n")
|
||||
fw(" 'canonical_url': 'https://docs.blender.org/api/current/',\n")
|
||||
# fw(" 'analytics_id': '',\n")
|
||||
# fw(" 'collapse_navigation': True,\n")
|
||||
fw(" 'sticky_navigation': False,\n")
|
||||
@@ -1729,7 +1719,6 @@ except ModuleNotFoundError:
|
||||
# not helpful since the source is generated, adds to upload size.
|
||||
fw("html_copy_source = False\n")
|
||||
fw("html_show_sphinx = False\n")
|
||||
fw("html_baseurl = 'https://docs.blender.org/api/current/'\n")
|
||||
fw("html_use_opensearch = 'https://docs.blender.org/api/current'\n")
|
||||
fw("html_split_index = True\n")
|
||||
fw("html_static_path = ['static']\n")
|
||||
@@ -1979,14 +1968,11 @@ def write_rst_importable_modules(basepath):
|
||||
"aud": "Audio System",
|
||||
"blf": "Font Drawing",
|
||||
"imbuf": "Image Buffer",
|
||||
"imbuf.types": "Image Buffer Types",
|
||||
"gpu": "GPU Shader Module",
|
||||
"gpu.types": "GPU Types",
|
||||
"gpu.matrix": "GPU Matrix Utilities",
|
||||
"gpu.select": "GPU Select Utilities",
|
||||
"gpu.shader": "GPU Shader Utilities",
|
||||
"gpu.state": "GPU State Utilities",
|
||||
"gpu.texture": "GPU Texture Utilities",
|
||||
"gpu.matrix": "GPU Matrix",
|
||||
"gpu.select": "GPU Select",
|
||||
"gpu.shader": "GPU Shader",
|
||||
"bmesh": "BMesh Module",
|
||||
"bmesh.ops": "BMesh Operators",
|
||||
"bmesh.types": "BMesh Types",
|
||||
|
156
doc/python_api/sphinx_doc_gen.sh
Executable file
@@ -0,0 +1,156 @@
|
||||
#!/bin/sh
|
||||
# run from the blender source dir
|
||||
# bash doc/python_api/sphinx_doc_gen.sh
|
||||
# ssh upload means you need an account on the server
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Upload vars
|
||||
|
||||
# disable for testing
|
||||
DO_UPLOAD=true
|
||||
DO_EXE_BLENDER=true
|
||||
DO_OUT_HTML=true
|
||||
DO_OUT_HTML_ZIP=true
|
||||
DO_OUT_PDF=false
|
||||
|
||||
if [ -z $BLENDER_BIN ] ; then
|
||||
BLENDER_BIN="./blender.bin"
|
||||
fi
|
||||
|
||||
if [ "$1" == "" ] ; then
|
||||
echo "Expected a single argument for the username on blender.org, skipping upload step!"
|
||||
DO_UPLOAD=false
|
||||
else
|
||||
SSH_USER=$1
|
||||
SSH_HOST=$SSH_USER"@blender.org"
|
||||
SSH_UPLOAD="/data/www/vhosts/www.blender.org/api" # blender_python_api_VERSION, added after
|
||||
fi
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Blender Version & Info
|
||||
|
||||
# 'Blender 2.53 (sub 1) Build' --> '2_53_1' as a shell script.
|
||||
# "_".join(str(v) for v in bpy.app.version)
|
||||
# custom blender vars
|
||||
blender_srcdir=$(dirname -- $0)/../..
|
||||
blender_version_header="$blender_srcdir/source/blender/blenkernel/BKE_blender_version.h"
|
||||
blender_version=$(grep "BLENDER_VERSION\s" "$blender_version_header" | awk '{print $3}')
|
||||
blender_version_cycle=$(grep "BLENDER_VERSION_CYCLE\s" "$blender_version_header" | awk '{print $3}')
|
||||
unset blender_version_header
|
||||
|
||||
BLENDER_VERSION=$(expr $blender_version / 100)_$(expr $blender_version % 100)
|
||||
|
||||
SSH_UPLOAD_FULL=$SSH_UPLOAD/"blender_python_api_"$BLENDER_VERSION
|
||||
|
||||
SPHINXBASE=doc/python_api
|
||||
|
||||
SPHINX_WORKDIR="$(mktemp --directory --suffix=.sphinx)"
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Generate reStructuredText (blender/python only)
|
||||
|
||||
if $DO_EXE_BLENDER ; then
|
||||
# Don't delete existing docs, now partial updates are used for quick builds.
|
||||
#
|
||||
# Disable ASAN error halt since it results in nonzero exit code on any minor issue.
|
||||
ASAN_OPTIONS=halt_on_error=0 \
|
||||
$BLENDER_BIN \
|
||||
--background \
|
||||
-noaudio \
|
||||
--factory-startup \
|
||||
--python-exit-code 1 \
|
||||
--python $SPHINXBASE/sphinx_doc_gen.py \
|
||||
-- \
|
||||
--output=$SPHINX_WORKDIR
|
||||
|
||||
|
||||
if (($? != 0)) ; then
|
||||
echo "Generating documentation failed, aborting"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Generate HTML (sphinx)
|
||||
|
||||
if $DO_OUT_HTML ; then
|
||||
sphinx-build -b html -j auto $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
|
||||
|
||||
# XXX, saves space on upload and zip, should move HTML outside
|
||||
# and zip up there, for now this is OK
|
||||
rm -rf sphinx-out/.doctrees
|
||||
|
||||
# in case we have a zip already
|
||||
rm -f blender_python_reference_$BLENDER_VERSION.zip
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# ZIP the HTML dir for upload
|
||||
|
||||
if $DO_OUT_HTML_ZIP ; then
|
||||
# lame, temp rename dir
|
||||
mv sphinx-out blender_python_reference_$BLENDER_VERSION
|
||||
zip -r -9 blender_python_reference_$BLENDER_VERSION.zip blender_python_reference_$BLENDER_VERSION
|
||||
mv blender_python_reference_$BLENDER_VERSION sphinx-out
|
||||
fi
|
||||
|
||||
cd -
|
||||
fi
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Generate PDF (sphinx/laytex)
|
||||
|
||||
if $DO_OUT_PDF ; then
|
||||
sphinx-build -n -b latex -j auto $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
|
||||
make -C $SPHINX_WORKDIR/sphinx-out
|
||||
mv $SPHINX_WORKDIR/sphinx-out/contents.pdf \
|
||||
$SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf
|
||||
fi
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Upload to blender servers, comment this section for testing
|
||||
|
||||
if $DO_UPLOAD ; then
|
||||
|
||||
ssh $SSH_USER@blender.org 'rm -rf '$SSH_UPLOAD_FULL'/*'
|
||||
rsync --progress -ave "ssh -p 22" $SPHINX_WORKDIR/sphinx-out/* $SSH_HOST:$SSH_UPLOAD_FULL/
|
||||
|
||||
## symlink the dir to a static URL
|
||||
#ssh $SSH_USER@blender.org 'rm '$SSH_UPLOAD'/250PythonDoc && ln -s '$SSH_UPLOAD_FULL' '$SSH_UPLOAD'/250PythonDoc'
|
||||
if [ "$blender_version_cycle" = "release" ] ; then
|
||||
ssh $SSH_USER@blender.org 'rm '$SSH_UPLOAD'/blender_python_api_current && ln -s '$SSH_UPLOAD_FULL' '$SSH_UPLOAD'/blender_python_api_current'
|
||||
fi
|
||||
|
||||
# better redirect
|
||||
ssh $SSH_USER@blender.org 'echo "<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\" content=\"0;url=../blender_python_api_'$BLENDER_VERSION'/\"></head><body>Redirecting...</body></html>" > '$SSH_UPLOAD'/250PythonDoc/index.html'
|
||||
|
||||
# redirect for release only so wiki can point here
|
||||
if [ "$blender_version_cycle" = "release" ] ; then
|
||||
ssh $SSH_USER@blender.org 'echo "<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\" content=\"0;url=../blender_python_api_'$BLENDER_VERSION'/\"></head><body>Redirecting...</body></html>" > '$SSH_UPLOAD'/blender_python_api/index.html'
|
||||
fi
|
||||
|
||||
if $DO_OUT_PDF ; then
|
||||
# rename so local PDF has matching name.
|
||||
rsync --progress -ave "ssh -p 22" \
|
||||
$SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf \
|
||||
$SSH_HOST:$SSH_UPLOAD_FULL/blender_python_reference_$BLENDER_VERSION.pdf
|
||||
fi
|
||||
|
||||
if $DO_OUT_HTML_ZIP ; then
|
||||
rsync --progress -ave "ssh -p 22" \
|
||||
$SPHINX_WORKDIR/blender_python_reference_$BLENDER_VERSION.zip \
|
||||
$SSH_HOST:$SSH_UPLOAD_FULL/blender_python_reference_$BLENDER_VERSION.zip
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Print some useful text
|
||||
|
||||
echo ""
|
||||
echo "Finished! view the docs from: "
|
||||
if $DO_OUT_HTML ; then echo " html:" $SPHINX_WORKDIR/sphinx-out/index.html ; fi
|
||||
if $DO_OUT_PDF ; then echo " pdf:" $SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf ; fi
|
216
doc/python_api/sphinx_doc_update.py
Executable file
@@ -0,0 +1,216 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
"""
|
||||
This is a helper script to generate Blender Python API documentation (using Sphinx), and update server data using rsync.
|
||||
|
||||
You'll need to specify your user login and password, obviously.
|
||||
|
||||
Example usage:
|
||||
|
||||
./sphinx_doc_update.py --jobs 16 --mirror ../../../docs/remote_api_backup/ --source ../.. --blender ../../../build_cmake/bin/blender --user foobar --password barfoo
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import zipfile
|
||||
|
||||
|
||||
DEFAULT_RSYNC_SERVER = "docs.blender.org"
|
||||
DEFAULT_RSYNC_ROOT = "/api/"
|
||||
DEFAULT_SYMLINK_ROOT = "/data/www/vhosts/docs.blender.org/api"
|
||||
|
||||
|
||||
def argparse_create():
|
||||
import argparse
|
||||
global __doc__
|
||||
|
||||
# When --help or no args are given, print this help
|
||||
usage_text = __doc__
|
||||
|
||||
parser = argparse.ArgumentParser(description=usage_text,
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter)
|
||||
|
||||
parser.add_argument(
|
||||
"--mirror", dest="mirror_dir",
|
||||
metavar='PATH', required=True,
|
||||
help="Path to local rsync mirror of api doc server")
|
||||
parser.add_argument(
|
||||
"--source", dest="source_dir",
|
||||
metavar='PATH', required=True,
|
||||
help="Path to Blender git repository")
|
||||
parser.add_argument(
|
||||
"--blender", dest="blender",
|
||||
metavar='PATH', required=True,
|
||||
help="Path to Blender executable")
|
||||
parser.add_argument(
|
||||
"--rsync-server", dest="rsync_server", default=DEFAULT_RSYNC_SERVER,
|
||||
metavar='RSYNCSERVER', type=str, required=False,
|
||||
help=("rsync server address"))
|
||||
parser.add_argument(
|
||||
"--rsync-root", dest="rsync_root", default=DEFAULT_RSYNC_ROOT,
|
||||
metavar='RSYNCROOT', type=str, required=False,
|
||||
help=("Root path of API doc on rsync server"))
|
||||
parser.add_argument(
|
||||
"--user", dest="user",
|
||||
metavar='USER', type=str, required=True,
|
||||
help=("User to login on rsync server"))
|
||||
parser.add_argument(
|
||||
"--password", dest="password",
|
||||
metavar='PASSWORD', type=str, required=True,
|
||||
help=("Password to login on rsync server"))
|
||||
parser.add_argument(
|
||||
"--jobs", dest="jobs_nr",
|
||||
metavar='NR', type=int, required=False, default=1,
|
||||
help="Number of sphinx building jobs to launch in parallel")
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
def main():
|
||||
# ----------
|
||||
# Parse Args
|
||||
|
||||
args = argparse_create().parse_args()
|
||||
|
||||
rsync_base = "rsync://%s@%s:%s" % (args.user, args.rsync_server, args.rsync_root)
|
||||
|
||||
blenver = api_blenver = api_blenver_zip = ""
|
||||
api_name = ""
|
||||
branch = ""
|
||||
is_release = is_beta = False
|
||||
|
||||
# I) Update local mirror using rsync.
|
||||
rsync_mirror_cmd = ("rsync", "--delete-after", "-avzz", rsync_base, args.mirror_dir)
|
||||
subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmp_dir:
|
||||
# II) Generate doc source in temp dir.
|
||||
doc_gen_cmd = (
|
||||
args.blender, "--background", "-noaudio", "--factory-startup", "--python-exit-code", "1",
|
||||
"--python", "%s/doc/python_api/sphinx_doc_gen.py" % args.source_dir, "--",
|
||||
"--output", tmp_dir
|
||||
)
|
||||
subprocess.run(doc_gen_cmd)
|
||||
|
||||
# III) Get Blender version info.
|
||||
getver_file = os.path.join(tmp_dir, "blendver.txt")
|
||||
getver_script = (r"""import sys, bpy
|
||||
with open(sys.argv[-1], 'w') as f:
|
||||
is_release = bpy.app.version_cycle in {'rc', 'release'}
|
||||
is_beta = bpy.app.version_cycle in {'beta'}
|
||||
branch = bpy.app.build_branch.split()[0].decode()
|
||||
f.write('%d\n' % is_release)
|
||||
f.write('%d\n' % is_beta)
|
||||
f.write('%s\n' % branch)
|
||||
f.write('%d.%d\n' % (bpy.app.version[0], bpy.app.version[1]))
|
||||
f.write('%d.%d\n' % (bpy.app.version[0], bpy.app.version[1])
|
||||
if (is_release or is_beta) else '%s\n' % branch)
|
||||
f.write('%d_%d' % (bpy.app.version[0], bpy.app.version[1]))
|
||||
""")
|
||||
get_ver_cmd = (args.blender, "--background", "-noaudio", "--factory-startup", "--python-exit-code", "1",
|
||||
"--python-expr", getver_script, "--", getver_file)
|
||||
subprocess.run(get_ver_cmd)
|
||||
with open(getver_file) as f:
|
||||
is_release, is_beta, branch, blenver, api_blenver, api_blenver_zip = f.read().split("\n")
|
||||
is_release = bool(int(is_release))
|
||||
is_beta = bool(int(is_beta))
|
||||
os.remove(getver_file)
|
||||
|
||||
# IV) Build doc.
|
||||
curr_dir = os.getcwd()
|
||||
os.chdir(tmp_dir)
|
||||
sphinx_cmd = ("sphinx-build", "-j", str(args.jobs_nr), "-b", "html", "sphinx-in", "sphinx-out")
|
||||
subprocess.run(sphinx_cmd)
|
||||
shutil.rmtree(os.path.join("sphinx-out", ".doctrees"))
|
||||
os.chdir(curr_dir)
|
||||
|
||||
# V) Cleanup existing matching dir in server mirror (if any), and copy new doc.
|
||||
api_name = api_blenver
|
||||
api_dir = os.path.join(args.mirror_dir, api_name)
|
||||
if os.path.exists(api_dir):
|
||||
if os.path.islink(api_dir):
|
||||
os.remove(api_dir)
|
||||
else:
|
||||
shutil.rmtree(api_dir)
|
||||
os.rename(os.path.join(tmp_dir, "sphinx-out"), api_dir)
|
||||
|
||||
# VI) Create zip archive.
|
||||
zip_name = "blender_python_reference_%s" % api_blenver_zip # We can't use 'release' postfix here...
|
||||
zip_path = os.path.join(args.mirror_dir, zip_name)
|
||||
with zipfile.ZipFile(zip_path, 'w') as zf:
|
||||
for dirname, _, filenames in os.walk(api_dir):
|
||||
for filename in filenames:
|
||||
filepath = os.path.join(dirname, filename)
|
||||
zip_filepath = os.path.join(zip_name, os.path.relpath(filepath, api_dir))
|
||||
zf.write(filepath, arcname=zip_filepath)
|
||||
os.rename(zip_path, os.path.join(api_dir, "%s.zip" % zip_name))
|
||||
|
||||
        # VII) Create symlinks and html redirects.
        if is_release:
            symlink = os.path.join(args.mirror_dir, "current")
            if os.path.exists(symlink):
                if os.path.islink(symlink):
                    os.remove(symlink)
                else:
                    shutil.rmtree(symlink)
            os.symlink("./%s" % api_name, symlink)
            with open(os.path.join(args.mirror_dir, "250PythonDoc/index.html"), 'w') as f:
                f.write("<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\" "
                        "content=\"0;url=../%s/\"></head><body>Redirecting...</body></html>" % api_name)
        elif is_beta:
            # We do not have any particular symlink for that stage.
            pass
        elif branch == "master":
            # Also create a symlink from version number to actual master api doc.
            symlink = os.path.join(args.mirror_dir, blenver)
            if os.path.exists(symlink):
                if os.path.islink(symlink):
                    os.remove(symlink)
                else:
                    shutil.rmtree(symlink)
            os.symlink("./%s" % api_name, symlink)
            with open(os.path.join(args.mirror_dir, "blender_python_api/index.html"), 'w') as f:
                f.write("<html><head><title>Redirecting...</title><meta http-equiv=\"REFRESH\" "
                        "content=\"0;url=../%s/\"></head><body>Redirecting...</body></html>" % api_name)

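        # For reference (hypothetical api_name "3.0"): the two adjacent string
        # literals above concatenate to a plain meta-refresh page,
        #     <html><head><title>Redirecting...</title><meta http-equiv="REFRESH" content="0;url=../3.0/"></head><body>Redirecting...</body></html>
        # which points one directory up at the versioned doc folder.
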
        # VIII) Upload (first do a dry-run so user can ensure everything is OK).
        print("Doc generated in local mirror %s, please check it before uploading "
              "(hit [Enter] to continue, [Ctrl-C] to exit):" % api_dir)
        sys.stdin.read(1)

        rsync_mirror_cmd = ("rsync", "--dry-run", "--delete-after", "-avzz", args.mirror_dir, rsync_base)
        subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))

        print("Rsync upload simulated, please check everything is OK (hit [Enter] to continue, [Ctrl-C] to exit):")
        sys.stdin.read(1)

        rsync_mirror_cmd = ("rsync", "--delete-after", "-avzz", args.mirror_dir, rsync_base)
        subprocess.run(rsync_mirror_cmd, env=dict(os.environ, RSYNC_PASSWORD=args.password))


if __name__ == "__main__":
    main()
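
The argument parser for this script is not shown in this part of the diff. The attributes used above (args.blender, args.source_dir, args.mirror_dir, args.password, args.jobs_nr) imply roughly the shape sketched below; the flag names and defaults are assumptions for illustration, not the script's actual definitions.

    import argparse
    import multiprocessing

    def create_argparser():
        # Hypothetical parser covering only the attributes referenced in the steps above.
        parser = argparse.ArgumentParser(description="Rebuild and upload the Blender Python API reference.")
        parser.add_argument("--blender", required=True, help="Blender executable used to generate the doc sources.")
        parser.add_argument("--source-dir", dest="source_dir", required=True, help="Blender source tree containing doc/python_api/sphinx_doc_gen.py.")
        parser.add_argument("--mirror-dir", dest="mirror_dir", required=True, help="Local mirror of the online API documentation.")
        parser.add_argument("--password", required=True, help="Password exported as RSYNC_PASSWORD for the rsync daemon.")
        parser.add_argument("--jobs", dest="jobs_nr", type=int, default=multiprocessing.cpu_count(), help="Number of parallel sphinx-build jobs (-j).")
        return parser
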
@@ -1,5 +1,7 @@
/* T76453: Prevent Long enum lists */
.field-list li {
/* Prevent Long enum lists */
.field-body {
    display: block;
    width: 100%;
    max-height: 245px;
    overflow-y: auto !important;
}

4 extern/README vendored
@@ -1,4 +0,0 @@
When updating a library remember to:

* Update the README.blender with the corresponding version.
* Update the THIRD-PARTY-LICENSE.txt document

5 extern/audaspace/README.blender vendored
@@ -1,5 +0,0 @@
Project: Audaspace
URL: https://audaspace.github.io/
License: Apache 2.0
Upstream version: 1.3 (Last Release)
Local modifications: None

2 extern/audaspace/blender_config.cmake vendored
@@ -24,6 +24,6 @@ set(JACK_FOUND ${WITH_JACK})
set(LIBSNDFILE_FOUND ${WITH_CODEC_SNDFILE})
set(OPENAL_FOUND ${WITH_OPENAL})
set(PYTHONLIBS_FOUND TRUE)
set(NUMPY_FOUND ${WITH_PYTHON_NUMPY})
set(NUMPY_FOUND TRUE)
set(NUMPY_INCLUDE_DIRS ${PYTHON_NUMPY_INCLUDE_DIRS})
set(SDL_FOUND ${WITH_SDL})

@@ -72,9 +72,6 @@ protected:
	/// The channel mapper reader in between.
	std::shared_ptr<ChannelMapperReader> m_mapper;

	/// Whether the source is being read for the first time.
	bool m_first_reading;

	/// Whether to keep the source if end of it is reached.
	bool m_keep;

19 extern/audaspace/src/devices/SoftwareDevice.cpp vendored
@@ -78,7 +78,7 @@ bool SoftwareDevice::SoftwareHandle::pause(bool keep)
}

SoftwareDevice::SoftwareHandle::SoftwareHandle(SoftwareDevice* device, std::shared_ptr<IReader> reader, std::shared_ptr<PitchReader> pitch, std::shared_ptr<ResampleReader> resampler, std::shared_ptr<ChannelMapperReader> mapper, bool keep) :
	m_reader(reader), m_pitch(pitch), m_resampler(resampler), m_mapper(mapper), m_first_reading(true), m_keep(keep), m_user_pitch(1.0f), m_user_volume(1.0f), m_user_pan(0.0f), m_volume(0.0f), m_old_volume(0.0f), m_loopcount(0),
	m_reader(reader), m_pitch(pitch), m_resampler(resampler), m_mapper(mapper), m_keep(keep), m_user_pitch(1.0f), m_user_volume(1.0f), m_user_pan(0.0f), m_volume(0.0f), m_old_volume(0.0f), m_loopcount(0),
	m_relative(true), m_volume_max(1.0f), m_volume_min(0), m_distance_max(std::numeric_limits<float>::max()),
	m_distance_reference(1.0f), m_attenuation(1.0f), m_cone_angle_outer(M_PI), m_cone_angle_inner(M_PI), m_cone_volume_outer(0),
	m_flags(RENDER_CONE), m_stop(nullptr), m_stop_data(nullptr), m_status(STATUS_PLAYING), m_device(device)
@@ -106,14 +106,6 @@ void SoftwareDevice::SoftwareHandle::update()
	if(m_pitch->getSpecs().channels != CHANNELS_MONO)
	{
		m_volume = m_user_volume;

		// we don't know a previous volume if this source has never been read before
		if(m_first_reading)
		{
			m_old_volume = m_volume;
			m_first_reading = false;
		}

		m_pitch->setPitch(m_user_pitch);
		return;
	}
@@ -222,13 +214,6 @@ void SoftwareDevice::SoftwareHandle::update()
		m_volume *= m_user_volume;
	}

	// we don't know a previous volume if this source has never been read before
	if(m_first_reading)
	{
		m_old_volume = m_volume;
		m_first_reading = false;
	}

	// 3D Cue

	Quaternion orientation;
@@ -769,8 +754,6 @@ void SoftwareDevice::mix(data_t* buffer, int length)
			{
				m_mixer->mix(buf, pos, len, sound->m_volume, sound->m_old_volume);

				sound->m_old_volume = sound->m_volume;

				pos += len;

				if(sound->m_loopcount > 0)

@@ -22,7 +22,6 @@
#include <mutex>

#define KEEP_TIME 10
#define POSITION_EPSILON (1.0 / static_cast<double>(RATE_48000))

AUD_NAMESPACE_BEGIN

@@ -65,7 +64,7 @@ bool SequenceHandle::updatePosition(double position)
	if(m_handle.get())
	{
		// we currently have a handle, let's check where we are
		if(position - POSITION_EPSILON >= m_entry->m_end)
		if(position >= m_entry->m_end)
		{
			if(position >= m_entry->m_end + KEEP_TIME)
				// far end, stopping
@@ -77,7 +76,7 @@ bool SequenceHandle::updatePosition(double position)
			return true;
		}
	}
	else if(position + POSITION_EPSILON >= m_entry->m_begin)
	else if(position >= m_entry->m_begin)
	{
		// inside, resuming
		m_handle->resume();
@@ -99,7 +98,7 @@ bool SequenceHandle::updatePosition(double position)
	else
	{
		// we don't have a handle, let's start if we should be playing
		if(position + POSITION_EPSILON >= m_entry->m_begin && position - POSITION_EPSILON <= m_entry->m_end)
		if(position >= m_entry->m_begin && position <= m_entry->m_end)
		{
			start();
			return m_valid;

5 extern/bullet2/README.blender vendored
@@ -1,5 +0,0 @@
Project: Bullet Continuous Collision Detection and Physics Library
URL: http://bulletphysics.org
License: zlib
Upstream version: 3.07
Local modifications: Fixed inertia

1 extern/ceres/README.blender vendored
@@ -1,5 +1,4 @@
Project: Ceres Solver
URL: http://ceres-solver.org/
License: BSD 3-Clause
Upstream version 2.0.0
Local modifications: None

2 extern/curve_fit_nd/README.blender vendored
@@ -1,5 +1,5 @@
Project: Curve-Fit-nD
URL: https://github.com/ideasman42/curve-fit-nd
License: BSD 3-Clause
Upstream version: ddcd5bd (Last Release)
Upstream version: Unknown (Last Release)
Local modifications: None

16 extern/draco/CMakeLists.txt vendored
@@ -19,24 +19,20 @@
# ***** END GPL LICENSE BLOCK *****

# Build Draco library.
add_subdirectory(draco)
add_subdirectory(dracoenc)

# Build Draco-Blender bridging module.
# Build blender-draco-exporter module.
set(SRC
  src/common.cpp
  src/common.h
  src/decoder.cpp
  src/decoder.h
  src/encoder.cpp
  src/encoder.h
  src/draco-compressor.cpp
  src/draco-compressor.h
)

set(INC
  draco/src
  dracoenc/src
)

set(LIB
  draco
  dracoenc
)

add_library(extern_draco SHARED "${SRC}")

5 extern/draco/README.blender vendored
@@ -1,5 +0,0 @@
Project: Draco
URL: https://google.github.io/draco/
License: Apache 2.0
Upstream version: 1.3.6
Local modifications: None

Some files were not shown because too many files have changed in this diff.