Compare commits
2 Commits
temp-fcurv...temp-angav

Author | SHA1 | Date
---|---|---
 | db24fc4f27 | 
 | 71b595fc0e | 
@@ -198,12 +198,11 @@ option(WITH_OPENIMAGEDENOISE "Enable the OpenImageDenoise compositing node" ON

option(WITH_OPENSUBDIV "Enable OpenSubdiv for surface subdivision" ON)

option(WITH_POTRACE "Enable features relying on potrace" ON)
option(WITH_POTRACE "Enable features relying on potrace" OFF)
option(WITH_OPENVDB "Enable features relying on OpenVDB" ON)
option(WITH_OPENVDB_BLOSC "Enable blosc compression for OpenVDB, only enable if OpenVDB was built with blosc support" ON)
option(WITH_OPENVDB_3_ABI_COMPATIBLE "Assume OpenVDB library has been compiled with version 3 ABI compatibility" OFF)
mark_as_advanced(WITH_OPENVDB_3_ABI_COMPATIBLE)
option(WITH_NANOVDB "Enable usage of NanoVDB data structure for accelerated rendering on the GPU" OFF)

# GHOST Windowing Library Options
option(WITH_GHOST_DEBUG "Enable debugging output for the GHOST library" OFF)

@@ -224,6 +223,8 @@ if(WITH_GHOST_X11)
option(WITH_GHOST_XDND "Enable drag'n'drop support on X11 using XDND protocol" ON)
endif()

option(WITH_GMP "Use the gmp library for more accurate booleans" OFF)

# Misc...
option(WITH_HEADLESS "Build without graphical support (renderfarm, server mode only)" OFF)
mark_as_advanced(WITH_HEADLESS)

@@ -705,9 +706,6 @@ set_and_warn_dependency(WITH_TBB WITH_OPENIMAGEDENOISE OFF)
set_and_warn_dependency(WITH_TBB WITH_OPENVDB OFF)
set_and_warn_dependency(WITH_TBB WITH_MOD_FLUID OFF)

# NanoVDB requires OpenVDB to convert the data structure
set_and_warn_dependency(WITH_OPENVDB WITH_NANOVDB OFF)

# OpenVDB uses 'half' type from OpenEXR & fails to link without OpenEXR enabled.
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENVDB OFF)

@@ -861,13 +859,13 @@ if(NOT CMAKE_BUILD_TYPE MATCHES "Release")
if(MSVC)
set(COMPILER_ASAN_LINKER_FLAGS "/FUNCTIONPADMIN:6")
endif()

if(APPLE AND COMPILER_ASAN_LIBRARY)
string(REPLACE " " ";" _list_COMPILER_ASAN_CFLAGS ${COMPILER_ASAN_CFLAGS})
add_compile_options("$<$<NOT:$<CONFIG:Release>>:${_list_COMPILER_ASAN_CFLAGS}>")
add_link_options("$<$<NOT:$<CONFIG:Release>>:-fno-omit-frame-pointer;-fsanitize=address>")
unset(_list_COMPILER_ASAN_CFLAGS)
elseif(COMPILER_ASAN_LIBRARY)
if(APPLE)
# COMPILER_ASAN_CFLAGS and COMPILER_ASAN_CXXFLAGS are the same as of
# now, so use either for PLATFORM_CFLAGS.
set(PLATFORM_CFLAGS "${PLATFORM_CFLAGS} ${COMPILER_ASAN_CFLAGS}")
set(COMPILER_ASAN_LINKER_FLAGS "-fno-omit-frame-pointer -fsanitize=address")
endif(APPLE)
if(COMPILER_ASAN_LIBRARY)
set(PLATFORM_LINKLIBS "${PLATFORM_LINKLIBS};${COMPILER_ASAN_LIBRARY}")
set(PLATFORM_LINKFLAGS "${COMPILER_ASAN_LIBRARY} ${COMPILER_ASAN_LINKER_FLAGS}")
set(PLATFORM_LINKFLAGS_DEBUG "${COMPILER_ASAN_LIBRARY} ${COMPILER_ASAN_LINKER_FLAGS}")
@@ -517,7 +517,7 @@ doc_py: .FORCE
ASAN_OPTIONS=halt_on_error=0 \
$(BLENDER_BIN) --background -noaudio --factory-startup \
--python doc/python_api/sphinx_doc_gen.py
sphinx-build -b html -j $(NPROCS) doc/python_api/sphinx-in doc/python_api/sphinx-out
cd doc/python_api ; sphinx-build -b html sphinx-in sphinx-out
@echo "docs written into: '$(BLENDER_DIR)/doc/python_api/sphinx-out/index.html'"

doc_doxy: .FORCE

@@ -117,6 +117,7 @@ if(WIN32)
include(cmake/yamlcpp.cmake)
# LCMS is an OCIO dep, but only if you build the apps, leaving it here for convenience
# include(cmake/lcms.cmake)
endif()

if(NOT WIN32 OR ENABLE_MINGW64)

@@ -37,11 +37,6 @@ if(APPLE)
--with-pic
)
endif()
elseif(UNIX)
set(GMP_OPTIONS
${GMP_OPTIONS}
--with-pic
)
endif()

ExternalProject_Add(external_gmp

@@ -26,7 +26,6 @@ endif()
message("HARVEST_TARGET = ${HARVEST_TARGET}")

if(WIN32)
if(BUILD_MODE STREQUAL Release)
add_custom_target(Harvest_Release_Results
COMMAND # jpeg rename libfile + copy include

@@ -146,9 +145,6 @@ harvest(openjpeg/lib openjpeg/lib "*.a")
harvest(opensubdiv/include opensubdiv/include "*.h")
harvest(opensubdiv/lib opensubdiv/lib "*.a")
harvest(openvdb/include/openvdb openvdb/include/openvdb "*.h")
if(WITH_NANOVDB)
harvest(openvdb/nanovdb nanovdb/include/nanovdb "*.h")
endif()
harvest(openvdb/lib openvdb/lib "*.a")
harvest(xr_openxr_sdk/include/openxr xr_openxr_sdk/include/openxr "*.h")
harvest(xr_openxr_sdk/lib xr_openxr_sdk/lib "*.a")

@@ -59,4 +59,4 @@ else(WIN32)
)
set(JPEG_LIBRARY libjpeg${LIBEXT})
endif()
endif(WIN32)
@@ -54,20 +54,6 @@ set(OPENVDB_EXTRA_ARGS
-DOPENVDB_CORE_STATIC=${OPENVDB_STATIC}
-DOPENVDB_BUILD_BINARIES=Off
-DCMAKE_DEBUG_POSTFIX=_d
# NanoVDB is header-only, so only need the install target
-DNANOVDB_BUILD_UNITTESTS=OFF
-DNANOVDB_BUILD_EXAMPLES=OFF
-DNANOVDB_BUILD_BENCHMARK=OFF
-DNANOVDB_BUILD_DOCS=OFF
-DNANOVDB_BUILD_TOOLS=OFF
-DNANOVDB_CUDA_KEEP_PTX=OFF
-DNANOVDB_USE_OPENGL=OFF
-DNANOVDB_USE_OPENGL=OFF
-DNANOVDB_USE_CUDA=OFF
-DNANOVDB_USE_TBB=OFF
-DNANOVDB_USE_OPTIX=OFF
-DNANOVDB_USE_OPENVDB=OFF
-DNANOVDB_ALLOW_FETCHCONTENT=OFF
)

if(WIN32)

@@ -88,18 +74,12 @@ else()
)
endif()

if(WITH_NANOVDB)
set(OPENVDB_PATCH_FILE openvdb_nanovdb.diff)
else()
set(OPENVDB_PATCH_FILE openvdb.diff)
endif()

ExternalProject_Add(openvdb
URL ${OPENVDB_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${OPENVDB_HASH}
PREFIX ${BUILD_DIR}/openvdb
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/openvdb/src/openvdb < ${PATCH_DIR}/${OPENVDB_PATCH_FILE}
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/openvdb/src/openvdb < ${PATCH_DIR}/openvdb.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/openvdb ${DEFAULT_CMAKE_FLAGS} ${OPENVDB_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/openvdb
)

@@ -116,17 +96,11 @@ add_dependencies(
if(WIN32)
if(BUILD_MODE STREQUAL Release)
ExternalProject_Add_Step(openvdb after_install
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/openvdb/include/openvdb ${HARVEST_TARGET}/openvdb/include/openvdb
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/openvdb/include ${HARVEST_TARGET}/openvdb/include
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/openvdb/lib/openvdb.lib ${HARVEST_TARGET}/openvdb/lib/openvdb.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/openvdb/bin/openvdb.dll ${HARVEST_TARGET}/openvdb/bin/openvdb.dll
DEPENDEES install
)
if(WITH_NANOVDB)
ExternalProject_Add_Step(openvdb nanovdb_install
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/openvdb/nanovdb ${HARVEST_TARGET}/nanovdb/include/nanovdb
DEPENDEES after_install
)
endif()
endif()
if(BUILD_MODE STREQUAL Debug)
ExternalProject_Add_Step(openvdb after_install

@@ -21,7 +21,6 @@ if(WIN32)
endif()
option(WITH_WEBP "Enable building of oiio with webp support" OFF)
option(WITH_BOOST_PYTHON "Enable building of boost with python support" OFF)
option(WITH_NANOVDB "Enable building of OpenVDB with NanoVDB included" OFF)
set(MAKE_THREADS 1 CACHE STRING "Number of threads to run make with")

if(NOT BUILD_MODE)

@@ -145,15 +145,9 @@ set(TBB_VERSION 2019_U9)
set(TBB_URI https://github.com/oneapi-src/oneTBB/archive/${TBB_VERSION}.tar.gz)
set(TBB_HASH 26263622e9187212ec240dcf01b66207)

if(WITH_NANOVDB)
set(OPENVDB_GIT_UID e62f7a0bf1e27397223c61ddeaaf57edf111b77f)
set(OPENVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/${OPENVDB_GIT_UID}.tar.gz)
set(OPENVDB_HASH 90919510bc6ccd630fedc56f748cb199)
else()
set(OPENVDB_VERSION 7.0.0)
set(OPENVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/v${OPENVDB_VERSION}.tar.gz)
set(OPENVDB_HASH fd6c4f168282f7e0e494d290cd531fa8)
endif()
set(OPENVDB_VERSION 7.0.0)
set(OPENVDB_URI https://github.com/dreamworksanimation/openvdb/archive/v${OPENVDB_VERSION}.tar.gz)
set(OPENVDB_HASH fd6c4f168282f7e0e494d290cd531fa8)

set(IDNA_VERSION 2.9)
set(CHARDET_VERSION 3.0.4)
@@ -1,135 +0,0 @@
diff -Naur orig/cmake/FindIlmBase.cmake openvdb/cmake/FindIlmBase.cmake
--- orig/cmake/FindIlmBase.cmake 2019-12-06 12:11:33 -0700
+++ openvdb/cmake/FindIlmBase.cmake 2020-08-12 12:48:44 -0600
@@ -217,6 +217,8 @@
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib")
endif()
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${_IlmBase_Version_Suffix}.lib")
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "_s.lib")
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "_s_d.lib")
else()
if(ILMBASE_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")

diff -Naur orig/cmake/FindOpenEXR.cmake openvdb/cmake/FindOpenEXR.cmake
--- orig/cmake/FindOpenEXR.cmake 2019-12-06 12:11:33 -0700
+++ openvdb/cmake/FindOpenEXR.cmake 2020-08-12 12:48:44 -0600
@@ -210,6 +210,8 @@
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib")
endif()
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${_OpenEXR_Version_Suffix}.lib")
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "_s.lib")
+ list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "_s_d.lib")
else()
if(OPENEXR_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")

diff -Naur orig/openvdb/openvdb/CMakeLists.txt openvdb/openvdb/openvdb/CMakeLists.txt
--- orig/openvdb/openvdb/CMakeLists.txt 2019-12-06 12:11:33 -0700
+++ openvdb/openvdb/openvdb/CMakeLists.txt 2020-08-12 14:12:26 -0600
@@ -105,7 +105,9 @@
# http://boost.2283326.n4.nabble.com/CMake-config-scripts-broken-in-1-70-td4708957.html
# https://github.com/boostorg/boost_install/commit/160c7cb2b2c720e74463865ef0454d4c4cd9ae7c
set(BUILD_SHARED_LIBS ON)
- set(Boost_USE_STATIC_LIBS OFF)
+ if(NOT WIN32) # blender links boost statically on windows
+ set(Boost_USE_STATIC_LIBS OFF)
+ endif()
endif()

find_package(Boost ${MINIMUM_BOOST_VERSION} REQUIRED COMPONENTS iostreams system)

@@ -193,6 +195,7 @@
if(OPENVDB_DISABLE_BOOST_IMPLICIT_LINKING)
add_definitions(-DBOOST_ALL_NO_LIB)
endif()
+ add_definitions(-D__TBB_NO_IMPLICIT_LINKAGE -DOPENVDB_OPENEXR_STATICLIB)
endif()

# @todo Should be target definitions

@@ -383,7 +386,12 @@
# imported targets.

if(OPENVDB_CORE_SHARED)
- add_library(openvdb_shared SHARED ${OPENVDB_LIBRARY_SOURCE_FILES})
+ if(WIN32)
+ configure_file(version.rc.in ${CMAKE_CURRENT_BINARY_DIR}/version.rc @ONLY)
+ add_library(openvdb_shared SHARED ${OPENVDB_LIBRARY_SOURCE_FILES} ${CMAKE_CURRENT_BINARY_DIR}/version.rc)
+ else()
+ add_library(openvdb_shared SHARED ${OPENVDB_LIBRARY_SOURCE_FILES})
+ endif()
endif()

if(OPENVDB_CORE_STATIC)

diff -Naur orig/openvdb/openvdb/version.rc.in openvdb/openvdb/openvdb/version.rc.in
--- orig/openvdb/openvdb/version.rc.in 1969-12-31 17:00:00 -0700
+++ openvdb/openvdb/openvdb/version.rc.in 2020-08-12 14:15:01 -0600
@@ -0,0 +1,48 @@
+#include <winver.h>
+
+#define VER_FILEVERSION @OpenVDB_MAJOR_VERSION@,@OpenVDB_MINOR_VERSION@,@OpenVDB_PATCH_VERSION@,0
+#define VER_FILEVERSION_STR "@OpenVDB_MAJOR_VERSION@.@OpenVDB_MINOR_VERSION@.@OpenVDB_PATCH_VERSION@.0\0"
+
+#define VER_PRODUCTVERSION @OpenVDB_MAJOR_VERSION@,@OpenVDB_MINOR_VERSION@,@OpenVDB_PATCH_VERSION@,0
+#define VER_PRODUCTVERSION_STR "@OpenVDB_MAJOR_VERSION@.@OpenVDB_MINOR_VERSION@\0"
+
+#ifndef DEBUG
+#define VER_DEBUG 0
+#else
+#define VER_DEBUG VS_FF_DEBUG
+#endif
+
+VS_VERSION_INFO VERSIONINFO
+FILEVERSION VER_FILEVERSION
+PRODUCTVERSION VER_PRODUCTVERSION
+FILEFLAGSMASK VS_FFI_FILEFLAGSMASK
+FILEFLAGS (VER_DEBUG)
+FILEOS VOS__WINDOWS32
+FILETYPE VFT_DLL
+FILESUBTYPE VFT2_UNKNOWN
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904E4"
+ BEGIN
+ VALUE "FileDescription", "OpenVDB"
+ VALUE "FileVersion", VER_FILEVERSION_STR
+ VALUE "InternalName", "OpenVDB"
+ VALUE "ProductName", "OpenVDB"
+ VALUE "ProductVersion", VER_PRODUCTVERSION_STR
+ END
+ END
+
+ BLOCK "VarFileInfo"
+ BEGIN
+ /* The following line should only be modified for localized versions. */
+ /* It consists of any number of WORD,WORD pairs, with each pair */
+ /* describing a language,codepage combination supported by the file. */
+ /* */
+ /* For example, a file might have values "0x409,1252" indicating that it */
+ /* supports English language (0x409) in the Windows ANSI codepage (1252). */
+
+ VALUE "Translation", 0x409, 1252
+
+END

diff -Naur openvdb-original/CMakeLists.txt openvdb/CMakeLists.txt
--- openvdb-original/CMakeLists.txt 2020-08-27 03:34:02.000000000 +0200
+++ openvdb/CMakeLists.txt 2020-09-02 10:56:21.665735244 +0200
@@ -68,6 +68,7 @@
option(OPENVDB_INSTALL_HOUDINI_PYTHONRC [=[Install a Houdini startup script that sets
the visibilty of OpenVDB nodes and their native equivalents.]=] OFF)
option(OPENVDB_BUILD_MAYA_PLUGIN "Build the Maya plugin" OFF)
+option(OPENVDB_BUILD_NANOVDB "Build nanovdb" ON)
option(OPENVDB_ENABLE_RPATH "Build with RPATH information" ON)
option(OPENVDB_CXX_STRICT "Enable or disable pre-defined compiler warnings" OFF)
option(OPENVDB_CODE_COVERAGE "Enable code coverage. This also overrides CMAKE_BUILD_TYPE to Debug" OFF)

@@ -740,6 +741,10 @@
add_subdirectory(openvdb_maya)
endif()

+if(OPENVDB_BUILD_NANOVDB)
+ add_subdirectory(nanovdb)
+endif()
+
##########################################################################

add_custom_target(uninstall
@@ -24,7 +24,6 @@ import re
import subprocess
import sys

def is_tool(name):
"""Check whether `name` is on PATH and marked as executable."""

@@ -33,7 +32,6 @@ def is_tool(name):

return which(name) is not None

class Builder:
def __init__(self, name, branch, codesign):
self.name = name

@@ -50,23 +48,22 @@ class Builder:
# Detect platform
if name.startswith('mac'):
self.platform = 'mac'
self.command_prefix = []
self.command_prefix = []
elif name.startswith('linux'):
self.platform = 'linux'
if is_tool('scl'):
self.command_prefix = ['scl', 'enable', 'devtoolset-9', '--']
self.command_prefix = ['scl', 'enable', 'devtoolset-9', '--']
else:
self.command_prefix = []
self.command_prefix = []
elif name.startswith('win'):
self.platform = 'win'
self.command_prefix = []
self.command_prefix = []
else:
raise ValueError('Unkonw platform for builder ' + self.platform)

# Always 64 bit now
self.bits = 64

def create_builder_from_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('builder_name')

@@ -107,7 +104,7 @@ class VersionInfo:

def _parse_header_file(self, filename, define):
import re
regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define)
regex = re.compile("^#\s*define\s+%s\s+(.*)" % define)
with open(filename, "r") as file:
for l in file:
match = regex.match(l)
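As an aside on the raw-string change above: the `r"..."` form keeps `\s` from being treated as a string escape. A minimal, self-contained sketch of the same #define parsing idea (the macro name and the header line are invented for illustration, not taken from the diff):

```python
import re

# Invented example line and macro name, only to show what the pattern extracts.
line = "#define BLENDER_VERSION 291\n"
regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % "BLENDER_VERSION")
match = regex.match(line)
if match:
    print(match.group(1).strip())  # -> "291"
```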
@@ -102,7 +102,7 @@ class ArchiveWithIndicator:

# Wait for until archive is fully stored.
actual_archive_size = self.archive_filepath.stat().st_size
if actual_archive_size != expected_archive_size:
if actual_archive_size != expected_archive_size:
print('Partial/invalid archive size (expected '
f'{expected_archive_size} got {actual_archive_size})')
return False
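The size check above guards against reading a partially written archive. A minimal sketch of that "wait until fully stored" idea, with hypothetical names rather than the real class:

```python
import time
from pathlib import Path

def wait_for_archive(archive: Path, expected_size: int, poll_interval: float = 1.0) -> None:
    # Keep polling until the file on disk reaches the size recorded by the producer.
    while not archive.exists() or archive.stat().st_size != expected_size:
        time.sleep(poll_interval)
```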
@@ -23,7 +23,6 @@ import shutil

import buildbot_utils

def get_cmake_options(builder):
codesign_script = os.path.join(
builder.blender_dir, 'build_files', 'buildbot', 'worker_codesign.cmake')

@@ -50,7 +49,6 @@ def get_cmake_options(builder):

return options

def update_git(builder):
# Do extra git fetch because not all platform/git/buildbot combinations
# update the origin remote, causing buildinfo to detect local changes.

@@ -60,7 +58,6 @@ def update_git(builder):
command = ['git', 'fetch', '--all']
buildbot_utils.call(builder.command_prefix + command)

def clean_directories(builder):
# Make sure no garbage remained from the previous run
if os.path.isdir(builder.install_dir):

@@ -76,7 +73,6 @@ def clean_directories(builder):
print("Removing {}" . format(buildinfo))
os.remove(full_path)

def cmake_configure(builder):
# CMake configuration
os.chdir(builder.build_dir)

@@ -91,7 +87,6 @@ def cmake_configure(builder):
command = ['cmake', builder.blender_dir] + cmake_options
buildbot_utils.call(builder.command_prefix + command)

def cmake_build(builder):
# CMake build
os.chdir(builder.build_dir)

@@ -114,7 +109,6 @@ def cmake_build(builder):
print("CMake build:")
buildbot_utils.call(builder.command_prefix + command)

if __name__ == "__main__":
builder = buildbot_utils.create_builder_from_arguments()
update_git(builder)

@@ -29,20 +29,18 @@ from pathlib import Path

import buildbot_utils

def get_package_name(builder, platform=None):
info = buildbot_utils.VersionInfo(builder)

package_name = 'blender-' + info.full_version
if platform:
package_name += '-' + platform
package_name += '-' + platform
if not (builder.branch == 'master' or builder.is_release_branch):
if info.is_development_build:
package_name = builder.branch + "-" + package_name

return package_name

def sign_file_or_directory(path):
from codesign.simple_code_signer import SimpleCodeSigner
code_signer = SimpleCodeSigner()

@@ -66,7 +64,6 @@ def create_buildbot_upload_zip(builder, package_files):
sys.stderr.write('Create buildbot_upload.zip failed: ' + str(ex) + '\n')
sys.exit(1)

def create_tar_xz(src, dest, package_name):
# One extra to remove leading os.sep when cleaning root for package_root
ln = len(src) + 1

@@ -94,7 +91,6 @@ def create_tar_xz(src, dest, package_name):
package.add(entry[0], entry[1], recursive=False, filter=_fakeroot)
package.close()

def cleanup_files(dirpath, extension):
for f in os.listdir(dirpath):
filepath = os.path.join(dirpath, f)

@@ -175,11 +171,7 @@ def pack_linux(builder):

print("Stripping python...")
py_target = os.path.join(builder.install_dir, info.short_version)
buildbot_utils.call(
builder.command_prefix + [
'find', py_target, '-iname', '*.so', '-exec', 'strip', '-s', '{}', ';',
],
)
buildbot_utils.call(builder.command_prefix + ['find', py_target, '-iname', '*.so', '-exec', 'strip', '-s', '{}', ';'])

# Construct package name
platform_name = 'linux64'

@@ -22,21 +22,18 @@ import buildbot_utils
import os
import sys

def get_ctest_arguments(builder):
args = ['--output-on-failure']
if builder.platform == 'win':
args += ['-C', 'Release']
return args

def test(builder):
os.chdir(builder.build_dir)

command = builder.command_prefix + ['ctest'] + get_ctest_arguments(builder)
command = builder.command_prefix + ['ctest'] + get_ctest_arguments(builder)
buildbot_utils.call(command)

if __name__ == "__main__":
builder = buildbot_utils.create_builder_from_arguments()
test(builder)
@@ -52,7 +52,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(Alembic DEFAULT_MSG ALEMBIC_LIBRARY ALEMBIC_IN
IF(ALEMBIC_FOUND)
SET(ALEMBIC_LIBRARIES ${ALEMBIC_LIBRARY})
SET(ALEMBIC_INCLUDE_DIRS ${ALEMBIC_INCLUDE_DIR})
ENDIF()
ENDIF(ALEMBIC_FOUND)

MARK_AS_ADVANCED(
ALEMBIC_INCLUDE_DIR

@@ -30,7 +30,7 @@ SET(_audaspace_SEARCH_DIRS
FIND_PACKAGE(PkgConfig)
IF(PKG_CONFIG_FOUND)
PKG_CHECK_MODULES(AUDASPACE_PKGCONF audaspace)
ENDIF()
ENDIF(PKG_CONFIG_FOUND)

# Include dir
FIND_PATH(AUDASPACE_INCLUDE_DIR

@@ -88,17 +88,17 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(Audaspace_Py DEFAULT_MSG AUDASPACE_PY_LIBRAR
IF(AUDASPACE_FOUND)
SET(AUDASPACE_LIBRARIES ${AUDASPACE_LIBRARY})
SET(AUDASPACE_INCLUDE_DIRS ${AUDASPACE_INCLUDE_DIR})
ENDIF()
ENDIF(AUDASPACE_FOUND)

IF(AUDASPACE_C_FOUND)
SET(AUDASPACE_C_LIBRARIES ${AUDASPACE_C_LIBRARY})
SET(AUDASPACE_C_INCLUDE_DIRS ${AUDASPACE_C_INCLUDE_DIR})
ENDIF()
ENDIF(AUDASPACE_C_FOUND)

IF(AUDASPACE_PY_FOUND)
SET(AUDASPACE_PY_LIBRARIES ${AUDASPACE_PY_LIBRARY})
SET(AUDASPACE_PY_INCLUDE_DIRS ${AUDASPACE_PY_INCLUDE_DIR})
ENDIF()
ENDIF(AUDASPACE_PY_FOUND)

MARK_AS_ADVANCED(
AUDASPACE_LIBRARY

@@ -41,7 +41,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(Eigen3 DEFAULT_MSG

IF(EIGEN3_FOUND)
SET(EIGEN3_INCLUDE_DIRS ${EIGEN3_INCLUDE_DIR})
ENDIF()
ENDIF(EIGEN3_FOUND)

MARK_AS_ADVANCED(
EIGEN3_INCLUDE_DIR

@@ -80,7 +80,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(Embree DEFAULT_MSG
IF(EMBREE_FOUND)
SET(EMBREE_LIBRARIES ${_embree_LIBRARIES})
SET(EMBREE_INCLUDE_DIRS ${EMBREE_INCLUDE_DIR})
ENDIF()
ENDIF(EMBREE_FOUND)

MARK_AS_ADVANCED(
EMBREE_INCLUDE_DIR

@@ -1,83 +0,0 @@
# - Find FFmpeg library and includes.
# Set FFMPEG_FIND_COMPONENTS to the canonical names of the libraries
# before using the module.
# This module defines
# FFMPEG_INCLUDE_DIRS, where to find libavcodec/ac3_parser.h.
# FFMPEG_LIBRARIES, libraries to link against to use FFmpeg.
# FFMPEG_ROOT_DIR, The base directory to search for FFmpeg.
# This can also be an environment variable.
# FFMPEG_FOUND, If false, do not try to use FFmpeg.
# FFMPEG_<COMPONENT>_LIBRARY, the given individual component libraries.
#=============================================================================
# Copyright 2020 Blender Foundation.
#
# Distributed under the OSI-approved BSD 3-Clause License,
# see accompanying file BSD-3-Clause-license.txt for details.
#=============================================================================

# If FFMPEG_ROOT_DIR was defined in the environment, use it.
if(NOT FFMPEG_ROOT_DIR AND NOT $ENV{FFMPEG_ROOT_DIR} STREQUAL "")
set(FFMPEG_ROOT_DIR $ENV{FFMPEG_ROOT_DIR})
endif()

set(_ffmpeg_SEARCH_DIRS
${FFMPEG_ROOT_DIR}
/opt/lib/ffmpeg
)

if(NOT FFMPEG_FIND_COMPONENTS)
set(FFMPEG_FIND_COMPONENTS
# List taken from http://ffmpeg.org/download.html#build-mac
avcodec
avdevice
avfilter
avformat
avutil
)
endif()

find_path(_ffmpeg_INCLUDE_DIR
NAMES
libavcodec/ac3_parser.h
HINTS
${_ffmpeg_SEARCH_DIRS}
PATH_SUFFIXES
include
)

set(_ffmpeg_LIBRARIES)
foreach(_component ${FFMPEG_FIND_COMPONENTS})
string(TOUPPER ${_component} _upper_COMPONENT)
find_library(FFMPEG_${_upper_COMPONENT}_LIBRARY
NAMES
${_upper_COMPONENT}
HINTS
${LIBDIR}/ffmpeg
PATH_SUFFIXES
lib64 lib
)
if(NOT FFMPEG_${_upper_COMPONENT}_LIBRARY)
message(WARNING "Could NOT find FFmpeg ${_upper_COMPONENT}.")
endif()
list(APPEND _ffmpeg_LIBRARIES ${FFMPEG_${_upper_COMPONENT}_LIBRARY})
mark_as_advanced(FFMPEG_${_upper_COMPONENT}_LIBRARY)
endforeach()

# handle the QUIETLY and REQUIRED arguments and set FFMPEG_FOUND to TRUE if
# all listed variables are TRUE
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(FFmpeg DEFAULT_MSG
_ffmpeg_LIBRARIES _ffmpeg_INCLUDE_DIR)

IF(FFMPEG_FOUND)
set(FFMPEG_LIBRARIES ${_ffmpeg_LIBRARIES})
set(FFMPEG_INCLUDE_DIRS ${_ffmpeg_INCLUDE_DIR})
ENDIF()

mark_as_advanced(
FFMPEG_INCLUDE_DIR
)

unset(_ffmpeg_SEARCH_DIRS)
unset(_ffmpeg_LIBRARIES)
unset(_ffmpeg_INCLUDE_DIR)

@@ -54,7 +54,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(Fftw3 DEFAULT_MSG
IF(FFTW3_FOUND)
SET(FFTW3_LIBRARIES ${FFTW3_LIBRARY})
SET(FFTW3_INCLUDE_DIRS ${FFTW3_INCLUDE_DIR})
ENDIF()
ENDIF(FFTW3_FOUND)

MARK_AS_ADVANCED(
FFTW3_INCLUDE_DIR

@@ -52,7 +52,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(GLEW DEFAULT_MSG

IF(GLEW_FOUND)
SET(GLEW_INCLUDE_DIRS ${GLEW_INCLUDE_DIR})
ENDIF()
ENDIF(GLEW_FOUND)

MARK_AS_ADVANCED(
GLEW_INCLUDE_DIR

@@ -82,7 +82,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(GMP DEFAULT_MSG
IF(GMP_FOUND)
SET(GMP_LIBRARIES ${GMPXX_LIBRARY} ${GMP_LIBRARY})
SET(GMP_INCLUDE_DIRS ${GMP_INCLUDE_DIR} ${GMPXX_INCLUDE_DIR})
ENDIF()
ENDIF(GMP_FOUND)

MARK_AS_ADVANCED(
GMP_INCLUDE_DIR

@@ -124,7 +124,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(Icu DEFAULT_MSG
IF(ICU_FOUND)
SET(ICU_LIBRARIES ${ICU_LIBRARY_DATA} ${ICU_LIBRARY_I18N} ${ICU_LIBRARY_IO} ${ICU_LIBRARY_LE} ${ICU_LIBRARY_LX} ${ICU_LIBRARY_TU} ${ICU_LIBRARY_UC})
SET(ICU_INCLUDE_DIRS ${ICU_INCLUDE_DIR})
ENDIF()
ENDIF(ICU_FOUND)

MARK_AS_ADVANCED(
ICU_INCLUDE_DIR

@@ -54,7 +54,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(Jack DEFAULT_MSG
IF(JACK_FOUND)
SET(JACK_LIBRARIES ${JACK_LIBRARY})
SET(JACK_INCLUDE_DIRS ${JACK_INCLUDE_DIR})
ENDIF()
ENDIF(JACK_FOUND)

MARK_AS_ADVANCED(
JACK_INCLUDE_DIR

@@ -64,7 +64,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(JeMalloc DEFAULT_MSG
IF(JEMALLOC_FOUND)
SET(JEMALLOC_LIBRARIES ${JEMALLOC_LIBRARY})
SET(JEMALLOC_INCLUDE_DIRS ${JEMALLOC_INCLUDE_DIR})
ENDIF()
ENDIF(JEMALLOC_FOUND)

MARK_AS_ADVANCED(
JEMALLOC_INCLUDE_DIR

@@ -52,7 +52,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(LZO DEFAULT_MSG
IF(LZO_FOUND)
SET(LZO_LIBRARIES ${LZO_LIBRARY})
SET(LZO_INCLUDE_DIRS ${LZO_INCLUDE_DIR})
ENDIF()
ENDIF(LZO_FOUND)

MARK_AS_ADVANCED(
LZO_INCLUDE_DIR
@@ -1,49 +0,0 @@
# - Find NanoVDB library
# Find the native NanoVDB includes and library
# This module defines
# NANOVDB_INCLUDE_DIRS, where to find nanovdb.h, Set when
# NANOVDB_INCLUDE_DIR is found.
# NANOVDB_ROOT_DIR, The base directory to search for NanoVDB.
# This can also be an environment variable.
# NANOVDB_FOUND, If false, do not try to use NanoVDB.

#=============================================================================
# Copyright 2020 Blender Foundation.
#
# Distributed under the OSI-approved BSD 3-Clause License,
# see accompanying file BSD-3-Clause-license.txt for details.
#=============================================================================

# If NANOVDB_ROOT_DIR was defined in the environment, use it.
IF(NOT NANOVDB_ROOT_DIR AND NOT $ENV{NANOVDB_ROOT_DIR} STREQUAL "")
SET(NANOVDB_ROOT_DIR $ENV{NANOVDB_ROOT_DIR})
ENDIF()

SET(_nanovdb_SEARCH_DIRS
${NANOVDB_ROOT_DIR}
)

FIND_PATH(NANOVDB_INCLUDE_DIR
NAMES
nanovdb/NanoVDB.h
HINTS
${_nanovdb_SEARCH_DIRS}
PATH_SUFFIXES
include
)

# handle the QUIETLY and REQUIRED arguments and set NANOVDB_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(NanoVDB DEFAULT_MSG
NANOVDB_INCLUDE_DIR)

IF(NANOVDB_FOUND)
SET(NANOVDB_INCLUDE_DIRS ${NANOVDB_INCLUDE_DIR})
ENDIF(NANOVDB_FOUND)

MARK_AS_ADVANCED(
NANOVDB_INCLUDE_DIR
)

UNSET(_nanovdb_SEARCH_DIRS)

@@ -95,7 +95,7 @@ IF(OSL_FOUND)
"\\1" OSL_LIBRARY_VERSION_MAJOR ${OSL_LIBRARY_VERSION_MAJOR})
STRING(REGEX REPLACE ".*#define[ \t]+OSL_LIBRARY_VERSION_MINOR[ \t]+([.0-9]+).*"
"\\1" OSL_LIBRARY_VERSION_MINOR ${OSL_LIBRARY_VERSION_MINOR})
ENDIF()
ENDIF(OSL_FOUND)

MARK_AS_ADVANCED(
OSL_INCLUDE_DIR

@@ -129,7 +129,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenCOLLADA DEFAULT_MSG
IF(OPENCOLLADA_FOUND)
SET(OPENCOLLADA_LIBRARIES ${_opencollada_LIBRARIES})
SET(OPENCOLLADA_INCLUDE_DIRS ${_opencollada_INCLUDES})
ENDIF()
ENDIF(OPENCOLLADA_FOUND)

UNSET(COMPONENT)
UNSET(UPPERCOMPONENT)

@@ -70,7 +70,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenColorIO DEFAULT_MSG
IF(OPENCOLORIO_FOUND)
SET(OPENCOLORIO_LIBRARIES ${_opencolorio_LIBRARIES})
SET(OPENCOLORIO_INCLUDE_DIRS ${OPENCOLORIO_INCLUDE_DIR})
ENDIF()
ENDIF(OPENCOLORIO_FOUND)

MARK_AS_ADVANCED(
OPENCOLORIO_INCLUDE_DIR

@@ -65,7 +65,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenJPEG DEFAULT_MSG
IF(OPENJPEG_FOUND)
SET(OPENJPEG_LIBRARIES ${OPENJPEG_LIBRARY})
SET(OPENJPEG_INCLUDE_DIRS ${OPENJPEG_INCLUDE_DIR})
ENDIF()
ENDIF(OPENJPEG_FOUND)

MARK_AS_ADVANCED(
OPENJPEG_INCLUDE_DIR

@@ -90,7 +90,7 @@ IF(OPENSUBDIV_FOUND)
OPENSUBDIV_CHECK_CONTROLLER("cudaEvaluator.h" OPENSUBDIV_HAS_CUDA)
OPENSUBDIV_CHECK_CONTROLLER("glXFBEvaluator.h" OPENSUBDIV_HAS_GLSL_TRANSFORM_FEEDBACK)
OPENSUBDIV_CHECK_CONTROLLER("glComputeEvaluator.h" OPENSUBDIV_HAS_GLSL_COMPUTE)
ENDIF()
ENDIF(OPENSUBDIV_FOUND)

MARK_AS_ADVANCED(
OPENSUBDIV_INCLUDE_DIR

@@ -55,7 +55,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenVDB DEFAULT_MSG
IF(OPENVDB_FOUND)
SET(OPENVDB_LIBRARIES ${OPENVDB_LIBRARY})
SET(OPENVDB_INCLUDE_DIRS ${OPENVDB_INCLUDE_DIR})
ENDIF()
ENDIF(OPENVDB_FOUND)

MARK_AS_ADVANCED(
OPENVDB_INCLUDE_DIR

@@ -41,7 +41,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(OptiX DEFAULT_MSG

IF(OPTIX_FOUND)
SET(OPTIX_INCLUDE_DIRS ${OPTIX_INCLUDE_DIR})
ENDIF()
ENDIF(OPTIX_FOUND)

MARK_AS_ADVANCED(
OPTIX_INCLUDE_DIR

@@ -57,7 +57,7 @@ SET(PCRE_LIBRARIES ${PCRE_LIBRARY})

IF(PCRE_FOUND)
SET(PCRE_INCLUDE_DIRS ${PCRE_INCLUDE_DIR})
ENDIF()
ENDIF(PCRE_FOUND)

MARK_AS_ADVANCED(
PCRE_INCLUDE_DIR

@@ -53,7 +53,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(SDL2 DEFAULT_MSG
IF(SDL2_FOUND)
SET(SDL2_LIBRARIES ${SDL2_LIBRARY})
SET(SDL2_INCLUDE_DIRS ${SDL2_INCLUDE_DIR})
ENDIF()
ENDIF(SDL2_FOUND)

MARK_AS_ADVANCED(
SDL2_INCLUDE_DIR

@@ -52,7 +52,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(SndFile DEFAULT_MSG
IF(SNDFILE_FOUND)
SET(LIBSNDFILE_LIBRARIES ${LIBSNDFILE_LIBRARY})
SET(LIBSNDFILE_INCLUDE_DIRS ${LIBSNDFILE_INCLUDE_DIR})
ENDIF()
ENDIF(SNDFILE_FOUND)

MARK_AS_ADVANCED(
LIBSNDFILE_INCLUDE_DIR

@@ -54,7 +54,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(Spacenav DEFAULT_MSG
IF(SPACENAV_FOUND)
SET(SPACENAV_LIBRARIES ${SPACENAV_LIBRARY})
SET(SPACENAV_INCLUDE_DIRS ${SPACENAV_INCLUDE_DIR})
ENDIF()
ENDIF(SPACENAV_FOUND)

MARK_AS_ADVANCED(
SPACENAV_INCLUDE_DIR

@@ -59,7 +59,7 @@ ELSE()
get_filename_component(USD_LIBRARY_DIR ${USD_LIBRARY} DIRECTORY)
SET(USD_INCLUDE_DIRS ${USD_INCLUDE_DIR})
set(USD_LIBRARIES ${USD_LIBRARY})
ENDIF()
ENDIF(USD_FOUND)
ENDIF()

MARK_AS_ADVANCED(

@@ -52,7 +52,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(XML2 DEFAULT_MSG
IF(XML2_FOUND)
SET(XML2_LIBRARIES ${XML2_LIBRARY})
SET(XML2_INCLUDE_DIRS ${XML2_INCLUDE_DIR})
ENDIF()
ENDIF(XML2_FOUND)

MARK_AS_ADVANCED(
XML2_INCLUDE_DIR

@@ -61,7 +61,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(XR_OpenXR_SDK DEFAULT_MSG
IF(XR_OPENXR_SDK_FOUND)
SET(XR_OPENXR_SDK_LIBRARIES ${XR_OPENXR_SDK_LOADER_LIBRARY})
SET(XR_OPENXR_SDK_INCLUDE_DIRS ${XR_OPENXR_SDK_INCLUDE_DIR})
ENDIF()
ENDIF(XR_OPENXR_SDK_FOUND)

MARK_AS_ADVANCED(
XR_OPENXR_SDK_INCLUDE_DIR
@@ -355,8 +355,6 @@ def recursive_arg_sizes(node, ):
# print("adding", node.spelling)
for c in node.get_children():
recursive_arg_sizes(c)

# cache function sizes
recursive_arg_sizes(tu.cursor)
_defs.update(defs_precalc)

@@ -182,7 +182,7 @@ def create_nb_project_main():
f.write(' </logicalFolder>\n')
# default, but this dir is infact not in blender dir so we can ignore it
# f.write(' <sourceFolderFilter>^(nbproject)$</sourceFolderFilter>\n')
f.write(r' <sourceFolderFilter>^(nbproject|__pycache__|.*\.py|.*\.html|.*\.blend)$</sourceFolderFilter>\n')
f.write(' <sourceFolderFilter>^(nbproject|__pycache__|.*\.py|.*\.html|.*\.blend)$</sourceFolderFilter>\n')

f.write(' <sourceRootList>\n')
f.write(' <Elem>%s</Elem>\n' % SOURCE_DIR) # base_root_rel

@@ -8,7 +8,6 @@ import sys

cmakelists_file = sys.argv[-1]

def main():
options = []
for l in open(cmakelists_file, 'r').readlines():

@@ -49,8 +49,8 @@ def main():
check_commands = []
for c, inc_dirs, defs in source_info:

# ~if "source/blender" not in c:
# ~ continue
#~if "source/blender" not in c:
#~ continue

cmd = ([CHECKER_BIN] +
CHECKER_ARGS +
@@ -5,22 +5,24 @@
#

set(WITH_ALEMBIC ON CACHE BOOL "" FORCE)
set(WITH_AUDASPACE ON CACHE BOOL "" FORCE)
set(WITH_BUILDINFO ON CACHE BOOL "" FORCE)
set(WITH_BULLET ON CACHE BOOL "" FORCE)
set(WITH_CODEC_AVI ON CACHE BOOL "" FORCE)
set(WITH_CODEC_FFMPEG ON CACHE BOOL "" FORCE)
set(WITH_CODEC_SNDFILE ON CACHE BOOL "" FORCE)
set(WITH_COMPOSITOR ON CACHE BOOL "" FORCE)
set(WITH_CYCLES ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_EMBREE ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_OSL ON CACHE BOOL "" FORCE)
set(WITH_DRACO ON CACHE BOOL "" FORCE)
set(WITH_FFTW3 ON CACHE BOOL "" FORCE)
set(WITH_GMP ON CACHE BOOL "" FORCE)
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)
set(WITH_COMPOSITOR ON CACHE BOOL "" FORCE)
set(WITH_FREESTYLE ON CACHE BOOL "" FORCE)
set(WITH_GMP ON CACHE BOOL "" FORCE)
set(WITH_IK_ITASC ON CACHE BOOL "" FORCE)
set(WITH_IK_SOLVER ON CACHE BOOL "" FORCE)
set(WITH_IK_ITASC ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_CINEON ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_DDS ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_HDR ON CACHE BOOL "" FORCE)

@@ -29,13 +31,12 @@ set(WITH_IMAGE_OPENJPEG ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_TIFF ON CACHE BOOL "" FORCE)
set(WITH_INPUT_NDOF ON CACHE BOOL "" FORCE)
set(WITH_INTERNATIONAL ON CACHE BOOL "" FORCE)
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)
set(WITH_LZMA ON CACHE BOOL "" FORCE)
set(WITH_LZO ON CACHE BOOL "" FORCE)
set(WITH_MOD_REMESH ON CACHE BOOL "" FORCE)
set(WITH_MOD_FLUID ON CACHE BOOL "" FORCE)
set(WITH_MOD_OCEANSIM ON CACHE BOOL "" FORCE)
set(WITH_MOD_REMESH ON CACHE BOOL "" FORCE)
set(WITH_AUDASPACE ON CACHE BOOL "" FORCE)
set(WITH_OPENAL ON CACHE BOOL "" FORCE)
set(WITH_OPENCOLLADA ON CACHE BOOL "" FORCE)
set(WITH_OPENCOLORIO ON CACHE BOOL "" FORCE)

@@ -44,7 +45,6 @@ set(WITH_OPENMP ON CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB_BLOSC ON CACHE BOOL "" FORCE)
set(WITH_POTRACE ON CACHE BOOL "" FORCE)
set(WITH_PYTHON_INSTALL ON CACHE BOOL "" FORCE)
set(WITH_QUADRIFLOW ON CACHE BOOL "" FORCE)
set(WITH_SDL ON CACHE BOOL "" FORCE)

@@ -7,7 +7,6 @@

set(WITH_INSTALL_PORTABLE ON CACHE BOOL "" FORCE)

set(WITH_AUDASPACE OFF CACHE BOOL "" FORCE)
set(WITH_ALEMBIC OFF CACHE BOOL "" FORCE)
set(WITH_BOOST OFF CACHE BOOL "" FORCE)
set(WITH_BUILDINFO OFF CACHE BOOL "" FORCE)

@@ -15,17 +14,20 @@ set(WITH_BULLET OFF CACHE BOOL "" FORCE)
set(WITH_CODEC_AVI OFF CACHE BOOL "" FORCE)
set(WITH_CODEC_FFMPEG OFF CACHE BOOL "" FORCE)
set(WITH_CODEC_SNDFILE OFF CACHE BOOL "" FORCE)
set(WITH_COMPOSITOR OFF CACHE BOOL "" FORCE)
set(WITH_CYCLES OFF CACHE BOOL "" FORCE)
set(WITH_CYCLES_EMBREE OFF CACHE BOOL "" FORCE)
set(WITH_CYCLES_OSL OFF CACHE BOOL "" FORCE)
set(WITH_CYCLES_DEVICE_OPTIX OFF CACHE BOOL "" FORCE)
set(WITH_DRACO OFF CACHE BOOL "" FORCE)
set(WITH_FFTW3 OFF CACHE BOOL "" FORCE)
set(WITH_GMP OFF CACHE BOOL "" FORCE)
set(WITH_LIBMV OFF CACHE BOOL "" FORCE)
set(WITH_LLVM OFF CACHE BOOL "" FORCE)
set(WITH_COMPOSITOR OFF CACHE BOOL "" FORCE)
set(WITH_FREESTYLE OFF CACHE BOOL "" FORCE)
set(WITH_GMP OFF CACHE BOOL "" FORCE)
set(WITH_IK_ITASC OFF CACHE BOOL "" FORCE)
set(WITH_IK_SOLVER OFF CACHE BOOL "" FORCE)
set(WITH_IK_ITASC OFF CACHE BOOL "" FORCE)
set(WITH_IMAGE_CINEON OFF CACHE BOOL "" FORCE)
set(WITH_IMAGE_DDS OFF CACHE BOOL "" FORCE)
set(WITH_IMAGE_HDR OFF CACHE BOOL "" FORCE)

@@ -35,13 +37,12 @@ set(WITH_IMAGE_TIFF OFF CACHE BOOL "" FORCE)
set(WITH_INPUT_NDOF OFF CACHE BOOL "" FORCE)
set(WITH_INTERNATIONAL OFF CACHE BOOL "" FORCE)
set(WITH_JACK OFF CACHE BOOL "" FORCE)
set(WITH_LIBMV OFF CACHE BOOL "" FORCE)
set(WITH_LLVM OFF CACHE BOOL "" FORCE)
set(WITH_LZMA OFF CACHE BOOL "" FORCE)
set(WITH_LZO OFF CACHE BOOL "" FORCE)
set(WITH_MOD_REMESH OFF CACHE BOOL "" FORCE)
set(WITH_MOD_FLUID OFF CACHE BOOL "" FORCE)
set(WITH_MOD_OCEANSIM OFF CACHE BOOL "" FORCE)
set(WITH_MOD_REMESH OFF CACHE BOOL "" FORCE)
set(WITH_AUDASPACE OFF CACHE BOOL "" FORCE)
set(WITH_OPENAL OFF CACHE BOOL "" FORCE)
set(WITH_OPENCOLLADA OFF CACHE BOOL "" FORCE)
set(WITH_OPENCOLORIO OFF CACHE BOOL "" FORCE)

@@ -4,7 +4,6 @@
# cmake -C../blender/build_files/cmake/config/blender_release.cmake ../blender
#

set(WITH_AUDASPACE ON CACHE BOOL "" FORCE)
set(WITH_ALEMBIC ON CACHE BOOL "" FORCE)
set(WITH_ASSERT_ABORT OFF CACHE BOOL "" FORCE)
set(WITH_BUILDINFO ON CACHE BOOL "" FORCE)

@@ -12,12 +11,15 @@ set(WITH_BULLET ON CACHE BOOL "" FORCE)
set(WITH_CODEC_AVI ON CACHE BOOL "" FORCE)
set(WITH_CODEC_FFMPEG ON CACHE BOOL "" FORCE)
set(WITH_CODEC_SNDFILE ON CACHE BOOL "" FORCE)
set(WITH_COMPOSITOR ON CACHE BOOL "" FORCE)
set(WITH_CYCLES ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_EMBREE ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_OSL ON CACHE BOOL "" FORCE)
set(WITH_DRACO ON CACHE BOOL "" FORCE)
set(WITH_FFTW3 ON CACHE BOOL "" FORCE)
set(WITH_GMP ON CACHE BOOL "" FORCE)
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)
set(WITH_COMPOSITOR ON CACHE BOOL "" FORCE)
set(WITH_FREESTYLE ON CACHE BOOL "" FORCE)
set(WITH_GMP ON CACHE BOOL "" FORCE)
set(WITH_IK_SOLVER ON CACHE BOOL "" FORCE)

@@ -30,13 +32,12 @@ set(WITH_IMAGE_OPENJPEG ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_TIFF ON CACHE BOOL "" FORCE)
set(WITH_INPUT_NDOF ON CACHE BOOL "" FORCE)
set(WITH_INTERNATIONAL ON CACHE BOOL "" FORCE)
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)
set(WITH_LZMA ON CACHE BOOL "" FORCE)
set(WITH_LZO ON CACHE BOOL "" FORCE)
set(WITH_MOD_REMESH ON CACHE BOOL "" FORCE)
set(WITH_MOD_FLUID ON CACHE BOOL "" FORCE)
set(WITH_MOD_OCEANSIM ON CACHE BOOL "" FORCE)
set(WITH_MOD_REMESH ON CACHE BOOL "" FORCE)
set(WITH_AUDASPACE ON CACHE BOOL "" FORCE)
set(WITH_OPENAL ON CACHE BOOL "" FORCE)
set(WITH_OPENCOLLADA ON CACHE BOOL "" FORCE)
set(WITH_OPENCOLORIO ON CACHE BOOL "" FORCE)

@@ -45,7 +46,6 @@ set(WITH_OPENMP ON CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB_BLOSC ON CACHE BOOL "" FORCE)
set(WITH_POTRACE ON CACHE BOOL "" FORCE)
set(WITH_PYTHON_INSTALL ON CACHE BOOL "" FORCE)
set(WITH_QUADRIFLOW ON CACHE BOOL "" FORCE)
set(WITH_SDL ON CACHE BOOL "" FORCE)

@@ -114,6 +114,5 @@ def main():
import subprocess
subprocess.call(cmd)

if __name__ == "__main__":
main()
@@ -86,7 +86,7 @@ if(WIN32)
set(CPACK_NSIS_MUI_ICON ${CMAKE_SOURCE_DIR}/release/windows/icons/winblender.ico)
set(CPACK_NSIS_COMPRESSOR "/SOLID lzma")

set(CPACK_RESOURCE_FILE_LICENSE ${CMAKE_SOURCE_DIR}/release/license/GPL-3.0.txt)
set(CPACK_RESOURCE_FILE_LICENSE ${CMAKE_SOURCE_DIR}/release/text/GPL3-license.txt)
set(CPACK_WIX_PRODUCT_ICON ${CMAKE_SOURCE_DIR}/release/windows/icons/winblender.ico)

set(BLENDER_NAMESPACE_GUID "507F933F-5898-404A-9A05-18282FD491A6")

@@ -323,11 +323,6 @@ if(WITH_OPENVDB)
set(OPENVDB_DEFINITIONS)
endif()

if(WITH_NANOVDB)
set(NANOVDB ${LIBDIR}/nanovdb)
set(NANOVDB_INCLUDE_DIR ${NANOVDB}/include)
endif()

if(WITH_LLVM)
set(LLVM_ROOT_DIR ${LIBDIR}/llvm)
if(EXISTS "${LLVM_ROOT_DIR}/bin/llvm-config")

@@ -422,6 +417,15 @@ if(WITH_TBB)
find_package(TBB)
endif()

if(WITH_GMP)
find_package(GMP)

if(NOT GMP_FOUND)
set(WITH_GMP OFF)
message(STATUS "GMP not found")
endif()
endif()

if(WITH_POTRACE)
find_package(Potrace)
if(NOT POTRACE_FOUND)

@@ -154,3 +154,11 @@ if(NOT ${CMAKE_GENERATOR} MATCHES "Xcode")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET}")
add_definitions("-DMACOSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}")
endif()

if(${CMAKE_GENERATOR} MATCHES "Xcode")
# Generate schemes in Blender.xcodeproj/xcshareddata/xcschemes/ early, at
# configuration time, not when Xcode is opened.
# This gets rid of "Manage schemes automatically" confirmation dialog that
# appears whenever CMake is run.
set(CMAKE_XCODE_GENERATE_SCHEME ON)
endif()

@@ -264,7 +264,6 @@ endif()
if(WITH_OPENVDB)
find_package_wrapper(OpenVDB)
find_package_wrapper(Blosc)

if(NOT OPENVDB_FOUND)
set(WITH_OPENVDB OFF)
set(WITH_OPENVDB_BLOSC OFF)

@@ -275,15 +274,6 @@ if(WITH_OPENVDB)
endif()
endif()

if(WITH_NANOVDB)
find_package_wrapper(NanoVDB)

if(NOT NANOVDB_FOUND)
set(WITH_NANOVDB OFF)
message(STATUS "NanoVDB not found, disabling it")
endif()
endif()

if(WITH_ALEMBIC)
find_package_wrapper(Alembic)

@@ -444,6 +434,15 @@ if(WITH_TBB)
find_package_wrapper(TBB)
endif()

if(WITH_GMP)
find_package(GMP)

if(NOT GMP_FOUND)
set(WITH_GMP OFF)
message(STATUS "GMP not found")
endif()
endif()

if(WITH_XR_OPENXR)
find_package(XR_OpenXR_SDK)
if(NOT XR_OPENXR_SDK_FOUND)

@@ -535,11 +535,6 @@ if(WITH_OPENVDB)
set(OPENVDB_DEFINITIONS -DNOMINMAX -D_USE_MATH_DEFINES)
endif()

if(WITH_NANOVDB)
set(NANOVDB ${LIBDIR}/nanoVDB)
set(NANOVDB_INCLUDE_DIR ${NANOVDB}/include)
endif()

if(WITH_OPENIMAGEDENOISE)
set(OPENIMAGEDENOISE ${LIBDIR}/OpenImageDenoise)
set(OPENIMAGEDENOISE_LIBPATH ${LIBDIR}/OpenImageDenoise/lib)

@@ -771,3 +766,4 @@ if(WITH_POTRACE)
set(POTRACE_LIBRARIES ${LIBDIR}/potrace/lib/potrace.lib)
set(POTRACE_FOUND On)
endif()
@@ -25,7 +25,7 @@ __all__ = (

import sys
if sys.version_info.major < 3:
if not sys.version_info.major < 3:
print("\nPython3.x or newer needed, found %s.\nAborting!\n" %
sys.version.partition(" ")[0])
sys.exit(1)

@@ -242,6 +242,5 @@ def main():
for s in build_info():
print(s)

if __name__ == "__main__":
main()

@@ -12,7 +12,6 @@ from make_utils import call

# Parse arguments

def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument("--ctest-command", default="ctest")

@@ -23,7 +22,6 @@ def parse_arguments():
parser.add_argument("build_directory")
return parser.parse_args()

args = parse_arguments()
git_command = args.git_command
svn_command = args.svn_command

@@ -14,15 +14,12 @@ import sys
import make_utils
from make_utils import call, check_output

def print_stage(text):
print("")
print(text)
print("")

# Parse arguments

def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument("--no-libraries", action="store_true")

@@ -34,13 +31,10 @@ def parse_arguments():
parser.add_argument("--use-centos-libraries", action="store_true")
return parser.parse_args()

def get_blender_git_root():
return check_output([args.git_command, "rev-parse", "--show-toplevel"])

# Setup for precompiled libraries and tests from svn.

def svn_update(args, release_version):
svn_non_interactive = [args.svn_command, '--non-interactive']

@@ -92,32 +86,30 @@ def svn_update(args, release_version):
print_stage("Updating Precompiled Libraries and Tests")

if os.path.isdir(lib_dirpath):
for dirname in os.listdir(lib_dirpath):
dirpath = os.path.join(lib_dirpath, dirname)
for dirname in os.listdir(lib_dirpath):
dirpath = os.path.join(lib_dirpath, dirname)

if dirname == ".svn":
# Cleanup must be run from svn root directory if it exists.
if not make_utils.command_missing(args.svn_command):
call(svn_non_interactive + ["cleanup", lib_dirpath])
continue
if dirname == ".svn":
# Cleanup must be run from svn root directory if it exists.
if not make_utils.command_missing(args.svn_command):
call(svn_non_interactive + ["cleanup", lib_dirpath])
continue

svn_dirpath = os.path.join(dirpath, ".svn")
svn_root_dirpath = os.path.join(lib_dirpath, ".svn")
svn_dirpath = os.path.join(dirpath, ".svn")
svn_root_dirpath = os.path.join(lib_dirpath, ".svn")

if (
os.path.isdir(dirpath) and
(os.path.exists(svn_dirpath) or os.path.exists(svn_root_dirpath))
):
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't update libraries\n")
sys.exit(1)
if os.path.isdir(dirpath) and \
(os.path.exists(svn_dirpath) or os.path.exists(svn_root_dirpath)):
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't update libraries\n")
sys.exit(1)

# Cleanup to continue with interrupted downloads.
if os.path.exists(svn_dirpath):
call(svn_non_interactive + ["cleanup", dirpath])
# Switch to appropriate branch and update.
call(svn_non_interactive + ["switch", svn_url + dirname, dirpath], exit_on_error=False)
call(svn_non_interactive + ["update", dirpath])
# Cleanup to continue with interrupted downloads.
if os.path.exists(svn_dirpath):
call(svn_non_interactive + ["cleanup", dirpath])
# Switch to appropriate branch and update.
call(svn_non_interactive + ["switch", svn_url + dirname, dirpath], exit_on_error=False)
call(svn_non_interactive + ["update", dirpath])

# Test if git repo can be updated.
def git_update_skip(args, check_remote_exists=True):

@@ -129,11 +121,9 @@ def git_update_skip(args, check_remote_exists=True):
rebase_merge = check_output([args.git_command, 'rev-parse', '--git-path', 'rebase-merge'], exit_on_error=False)
rebase_apply = check_output([args.git_command, 'rev-parse', '--git-path', 'rebase-apply'], exit_on_error=False)
merge_head = check_output([args.git_command, 'rev-parse', '--git-path', 'MERGE_HEAD'], exit_on_error=False)
if (
os.path.exists(rebase_merge) or
os.path.exists(rebase_apply) or
os.path.exists(merge_head)
):
if os.path.exists(rebase_merge) or \
os.path.exists(rebase_apply) or \
os.path.exists(merge_head):
return "rebase or merge in progress, complete it first"

# Abort if uncommitted changes.

@@ -143,14 +133,13 @@ def git_update_skip(args, check_remote_exists=True):

# Test if there is an upstream branch configured
if check_remote_exists:
branch = check_output([args.git_command, "rev-parse", "--abbrev-ref", "HEAD"])
remote = check_output([args.git_command, "config", "branch." + branch + ".remote"], exit_on_error=False)
if len(remote) == 0:
return "no remote branch to pull from"
branch = check_output([args.git_command, "rev-parse", "--abbrev-ref", "HEAD"])
remote = check_output([args.git_command, "config", "branch." + branch + ".remote"], exit_on_error=False)
if len(remote) == 0:
return "no remote branch to pull from"

return ""

# Update blender repository.
def blender_update(args):
print_stage("Updating Blender Git Repository")

@@ -189,7 +178,7 @@ def submodules_update(args, release_version, branch):
os.chdir(submodule_path)
msg = git_update_skip(args, check_remote_exists=False)
if msg:
skip_msg += submodule_path + " skipped: " + msg + "\n"
skip_msg += submodule_path + " skipped: " + msg + "\n"
else:
if make_utils.git_branch(args.git_command) != submodule_branch:
call([args.git_command, "fetch", "origin"])
@@ -7,7 +7,6 @@ import shutil
import subprocess
import sys

def call(cmd, exit_on_error=True):
print(" ".join(cmd))

@@ -20,7 +19,6 @@ def call(cmd, exit_on_error=True):
sys.exit(retcode)
return retcode

def check_output(cmd, exit_on_error=True):
# Flush to ensure correct order output on Windows.
sys.stdout.flush()

@@ -37,7 +35,6 @@ def check_output(cmd, exit_on_error=True):

return output.strip()

def git_branch(git_command):
# Get current branch name.
try:

@@ -48,7 +45,6 @@ def git_branch(git_command):

return branch.strip().decode('utf8')

def git_tag(git_command):
# Get current tag name.
try:

@@ -58,18 +54,16 @@ def git_tag(git_command):

return tag.strip().decode('utf8')

def git_branch_release_version(branch, tag):
release_version = re.search("^blender-v(.*)-release$", branch)
if release_version:
release_version = release_version.group(1)
elif tag:
release_version = re.search(r"^v([0-9]*\.[0-9]*).*", tag)
release_version = re.search("^v([0-9]*\.[0-9]*).*", tag)
if release_version:
release_version = release_version.group(1)
return release_version
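As a worked illustration of the two patterns above, with invented branch and tag values:

```python
import re

# Hypothetical inputs, only to show what each pattern extracts.
branch = "blender-v2.90-release"
tag = "v2.91.0"

print(re.search("^blender-v(.*)-release$", branch).group(1))  # -> "2.90"
print(re.search(r"^v([0-9]*\.[0-9]*).*", tag).group(1))       # -> "2.91"
```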
def svn_libraries_base_url(release_version):
if release_version:
svn_branch = "tags/blender-" + release_version + "-release"

@@ -77,7 +71,6 @@ def svn_libraries_base_url(release_version):
svn_branch = "trunk"
return "https://svn.blender.org/svnroot/bf-blender/" + svn_branch + "/lib/"

def command_missing(command):
# Support running with Python 2 for macOS
if sys.version_info >= (3, 0):

@@ -1,6 +1,6 @@
"""
Introduction
------------
Intro
-----

.. warning::

@@ -9,8 +9,9 @@ Introduction
and the :func:`register`/:func:`unregister` functions! The :func:`pgettext` family of functions
should only be used in rare, specific cases (like e.g. complex "composited" UI strings...).

To add translations to your python script, you must define a dictionary formatted like that:
``{locale: {msg_key: msg_translation, ...}, ...}`` where:
| To add translations to your python script, you must define a dictionary formatted like that:
| ``{locale: {msg_key: msg_translation, ...}, ...}``
| where:

- locale is either a lang iso code (e.g. ``fr``), a lang+country code (e.g. ``pt_BR``),
a lang+variant code (e.g. ``sr@latin``), or a full code (e.g. ``uz_UZ@cyrilic``).
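A minimal sketch of a dictionary in that format (the locales and strings are invented; the (context, message) key layout is an assumption here, so follow whatever the rest of that page prescribes for msg_key):

```python
# {locale: {msg_key: msg_translation, ...}, ...}
translations_dict = {
    "fr_FR": {("*", "Hello"): "Bonjour"},
    "pt_BR": {("*", "Hello"): "Olá"},
}
```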
@@ -677,7 +677,7 @@ Here are some general hints to avoid running into these problems:

Undo/Redo
---------

For safety, you should assume that undo and redo always invalidates all :class:`bpy.types.ID`
For safety, you should assume that undo and redo always invalidates all :class:`bpy.types.ID`
instances (Object, Scene, Mesh, Light, etc.), as weel obviously as all of their sub-data.

This example shows how you can tell undo changes the memory locations:
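The example itself falls outside this hunk; a minimal sketch of the idea, run inside Blender and assuming the standard `bpy` API, might look like this:

```python
import bpy

# Remember the current memory address of an ID datablock.
obj = bpy.context.object
before = obj.as_pointer()

# Perform an undo step (normally done interactively by the user).
bpy.ops.ed.undo()

# Re-fetch the datablock from the context and compare addresses;
# a changed address means the old Python reference must not be reused.
after = bpy.context.object.as_pointer()
print("address changed:", before != after)
```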
|
||||
@@ -701,7 +701,7 @@ interactively by the user is the only way to make sure that the script doesn't b
|
||||
Modern undo/redo system does not systematically invalidate all pointers anymore.
|
||||
Some data (in fact, most data, in typical cases), which were detected as unchanged for a
|
||||
particular history step, may remain unchanged and hence their pointers may remain valid.
|
||||
|
||||
|
||||
Be aware that if you want to take advantage of this behavior for some reason, there is no
|
||||
guarantee of any kind that it will be safe and consistent. Use it at your own risk.
|
||||
|
||||
|
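To make the gotcha above concrete, here is a hedged sketch (not part of the diff) of how a script can observe that an undo step may move or free ID data. The mesh name and the exact console workflow are illustrative assumptions, not Blender-documented steps.

# Illustrative sketch: observing that undo can invalidate ID references.
# Run from Blender's Python console; "TempMesh" is a placeholder name.
import bpy

mesh = bpy.data.meshes.new(name="TempMesh")
address_before = mesh.as_pointer()

bpy.ops.ed.undo_push(message="Added TempMesh")
bpy.ops.ed.undo()

# The old `mesh` reference must not be reused after undo; look the datablock up again.
mesh = bpy.data.meshes.get("TempMesh")
address_after = mesh.as_pointer() if mesh else None
print(address_before, address_after)  # the address may differ, or the mesh may be gone
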
@@ -41,7 +41,8 @@ Sphinx: HTML generation
After you have built doc/python_api/sphinx-in (see above),
generate html docs by running:

sphinx-build doc/python_api/sphinx-in doc/python_api/sphinx-out
cd doc/python_api
sphinx-build sphinx-in sphinx-out


Sphinx: PDF generation
|
||||
@@ -98,7 +99,6 @@ SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
|
||||
# See: D6261 for reference.
|
||||
USE_ONLY_BUILTIN_RNA_TYPES = True
|
||||
|
||||
|
||||
def handle_args():
|
||||
'''
|
||||
Parse the args passed to Blender after "--", ignored by Blender
|
||||
@@ -173,7 +173,7 @@ def handle_args():
|
||||
dest="log",
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="Log the output of the API dump and sphinx|latex "
|
||||
help="Log the output of the api dump and sphinx|latex "
|
||||
"warnings and errors (default=False).\n"
|
||||
"If given, save logs in:\n"
|
||||
"* OUTPUT_DIR/.bpy.log\n"
|
||||
@@ -350,9 +350,9 @@ RST_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "rst"))
|
||||
# stored in ./rst/info_*
|
||||
INFO_DOCS = (
|
||||
("info_quickstart.rst",
|
||||
"Quickstart: New to Blender or scripting and want to get your feet wet?"),
|
||||
"Quickstart: new to Blender or scripting and want to get your feet wet?"),
|
||||
("info_overview.rst",
|
||||
"API Overview: A more complete explanation of Python integration"),
|
||||
"API Overview: a more complete explanation of Python integration"),
|
||||
("info_api_reference.rst",
|
||||
"API Reference Usage: examples of how to use the API reference docs"),
|
||||
("info_best_practice.rst",
|
||||
@@ -360,8 +360,8 @@ INFO_DOCS = (
|
||||
("info_tips_and_tricks.rst",
|
||||
"Tips and Tricks: Hints to help you while writing scripts for Blender"),
|
||||
("info_gotcha.rst",
|
||||
"Gotcha's: Some of the problems you may encounter when writing scripts"),
|
||||
("change_log.rst", "Change Log: List of changes since last Blender release"),
|
||||
"Gotcha's: some of the problems you may encounter when writing scripts"),
|
||||
("change_log.rst", "List of changes since last Blender release"),
|
||||
)
|
||||
|
||||
# only support for properties atm.
|
||||
@@ -1095,7 +1095,7 @@ def pycontext2sphinx(basepath):
|
||||
fw("The context members available depend on the area of Blender which is currently being accessed.\n")
|
||||
fw("\n")
|
||||
fw("Note that all context values are readonly,\n")
|
||||
fw("but may be modified through the data API or by running operators\n\n")
|
||||
fw("but may be modified through the data api or by running operators\n\n")
|
||||
|
||||
def write_contex_cls():
|
||||
|
||||
@@ -1204,7 +1204,7 @@ def pyrna_enum2sphinx(prop, use_empty_descriptions=False):
|
||||
identifier,
|
||||
# Account for multi-line enum descriptions, allowing this to be a block of text.
|
||||
indent(", ".join(escape_rst(val) for val in (name, description) if val) or "Undocumented", " "),
|
||||
)
|
||||
)
|
||||
for identifier, name, description in prop.enum_items
|
||||
])
|
||||
else:
|
||||
@@ -1312,7 +1312,7 @@ def pyrna2sphinx(basepath):
|
||||
|
||||
fw(title_string(title, "="))
|
||||
|
||||
fw(".. module:: %s\n\n" % struct_module_name)
|
||||
fw(".. module:: %s.%s\n\n" % (struct_module_name, struct_id))
|
||||
|
||||
# docs first?, ok
|
||||
write_example_ref("", fw, "%s.%s" % (struct_module_name, struct_id))
|
||||
@@ -1543,7 +1543,7 @@ def pyrna2sphinx(basepath):
|
||||
|
||||
fw(title_string(class_name, "="))
|
||||
|
||||
fw(".. module:: %s\n" % class_module_name)
|
||||
fw(".. module:: %s.%s\n" % (class_module_name, class_name))
|
||||
fw("\n")
|
||||
|
||||
if use_subclasses:
|
||||
@@ -2227,7 +2227,7 @@ def main():
|
||||
shutil.rmtree(REFERENCE_PATH, True)
|
||||
|
||||
# copy SPHINX_OUT to the REFERENCE_PATH
|
||||
ignores = ('.doctrees', '.buildinfo')
|
||||
ignores = ('.doctrees', 'objects.inv', '.buildinfo')
|
||||
shutil.copytree(SPHINX_OUT,
|
||||
REFERENCE_PATH,
|
||||
ignore=shutil.ignore_patterns(*ignores))
|
||||
|
@@ -27,22 +27,6 @@ else
|
||||
fi
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Get the number of cores for threaded build
|
||||
ifndef NPROCS
|
||||
NPROCS:=1
|
||||
ifeq ($(OS), Linux)
|
||||
NPROCS:=$(shell nproc)
|
||||
endif
|
||||
ifeq ($(OS), NetBSD)
|
||||
NPROCS:=$(shell getconf NPROCESSORS_ONLN)
|
||||
endif
|
||||
ifneq (,$(filter $(OS),Darwin FreeBSD))
|
||||
NPROCS:=$(shell sysctl -n hw.ncpu)
|
||||
endif
|
||||
endif
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Blender Version & Info
|
||||
|
||||
@@ -92,7 +76,11 @@ fi
|
||||
# Generate HTML (sphinx)
|
||||
|
||||
if $DO_OUT_HTML ; then
|
||||
sphinx-build -b html -j $(NPROCS) $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
|
||||
# sphinx-build -n -b html $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
|
||||
|
||||
# annoying bug in sphinx makes it very slow unless we do this. should report.
|
||||
cd $SPHINX_WORKDIR
|
||||
sphinx-build -b html sphinx-in sphinx-out
|
||||
|
||||
# XXX, saves space on upload and zip, should move HTML outside
|
||||
# and zip up there, for now this is OK
|
||||
@@ -119,7 +107,8 @@ fi
|
||||
# Generate PDF (sphinx/latex)
|
||||
|
||||
if $DO_OUT_PDF ; then
|
||||
sphinx-build -n -b latex -j $(NPROCS) $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
|
||||
cd $SPHINX_WORKDIR
|
||||
sphinx-build -n -b latex $SPHINX_WORKDIR/sphinx-in $SPHINX_WORKDIR/sphinx-out
|
||||
make -C $SPHINX_WORKDIR/sphinx-out
|
||||
mv $SPHINX_WORKDIR/sphinx-out/contents.pdf \
|
||||
$SPHINX_WORKDIR/sphinx-out/blender_python_reference_$BLENDER_VERSION.pdf
extern/mantaflow/helper/util/randomstream.h (vendored)
@@ -377,6 +377,19 @@ class RandomStream {
|
||||
}
|
||||
|
||||
/*! get a random number from the stream */
|
||||
inline double getDouble(void)
|
||||
{
|
||||
return mtr.rand();
|
||||
};
|
||||
inline float getFloat(void)
|
||||
{
|
||||
return (float)mtr.rand();
|
||||
};
|
||||
|
||||
inline float getFloat(float min, float max)
|
||||
{
|
||||
return mtr.rand(max - min) + min;
|
||||
};
|
||||
inline float getRandNorm(float mean, float var)
|
||||
{
|
||||
return mtr.randNorm(mean, var);
|
||||
@@ -387,20 +400,12 @@ class RandomStream {
|
||||
{
|
||||
return getFloat();
|
||||
}
|
||||
inline Real getReal(float min, float max)
|
||||
{
|
||||
return getFloat(min, max);
|
||||
}
|
||||
|
||||
#else
|
||||
inline Real getReal()
|
||||
{
|
||||
return getDouble();
|
||||
}
|
||||
inline Real getReal(double min, double max)
|
||||
{
|
||||
return getDouble(min, max);
|
||||
}
|
||||
#endif
|
||||
|
||||
inline Vec3 getVec3()
|
||||
@@ -417,24 +422,6 @@ class RandomStream {
|
||||
|
||||
private:
|
||||
MTRand mtr;
|
||||
|
||||
inline double getDouble(void)
|
||||
{
|
||||
return mtr.rand();
|
||||
};
|
||||
inline float getFloat(void)
|
||||
{
|
||||
return (float)mtr.rand();
|
||||
};
|
||||
|
||||
inline double getDouble(double min, double max)
|
||||
{
|
||||
return mtr.rand(max - min) + min;
|
||||
};
|
||||
inline float getFloat(float min, float max)
|
||||
{
|
||||
return (float)(mtr.rand(max - min) + min);
|
||||
};
|
||||
};
|
||||
|
||||
} // namespace Manta
|
||||
|
@@ -405,7 +405,6 @@ int writeObjectsVDB(const string &filename,
|
||||
vdb_flags = openvdb::io::COMPRESS_NONE;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
case COMPRESSION_ZIP: {
|
||||
vdb_flags |= openvdb::io::COMPRESS_ZIP;
|
||||
break;
|
||||
@@ -446,7 +445,6 @@ int readObjectsVDB(const string &filename, std::vector<PbClass *> *objects, floa
|
||||
(void)metadata; // Unused for now
|
||||
}
|
||||
catch (const openvdb::IoError &e) {
|
||||
(void)e; // Unused for now
|
||||
debMsg("readObjectsVDB: Could not open vdb file " << filename, 1);
|
||||
file.close();
|
||||
return 0;
extern/mantaflow/preprocessed/gitinfo.h (vendored)
@@ -1,3 +1,3 @@
|
||||
|
||||
|
||||
#define MANTA_GIT_VERSION "commit b8e557707805720ff00a8eb946db2ee5b9361b5a"
|
||||
#define MANTA_GIT_VERSION "commit e2f6e59e3679f88e5100ae2145410cca4971b9df"
extern/mantaflow/preprocessed/grid.h (vendored)
@@ -355,6 +355,7 @@ class GridBase : public PbClass {
|
||||
return isInBounds(Vec3i(i, j, k), bnd);
|
||||
}
|
||||
|
||||
#ifdef BLENDER
|
||||
//! expose name field to Python for Blender
|
||||
void setName(const std::string &name)
|
||||
{
|
||||
@@ -385,6 +386,7 @@ class GridBase : public PbClass {
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
protected:
|
||||
GridType mType;
|
||||
Vec3i mSize;
extern/mantaflow/preprocessed/levelset.cpp (vendored)
@@ -639,207 +639,8 @@ void LevelsetGrid::initFromFlags(const FlagGrid &flags, bool ignoreWalls)
|
||||
}
|
||||
}
|
||||
|
||||
/* Helper variables that are used in flood-fill functions. */
|
||||
static const int ID_UNKNOWN = 0;
|
||||
static const int ID_VISITED = 1;
|
||||
|
||||
/* Fills all cells in the target grid that have not been marked during a flood-fill. */
|
||||
|
||||
struct KnFillApply : public KernelBase {
|
||||
KnFillApply(Grid<Real> &target,
|
||||
Grid<int> &visited,
|
||||
const Real value,
|
||||
const int boundaryWidth,
|
||||
const bool outside)
|
||||
: KernelBase(&target, boundaryWidth),
|
||||
target(target),
|
||||
visited(visited),
|
||||
value(value),
|
||||
boundaryWidth(boundaryWidth),
|
||||
outside(outside)
|
||||
{
|
||||
runMessage();
|
||||
run();
|
||||
}
|
||||
inline void op(int i,
|
||||
int j,
|
||||
int k,
|
||||
Grid<Real> &target,
|
||||
Grid<int> &visited,
|
||||
const Real value,
|
||||
const int boundaryWidth,
|
||||
const bool outside) const
|
||||
{
|
||||
|
||||
if (visited(i, j, k) == ID_VISITED)
|
||||
return;
|
||||
if (outside && target(i, j, k) < 0)
|
||||
return;
|
||||
if (!outside && target(i, j, k) >= 0)
|
||||
return;
|
||||
|
||||
/* Actual flood-fill override. */
|
||||
target(i, j, k) = value;
|
||||
}
|
||||
inline Grid<Real> &getArg0()
|
||||
{
|
||||
return target;
|
||||
}
|
||||
typedef Grid<Real> type0;
|
||||
inline Grid<int> &getArg1()
|
||||
{
|
||||
return visited;
|
||||
}
|
||||
typedef Grid<int> type1;
|
||||
inline const Real &getArg2()
|
||||
{
|
||||
return value;
|
||||
}
|
||||
typedef Real type2;
|
||||
inline const int &getArg3()
|
||||
{
|
||||
return boundaryWidth;
|
||||
}
|
||||
typedef int type3;
|
||||
inline const bool &getArg4()
|
||||
{
|
||||
return outside;
|
||||
}
|
||||
typedef bool type4;
|
||||
void runMessage()
|
||||
{
|
||||
debMsg("Executing kernel KnFillApply ", 3);
|
||||
debMsg("Kernel range"
|
||||
<< " x " << maxX << " y " << maxY << " z " << minZ << " - " << maxZ << " ",
|
||||
4);
|
||||
};
|
||||
void operator()(const tbb::blocked_range<IndexInt> &__r) const
|
||||
{
|
||||
const int _maxX = maxX;
|
||||
const int _maxY = maxY;
|
||||
if (maxZ > 1) {
|
||||
for (int k = __r.begin(); k != (int)__r.end(); k++)
|
||||
for (int j = boundaryWidth; j < _maxY; j++)
|
||||
for (int i = boundaryWidth; i < _maxX; i++)
|
||||
op(i, j, k, target, visited, value, boundaryWidth, outside);
|
||||
}
|
||||
else {
|
||||
const int k = 0;
|
||||
for (int j = __r.begin(); j != (int)__r.end(); j++)
|
||||
for (int i = boundaryWidth; i < _maxX; i++)
|
||||
op(i, j, k, target, visited, value, boundaryWidth, outside);
|
||||
}
|
||||
}
|
||||
void run()
|
||||
{
|
||||
if (maxZ > 1)
|
||||
tbb::parallel_for(tbb::blocked_range<IndexInt>(minZ, maxZ), *this);
|
||||
else
|
||||
tbb::parallel_for(tbb::blocked_range<IndexInt>(boundaryWidth, maxY), *this);
|
||||
}
|
||||
Grid<Real> ⌖
|
||||
Grid<int> &visited;
|
||||
const Real value;
|
||||
const int boundaryWidth;
|
||||
const bool outside;
|
||||
};
|
||||
|
||||
/* Basic flood fill implementation used to fill inside / outside areas of levelset.
|
||||
* Calling this function will ensure that there are no fluid cells inside obstacles.
|
||||
* I.e. starting from walls, cells will be tagged in flood-fill fashion, stopping at 0 borders.
|
||||
* All remaining cells will be filled with the fill value. Outside mode inverts search behavior. */
|
||||
void LevelsetGrid::floodFill(const Real value, const bool outside, const int boundaryWidth)
|
||||
{
|
||||
|
||||
/* Sanity check: Filling mode and filling value need to "match". */
|
||||
if (outside) {
|
||||
assertMsg(value < 0, "Cannot fill outside with (positive) value " << value);
|
||||
}
|
||||
else {
|
||||
assertMsg(value >= 0, "Cannot fill inside with (negative) value " << value);
|
||||
}
|
||||
|
||||
Grid<Real> levelsetCopy(this->getParent());
|
||||
Grid<int> visited(this->getParent());
|
||||
std::stack<Vec3i> todoPos;
|
||||
|
||||
const int maxNeighbors = this->is3D() ? 6 : 4;
|
||||
const Vec3i maxSize(this->getSize() - 1);
|
||||
|
||||
Vec3i bnd(2 * boundaryWidth);
|
||||
if (!this->is3D())
|
||||
bnd.z = 0;
|
||||
const int cellCntNoBnd = (this->getSizeX() - bnd.x) * (this->getSizeY() - bnd.y) *
|
||||
(this->getSizeZ() - bnd.z);
|
||||
|
||||
/* Initialize temporary helper grids. */
|
||||
levelsetCopy.copyFrom(*this);
|
||||
visited.setConst(ID_UNKNOWN);
|
||||
|
||||
FOR_IJK_BND(visited, boundaryWidth)
|
||||
{
|
||||
|
||||
/* Skip inside / outside cells depending on search mode. */
|
||||
if (outside && levelsetCopy(i, j, k) < 0)
|
||||
continue;
|
||||
if (!outside && levelsetCopy(i, j, k) >= 0)
|
||||
continue;
|
||||
/* Skip cell if it already has been visited. */
|
||||
if (visited(i, j, k) == ID_VISITED)
|
||||
continue;
|
||||
|
||||
Vec3i c(i, j, k);
|
||||
|
||||
bool isWallCell = (c.x - boundaryWidth == 0 || c.x == maxSize.x - boundaryWidth);
|
||||
isWallCell |= (c.y - boundaryWidth == 0 || c.y == maxSize.y - boundaryWidth);
|
||||
if (this->is3D())
|
||||
isWallCell |= (c.z - boundaryWidth == 0 || c.z == maxSize.z - boundaryWidth);
|
||||
|
||||
/* Only start searching from borders. */
|
||||
if (!isWallCell)
|
||||
continue;
|
||||
|
||||
/* Start flood-fill loop by initializing todo stack with current cell. */
|
||||
todoPos.push(c);
|
||||
visited(c) = ID_VISITED;
|
||||
|
||||
while (!todoPos.empty()) {
|
||||
c = todoPos.top();
|
||||
todoPos.pop();
|
||||
|
||||
/* Add all neighbor cells to search stack. */
|
||||
for (int nb = 0; nb < maxNeighbors; nb++) {
|
||||
const Vec3i neigh(c + neighbors[nb]);
|
||||
|
||||
if (!visited.isInBounds(neigh, boundaryWidth))
|
||||
continue;
|
||||
/* Skip inside / outside area depending on what we search for. */
|
||||
if (outside && levelsetCopy(neigh) < 0)
|
||||
continue;
|
||||
if (!outside && levelsetCopy(neigh) >= 0)
|
||||
continue;
|
||||
/* Skip neighbor if it already has been visited. */
|
||||
if (visited(neigh) == ID_VISITED)
|
||||
continue;
|
||||
|
||||
assertMsg(visited(neigh) == ID_UNKNOWN,
|
||||
"Cell must be of type 'unknown' at this point in the loop");
|
||||
todoPos.push(neigh);
|
||||
visited(neigh) = ID_VISITED;
|
||||
}
|
||||
assertMsg(todoPos.size() <= cellCntNoBnd,
|
||||
"Flood-fill todo stack cannot be greater than domain cell count - "
|
||||
<< todoPos.size() << " vs " << cellCntNoBnd);
|
||||
}
|
||||
}
|
||||
KnFillApply(*this, visited, value, boundaryWidth, outside);
|
||||
}
|
||||
|
||||
/* Deprecated: Use floodFill() function instead. */
|
||||
void LevelsetGrid::fillHoles(int maxDepth, int boundaryWidth)
|
||||
{
|
||||
debMsg("Deprecated - do not use fillHoles() ... use floodFill() instead", 1);
|
||||
|
||||
Real curVal, i1, i2, j1, j2, k1, k2;
|
||||
Vec3i c, cTmp;
|
||||
std::stack<Vec3i> undoPos;
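
For readers skimming this file: the floodFill() implementation shown above is a stack-based search that seeds from boundary cells, walks same-signed neighbours, and then overwrites every cell of that sign that was never reached. The following rough 2D Python sketch is my own simplification of that idea and not mantaflow code.

# Rough 2D sketch of the stack-based flood fill described above (illustrative only).
def flood_fill(grid, value, outside=True):
    """Fill enclosed regions of a 2D level set with `value`."""
    ny, nx = len(grid), len(grid[0])
    searched = (lambda v: v >= 0) if outside else (lambda v: v < 0)
    visited = [[False] * nx for _ in range(ny)]
    stack = []

    # Seed from border cells that match the searched sign.
    for j in range(ny):
        for i in range(nx):
            on_border = i in (0, nx - 1) or j in (0, ny - 1)
            if on_border and searched(grid[j][i]) and not visited[j][i]:
                visited[j][i] = True
                stack.append((i, j))

    # Walk 4-connected neighbours of the same sign.
    while stack:
        i, j = stack.pop()
        for di, dj in ((1, 0), (-1, 0), (0, 1), (0, -1)):
            ni, nj = i + di, j + dj
            if 0 <= ni < nx and 0 <= nj < ny and not visited[nj][ni] and searched(grid[nj][ni]):
                visited[nj][ni] = True
                stack.append((ni, nj))

    # Everything of the searched sign that was never reached is enclosed: overwrite it.
    for j in range(ny):
        for i in range(nx):
            if searched(grid[j][i]) and not visited[j][i]:
                grid[j][i] = value
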
extern/mantaflow/preprocessed/levelset.h (vendored)
@@ -234,35 +234,6 @@ class LevelsetGrid : public Grid<Real> {
|
||||
}
|
||||
}
|
||||
|
||||
//! flood-fill the levelset to ensure that closed obstacles are filled inside
|
||||
void floodFill(const Real value = -0.5, const bool outside = true, const int boundaryWidth = 1);
|
||||
static PyObject *_W_7(PyObject *_self, PyObject *_linargs, PyObject *_kwds)
|
||||
{
|
||||
try {
|
||||
PbArgs _args(_linargs, _kwds);
|
||||
LevelsetGrid *pbo = dynamic_cast<LevelsetGrid *>(Pb::objFromPy(_self));
|
||||
bool noTiming = _args.getOpt<bool>("notiming", -1, 0);
|
||||
pbPreparePlugin(pbo->getParent(), "LevelsetGrid::floodFill", !noTiming);
|
||||
PyObject *_retval = 0;
|
||||
{
|
||||
ArgLocker _lock;
|
||||
const Real value = _args.getOpt<Real>("value", 0, -0.5, &_lock);
|
||||
const bool outside = _args.getOpt<bool>("outside", 1, true, &_lock);
|
||||
const int boundaryWidth = _args.getOpt<int>("boundaryWidth", 2, 1, &_lock);
|
||||
pbo->_args.copy(_args);
|
||||
_retval = getPyNone();
|
||||
pbo->floodFill(value, outside, boundaryWidth);
|
||||
pbo->_args.check();
|
||||
}
|
||||
pbFinalizePlugin(pbo->getParent(), "LevelsetGrid::floodFill", !noTiming);
|
||||
return _retval;
|
||||
}
|
||||
catch (std::exception &e) {
|
||||
pbSetError("LevelsetGrid::floodFill", e.what());
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
static Real invalidTimeValue();
|
||||
public:
|
||||
PbArgs _args;
|
||||
|
@@ -15,7 +15,6 @@ static const Pb::Register _R_15("LevelsetGrid", "join", LevelsetGrid::_W_3);
|
||||
static const Pb::Register _R_16("LevelsetGrid", "subtract", LevelsetGrid::_W_4);
|
||||
static const Pb::Register _R_17("LevelsetGrid", "initFromFlags", LevelsetGrid::_W_5);
|
||||
static const Pb::Register _R_18("LevelsetGrid", "fillHoles", LevelsetGrid::_W_6);
|
||||
static const Pb::Register _R_19("LevelsetGrid", "floodFill", LevelsetGrid::_W_7);
|
||||
#endif
|
||||
extern "C" {
|
||||
void PbRegister_file_11()
|
||||
@@ -28,7 +27,6 @@ void PbRegister_file_11()
|
||||
KEEP_UNUSED(_R_16);
|
||||
KEEP_UNUSED(_R_17);
|
||||
KEEP_UNUSED(_R_18);
|
||||
KEEP_UNUSED(_R_19);
|
||||
}
|
||||
}
|
||||
} // namespace Manta
|
@@ -695,12 +695,7 @@ struct KnApplyEmission : public KernelBase {
|
||||
// (important for emit from particles)
|
||||
bool isInflow = (type & FlagGrid::TypeInflow && flags.isInflow(i, j, k));
|
||||
bool isOutflow = (type & FlagGrid::TypeOutflow && flags.isOutflow(i, j, k));
|
||||
|
||||
if (type && !isInflow)
|
||||
return;
|
||||
if (type && isOutflow)
|
||||
return;
|
||||
if (emissionTexture && !(*emissionTexture)(i, j, k))
|
||||
if ((type && !isInflow && !isOutflow) && (emissionTexture && !(*emissionTexture)(i, j, k)))
|
||||
return;
|
||||
|
||||
if (isAbsolute)
|
||||
|
@@ -270,14 +270,6 @@ if(WITH_CYCLES_EMBREE)
|
||||
)
|
||||
endif()
|
||||
|
||||
if(WITH_NANOVDB)
|
||||
add_definitions(-DWITH_NANOVDB)
|
||||
include_directories(
|
||||
SYSTEM
|
||||
${NANOVDB_INCLUDE_DIR}
|
||||
)
|
||||
endif()
|
||||
|
||||
if(WITH_OPENSUBDIV)
|
||||
add_definitions(-DWITH_OPENSUBDIV)
|
||||
include_directories(
|
||||
|
@@ -70,11 +70,6 @@ def _configure_argument_parser():
parser.add_argument("--cycles-print-stats",
help="Print rendering statistics to stderr",
action='store_true')
parser.add_argument("--cycles-device",
help="Set the device to use for Cycles, overriding user preferences and the scene setting."
"Valid options are 'CPU', 'CUDA', 'OPTIX' or 'OPENCL'."
"Additionally, you can append '+CPU' to any GPU type for hybrid rendering.",
default=None)
return parser


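A hypothetical invocation of the --cycles-device option shown in this hunk, sketched in Python for illustration only. The .blend file name and frame number are placeholders; the only grounded parts are the option names from the hunk and the fact that Blender ignores arguments after "--", leaving them to the add-on's parser.

# Hypothetical usage of --cycles-device; "scene.blend" is a placeholder path.
import subprocess

subprocess.run([
    "blender", "--background", "scene.blend",
    "--engine", "CYCLES", "--render-frame", "1",
    # Arguments after "--" are ignored by Blender and parsed by the Cycles add-on.
    "--", "--cycles-device", "OPTIX+CPU",
])
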
@@ -107,10 +102,6 @@ def _parse_command_line():
|
||||
import _cycles
|
||||
_cycles.enable_print_stats()
|
||||
|
||||
if args.cycles_device:
|
||||
import _cycles
|
||||
_cycles.set_device_override(args.cycles_device)
|
||||
|
||||
|
||||
def init():
|
||||
import bpy
|
||||
@@ -159,7 +150,8 @@ def create(engine, data, region=None, v3d=None, rv3d=None, preview_osl=False):
|
||||
screen = screen or rv3d.id_data.as_pointer()
|
||||
rv3d = rv3d.as_pointer()
|
||||
|
||||
engine.session = _cycles.create(engine.as_pointer(), prefs, data, screen, region, v3d, rv3d, preview_osl)
|
||||
engine.session = _cycles.create(
|
||||
engine.as_pointer(), prefs, data, screen, region, v3d, rv3d, preview_osl)
|
||||
|
||||
|
||||
def free(engine):
|
||||
@@ -232,7 +224,6 @@ def system_info():
|
||||
import _cycles
|
||||
return _cycles.system_info()
|
||||
|
||||
|
||||
def list_render_passes(scene, srl):
|
||||
# Builtin Blender passes.
|
||||
yield ("Combined", "RGBA", 'COLOR')
|
||||
@@ -307,7 +298,6 @@ def list_render_passes(scene, srl):
|
||||
else:
|
||||
yield (aov.name, "RGBA", 'COLOR')
|
||||
|
||||
|
||||
def register_passes(engine, scene, view_layer):
|
||||
# Detect duplicate render pass names, first one wins.
|
||||
listed = set()
|
||||
@@ -316,7 +306,6 @@ def register_passes(engine, scene, view_layer):
|
||||
engine.register_pass(scene, view_layer, name, len(channelids), channelids, channeltype)
|
||||
listed.add(name)
|
||||
|
||||
|
||||
def detect_conflicting_passes(scene, view_layer):
|
||||
# Detect conflicting render pass names for UI.
|
||||
counter = {}
|
||||
|
@@ -46,8 +46,8 @@ class CYCLES_OT_use_shading_nodes(Operator):
|
||||
|
||||
class CYCLES_OT_add_aov(bpy.types.Operator):
|
||||
"""Add an AOV pass"""
|
||||
bl_idname = "cycles.add_aov"
|
||||
bl_label = "Add AOV"
|
||||
bl_idname="cycles.add_aov"
|
||||
bl_label="Add AOV"
|
||||
|
||||
def execute(self, context):
|
||||
view_layer = context.view_layer
|
||||
@@ -61,8 +61,8 @@ class CYCLES_OT_add_aov(bpy.types.Operator):
|
||||
|
||||
class CYCLES_OT_remove_aov(bpy.types.Operator):
|
||||
"""Remove an AOV pass"""
|
||||
bl_idname = "cycles.remove_aov"
|
||||
bl_label = "Remove AOV"
|
||||
bl_idname="cycles.remove_aov"
|
||||
bl_label="Remove AOV"
|
||||
|
||||
def execute(self, context):
|
||||
view_layer = context.view_layer
|
||||
@@ -203,7 +203,6 @@ classes = (
|
||||
CYCLES_OT_merge_images
|
||||
)
|
||||
|
||||
|
||||
def register():
|
||||
from bpy.utils import register_class
|
||||
for cls in classes:
|
||||
|
@@ -143,7 +143,7 @@ enum_texture_limit = (
|
||||
('8192', "8192", "Limit texture size to 8192 pixels", 7),
|
||||
)
|
||||
|
||||
enum_view3d_shading_render_pass = (
|
||||
enum_view3d_shading_render_pass= (
|
||||
('', "General", ""),
|
||||
|
||||
('COMBINED', "Combined", "Show the Combined Render pass", 1),
|
||||
@@ -188,13 +188,11 @@ def enum_openimagedenoise_denoiser(self, context):
|
||||
return [('OPENIMAGEDENOISE', "OpenImageDenoise", "Use Intel OpenImageDenoise AI denoiser running on the CPU", 4)]
|
||||
return []
|
||||
|
||||
|
||||
def enum_optix_denoiser(self, context):
|
||||
if not context or bool(context.preferences.addons[__package__].preferences.get_devices_for_type('OPTIX')):
|
||||
return [('OPTIX', "OptiX", "Use the OptiX AI denoiser with GPU acceleration, only available on NVIDIA GPUs", 2)]
|
||||
return []
|
||||
|
||||
|
||||
def enum_preview_denoiser(self, context):
|
||||
optix_items = enum_optix_denoiser(self, context)
|
||||
oidn_items = enum_openimagedenoise_denoiser(self, context)
|
||||
@@ -208,14 +206,12 @@ def enum_preview_denoiser(self, context):
|
||||
items += oidn_items
|
||||
return items
|
||||
|
||||
|
||||
def enum_denoiser(self, context):
|
||||
items = [('NLM', "NLM", "Cycles native non-local means denoiser, running on any compute device", 1)]
|
||||
items += enum_optix_denoiser(self, context)
|
||||
items += enum_openimagedenoise_denoiser(self, context)
|
||||
return items
|
||||
|
||||
|
||||
enum_denoising_input_passes = (
|
||||
('RGB', "Color", "Use only color as input", 1),
|
||||
('RGB_ALBEDO', "Color + Albedo", "Use color and albedo data as input", 2),
|
||||
@@ -417,18 +413,18 @@ class CyclesRenderSettings(bpy.types.PropertyGroup):
|
||||
)
|
||||
|
||||
min_light_bounces: IntProperty(
|
||||
name="Min Light Bounces",
|
||||
description="Minimum number of light bounces. Setting this higher reduces noise in the first bounces, "
|
||||
"but can also be less efficient for more complex geometry like hair and volumes",
|
||||
min=0, max=1024,
|
||||
default=0,
|
||||
name="Min Light Bounces",
|
||||
description="Minimum number of light bounces. Setting this higher reduces noise in the first bounces, "
|
||||
"but can also be less efficient for more complex geometry like hair and volumes",
|
||||
min=0, max=1024,
|
||||
default=0,
|
||||
)
|
||||
min_transparent_bounces: IntProperty(
|
||||
name="Min Transparent Bounces",
|
||||
description="Minimum number of transparent bounces. Setting this higher reduces noise in the first bounces, "
|
||||
"but can also be less efficient for more complex geometry like hair and volumes",
|
||||
min=0, max=1024,
|
||||
default=0,
|
||||
name="Min Transparent Bounces",
|
||||
description="Minimum number of transparent bounces. Setting this higher reduces noise in the first bounces, "
|
||||
"but can also be less efficient for more complex geometry like hair and volumes",
|
||||
min=0, max=1024,
|
||||
default=0,
|
||||
)
|
||||
|
||||
caustics_reflective: BoolProperty(
|
||||
@@ -1329,7 +1325,6 @@ class CyclesAOVPass(bpy.types.PropertyGroup):
|
||||
default=""
|
||||
)
|
||||
|
||||
|
||||
class CyclesRenderLayerSettings(bpy.types.PropertyGroup):
|
||||
|
||||
pass_debug_bvh_traversed_nodes: BoolProperty(
|
||||
@@ -1473,31 +1468,31 @@ class CyclesRenderLayerSettings(bpy.types.PropertyGroup):
|
||||
description="Render cryptomatte object pass, for isolating objects in compositing",
|
||||
default=False,
|
||||
update=update_render_passes,
|
||||
)
|
||||
)
|
||||
use_pass_crypto_material: BoolProperty(
|
||||
name="Cryptomatte Material",
|
||||
description="Render cryptomatte material pass, for isolating materials in compositing",
|
||||
default=False,
|
||||
update=update_render_passes,
|
||||
)
|
||||
)
|
||||
use_pass_crypto_asset: BoolProperty(
|
||||
name="Cryptomatte Asset",
|
||||
description="Render cryptomatte asset pass, for isolating groups of objects with the same parent",
|
||||
default=False,
|
||||
update=update_render_passes,
|
||||
)
|
||||
)
|
||||
pass_crypto_depth: IntProperty(
|
||||
name="Cryptomatte Levels",
|
||||
description="Sets how many unique objects can be distinguished per pixel",
|
||||
default=6, min=2, max=16, step=2,
|
||||
update=update_render_passes,
|
||||
)
|
||||
)
|
||||
pass_crypto_accurate: BoolProperty(
|
||||
name="Cryptomatte Accurate",
|
||||
description="Generate a more accurate Cryptomatte pass. CPU only, may render slower and use more memory",
|
||||
default=True,
|
||||
update=update_render_passes,
|
||||
)
|
||||
)
|
||||
|
||||
aovs: CollectionProperty(
|
||||
type=CyclesAOVPass,
|
||||
@@ -1649,6 +1644,7 @@ class CyclesPreferences(bpy.types.AddonPreferences):
|
||||
col.label(text="OptiX support is experimental", icon='INFO')
|
||||
col.label(text="Not all Cycles features are supported yet", icon='BLANK1')
|
||||
|
||||
|
||||
def draw_impl(self, layout, context):
|
||||
row = layout.row()
|
||||
row.prop(self, "compute_device_type", expand=True)
|
||||
|
@@ -711,9 +711,9 @@ class CYCLES_RENDER_PT_performance_acceleration_structure(CyclesButtonsPanel, Pa
|
||||
if use_cpu(context):
|
||||
use_embree = _cycles.with_embree
|
||||
if not use_embree:
|
||||
sub = col.column(align=True)
|
||||
sub.label(text="Cycles built without Embree support")
|
||||
sub.label(text="CPU raytracing performance will be poor")
|
||||
sub = col.column(align=True)
|
||||
sub.label(text="Cycles built without Embree support")
|
||||
sub.label(text="CPU raytracing performance will be poor")
|
||||
|
||||
col.prop(cscene, "debug_use_spatial_splits")
|
||||
sub = col.column()
|
||||
@@ -843,6 +843,8 @@ class CYCLES_RENDER_PT_passes_data(CyclesButtonsPanel, Panel):
|
||||
col.prop(cycles_view_layer, "pass_debug_render_time", text="Render Time")
|
||||
col.prop(cycles_view_layer, "pass_debug_sample_count", text="Sample Count")
|
||||
|
||||
|
||||
|
||||
layout.prop(view_layer, "pass_alpha_threshold")
|
||||
|
||||
|
||||
@@ -957,15 +959,7 @@ class CYCLES_RENDER_PT_passes_aov(CyclesButtonsPanel, Panel):
|
||||
|
||||
row = layout.row()
|
||||
col = row.column()
|
||||
col.template_list(
|
||||
"CYCLES_RENDER_UL_aov",
|
||||
"aovs",
|
||||
cycles_view_layer,
|
||||
"aovs",
|
||||
cycles_view_layer,
|
||||
"active_aov",
|
||||
rows=2,
|
||||
)
|
||||
col.template_list("CYCLES_RENDER_UL_aov", "aovs", cycles_view_layer, "aovs", cycles_view_layer, "active_aov", rows=2)
|
||||
|
||||
col = row.column()
|
||||
sub = col.column(align=True)
|
||||
@@ -973,9 +967,9 @@ class CYCLES_RENDER_PT_passes_aov(CyclesButtonsPanel, Panel):
|
||||
sub.operator("cycles.remove_aov", icon='REMOVE', text="")
|
||||
|
||||
if cycles_view_layer.active_aov < len(cycles_view_layer.aovs):
|
||||
active_aov = cycles_view_layer.aovs[cycles_view_layer.active_aov]
|
||||
if active_aov.conflict:
|
||||
layout.label(text=active_aov.conflict, icon='ERROR')
|
||||
active_aov = cycles_view_layer.aovs[cycles_view_layer.active_aov]
|
||||
if active_aov.conflict:
|
||||
layout.label(text=active_aov.conflict, icon='ERROR')
|
||||
|
||||
|
||||
class CYCLES_RENDER_PT_denoising(CyclesButtonsPanel, Panel):
|
||||
@@ -1228,7 +1222,7 @@ class CYCLES_OBJECT_PT_motion_blur(CyclesButtonsPanel, Panel):
|
||||
|
||||
def has_geometry_visibility(ob):
|
||||
return ob and ((ob.type in {'MESH', 'CURVE', 'SURFACE', 'FONT', 'META', 'LIGHT'}) or
|
||||
(ob.instance_type == 'COLLECTION' and ob.instance_collection))
|
||||
(ob.instance_type == 'COLLECTION' and ob.instance_collection))
|
||||
|
||||
|
||||
class CYCLES_OBJECT_PT_shading(CyclesButtonsPanel, Panel):
|
||||
@@ -1238,7 +1232,7 @@ class CYCLES_OBJECT_PT_shading(CyclesButtonsPanel, Panel):
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return CyclesButtonsPanel.poll(context) and (context.object)
|
||||
return CyclesButtonsPanel.poll(context) and (context.object)
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@@ -1261,7 +1255,7 @@ class CYCLES_OBJECT_PT_visibility(CyclesButtonsPanel, Panel):
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return CyclesButtonsPanel.poll(context) and (context.object)
|
||||
return CyclesButtonsPanel.poll(context) and (context.object)
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@@ -1871,7 +1865,6 @@ class CYCLES_RENDER_PT_bake_influence(CyclesButtonsPanel, Panel):
|
||||
bl_context = "render"
|
||||
bl_parent_id = "CYCLES_RENDER_PT_bake"
|
||||
COMPAT_ENGINES = {'CYCLES'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
scene = context.scene
|
||||
@@ -2089,7 +2082,6 @@ class CYCLES_RENDER_PT_simplify_viewport(CyclesButtonsPanel, Panel):
|
||||
col.prop(rd, "simplify_child_particles", text="Child Particles")
|
||||
col.prop(cscene, "texture_limit", text="Texture Limit")
|
||||
col.prop(cscene, "ao_bounces", text="AO Bounces")
|
||||
col.prop(rd, "simplify_volumes", text="Volume Resolution")
|
||||
|
||||
|
||||
class CYCLES_RENDER_PT_simplify_render(CyclesButtonsPanel, Panel):
|
||||
@@ -2157,10 +2149,8 @@ class CYCLES_VIEW3D_PT_shading_render_pass(Panel):
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return (
|
||||
context.engine in cls.COMPAT_ENGINES and
|
||||
context.space_data.shading.type == 'RENDERED'
|
||||
)
|
||||
return (context.engine in cls.COMPAT_ENGINES
|
||||
and context.space_data.shading.type == 'RENDERED')
|
||||
|
||||
def draw(self, context):
|
||||
shading = context.space_data.shading
|
||||
@@ -2178,10 +2168,8 @@ class CYCLES_VIEW3D_PT_shading_lighting(Panel):
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return (
|
||||
context.engine in cls.COMPAT_ENGINES and
|
||||
context.space_data.shading.type == 'RENDERED'
|
||||
)
|
||||
return (context.engine in cls.COMPAT_ENGINES
|
||||
and context.space_data.shading.type == 'RENDERED')
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@@ -2210,14 +2198,12 @@ class CYCLES_VIEW3D_PT_shading_lighting(Panel):
|
||||
col.prop(shading, "studiolight_intensity")
|
||||
col.prop(shading, "studiolight_background_alpha")
|
||||
|
||||
|
||||
class CYCLES_VIEW3D_PT_simplify_greasepencil(CyclesButtonsPanel, Panel, GreasePencilSimplifyPanel):
|
||||
bl_label = "Grease Pencil"
|
||||
bl_parent_id = "CYCLES_RENDER_PT_simplify"
|
||||
COMPAT_ENGINES = {'CYCLES'}
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
|
||||
|
||||
def draw_device(self, context):
|
||||
scene = context.scene
|
||||
layout = self.layout
|
||||
|
@@ -137,11 +137,9 @@ def do_versions(self):
|
||||
# Caustics Reflective/Refractive separation in 272
|
||||
if version <= (2, 72, 0):
|
||||
cscene = scene.cycles
|
||||
if (
|
||||
cscene.get("no_caustics", False) and
|
||||
not cscene.is_property_set("caustics_reflective") and
|
||||
not cscene.is_property_set("caustics_refractive")
|
||||
):
|
||||
if (cscene.get("no_caustics", False) and
|
||||
not cscene.is_property_set("caustics_reflective") and
|
||||
not cscene.is_property_set("caustics_refractive")):
|
||||
cscene.caustics_reflective = False
|
||||
cscene.caustics_refractive = False
|
||||
|
||||
|
@@ -15,7 +15,6 @@
|
||||
*/
|
||||
|
||||
#include "blender/blender_device.h"
|
||||
#include "blender/blender_session.h"
|
||||
#include "blender/blender_util.h"
|
||||
|
||||
#include "util/util_foreach.h"
|
||||
@@ -43,18 +42,6 @@ int blender_device_threads(BL::Scene &b_scene)
|
||||
|
||||
DeviceInfo blender_device_info(BL::Preferences &b_preferences, BL::Scene &b_scene, bool background)
|
||||
{
|
||||
if (BlenderSession::device_override != DEVICE_MASK_ALL) {
|
||||
vector<DeviceInfo> devices = Device::available_devices(BlenderSession::device_override);
|
||||
|
||||
if (devices.empty()) {
|
||||
printf("Found no Cycles device of the specified type, falling back to CPU...\n");
|
||||
return Device::available_devices(DEVICE_MASK_CPU).front();
|
||||
}
|
||||
|
||||
int threads = blender_device_threads(b_scene);
|
||||
return Device::get_multi_device(devices, threads, background);
|
||||
}
|
||||
|
||||
PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
|
||||
|
||||
/* Default to CPU device. */
|
||||
|
@@ -968,44 +968,6 @@ static PyObject *get_device_types_func(PyObject * /*self*/, PyObject * /*args*/)
|
||||
return list;
|
||||
}
|
||||
|
||||
static PyObject *set_device_override_func(PyObject * /*self*/, PyObject *arg)
|
||||
{
|
||||
PyObject *override_string = PyObject_Str(arg);
|
||||
string override = PyUnicode_AsUTF8(override_string);
|
||||
Py_DECREF(override_string);
|
||||
|
||||
bool include_cpu = false;
|
||||
const string cpu_suffix = "+CPU";
|
||||
if (string_endswith(override, cpu_suffix)) {
|
||||
include_cpu = true;
|
||||
override = override.substr(0, override.length() - cpu_suffix.length());
|
||||
}
|
||||
|
||||
if (override == "CPU") {
|
||||
BlenderSession::device_override = DEVICE_MASK_CPU;
|
||||
}
|
||||
else if (override == "OPENCL") {
|
||||
BlenderSession::device_override = DEVICE_MASK_OPENCL;
|
||||
}
|
||||
else if (override == "CUDA") {
|
||||
BlenderSession::device_override = DEVICE_MASK_CUDA;
|
||||
}
|
||||
else if (override == "OPTIX") {
|
||||
BlenderSession::device_override = DEVICE_MASK_OPTIX;
|
||||
}
|
||||
else {
|
||||
printf("\nError: %s is not a valid Cycles device.\n", override.c_str());
|
||||
Py_RETURN_FALSE;
|
||||
}
|
||||
|
||||
if (include_cpu) {
|
||||
BlenderSession::device_override = (DeviceTypeMask)(BlenderSession::device_override |
|
||||
DEVICE_MASK_CPU);
|
||||
}
|
||||
|
||||
Py_RETURN_TRUE;
|
||||
}
|
||||
|
||||
static PyMethodDef methods[] = {
|
||||
{"init", init_func, METH_VARARGS, ""},
|
||||
{"exit", exit_func, METH_VARARGS, ""},
|
||||
@@ -1045,7 +1007,6 @@ static PyMethodDef methods[] = {
|
||||
|
||||
/* Compute Device selection */
|
||||
{"get_device_types", get_device_types_func, METH_VARARGS, ""},
|
||||
{"set_device_override", set_device_override_func, METH_O, ""},
|
||||
|
||||
{NULL, NULL, 0, NULL},
|
||||
};
|
||||
|
@@ -47,7 +47,6 @@
|
||||
|
||||
CCL_NAMESPACE_BEGIN
|
||||
|
||||
DeviceTypeMask BlenderSession::device_override = DEVICE_MASK_ALL;
|
||||
bool BlenderSession::headless = false;
|
||||
int BlenderSession::num_resumable_chunks = 0;
|
||||
int BlenderSession::current_resumable_chunk = 0;
|
||||
@@ -563,10 +562,6 @@ void BlenderSession::render(BL::Depsgraph &b_depsgraph_)
|
||||
session->reset(buffer_params, effective_layer_samples);
|
||||
|
||||
/* render */
|
||||
if (!b_engine.is_preview() && background && print_render_stats) {
|
||||
scene->enable_update_stats();
|
||||
}
|
||||
|
||||
session->start();
|
||||
session->wait();
|
||||
|
||||
|
@@ -126,7 +126,6 @@ class BlenderSession {
|
||||
/* Global state which is common for all render sessions created from Blender.
|
||||
* Usually denotes command line arguments.
|
||||
*/
|
||||
static DeviceTypeMask device_override;
|
||||
|
||||
/* Blender is running from the command line, no windows are shown and some
|
||||
* extra render optimization is possible (possible to free draw-only data and
|
||||
|
@@ -422,13 +422,11 @@ void BlenderSync::sync_film(BL::SpaceView3D &b_v3d)
|
||||
|
||||
void BlenderSync::sync_view_layer(BL::SpaceView3D & /*b_v3d*/, BL::ViewLayer &b_view_layer)
|
||||
{
|
||||
/* render layer */
|
||||
view_layer.name = b_view_layer.name();
|
||||
|
||||
/* Filter. */
|
||||
view_layer.use_background_shader = b_view_layer.use_sky();
|
||||
view_layer.use_background_ao = b_view_layer.use_ao();
|
||||
/* Always enable surfaces for baking, otherwise there is nothing to bake to. */
|
||||
view_layer.use_surfaces = b_view_layer.use_solid() || scene->bake_manager->get_baking();
|
||||
view_layer.use_surfaces = b_view_layer.use_solid();
|
||||
view_layer.use_hair = b_view_layer.use_strand();
|
||||
view_layer.use_volumes = b_view_layer.use_volumes();
|
||||
|
||||
|
@@ -359,10 +359,6 @@ string CUDADevice::compile_kernel_get_common_cflags(
|
||||
cflags += " -D__SPLIT__";
|
||||
}
|
||||
|
||||
# ifdef WITH_NANOVDB
|
||||
cflags += " -DWITH_NANOVDB";
|
||||
# endif
|
||||
|
||||
return cflags;
|
||||
}
|
||||
|
||||
@@ -1257,6 +1253,42 @@ void CUDADevice::tex_alloc(device_texture &mem)
|
||||
cuda_assert(cuMemcpyHtoD(mem.device_pointer, mem.host_pointer, size));
|
||||
}
|
||||
|
||||
/* Kepler+, bindless textures. */
|
||||
CUDA_RESOURCE_DESC resDesc;
|
||||
memset(&resDesc, 0, sizeof(resDesc));
|
||||
|
||||
if (array_3d) {
|
||||
resDesc.resType = CU_RESOURCE_TYPE_ARRAY;
|
||||
resDesc.res.array.hArray = array_3d;
|
||||
resDesc.flags = 0;
|
||||
}
|
||||
else if (mem.data_height > 0) {
|
||||
resDesc.resType = CU_RESOURCE_TYPE_PITCH2D;
|
||||
resDesc.res.pitch2D.devPtr = mem.device_pointer;
|
||||
resDesc.res.pitch2D.format = format;
|
||||
resDesc.res.pitch2D.numChannels = mem.data_elements;
|
||||
resDesc.res.pitch2D.height = mem.data_height;
|
||||
resDesc.res.pitch2D.width = mem.data_width;
|
||||
resDesc.res.pitch2D.pitchInBytes = dst_pitch;
|
||||
}
|
||||
else {
|
||||
resDesc.resType = CU_RESOURCE_TYPE_LINEAR;
|
||||
resDesc.res.linear.devPtr = mem.device_pointer;
|
||||
resDesc.res.linear.format = format;
|
||||
resDesc.res.linear.numChannels = mem.data_elements;
|
||||
resDesc.res.linear.sizeInBytes = mem.device_size;
|
||||
}
|
||||
|
||||
CUDA_TEXTURE_DESC texDesc;
|
||||
memset(&texDesc, 0, sizeof(texDesc));
|
||||
texDesc.addressMode[0] = address_mode;
|
||||
texDesc.addressMode[1] = address_mode;
|
||||
texDesc.addressMode[2] = address_mode;
|
||||
texDesc.filterMode = filter_mode;
|
||||
texDesc.flags = CU_TRSF_NORMALIZED_COORDINATES;
|
||||
|
||||
cuda_assert(cuTexObjectCreate(&cmem->texobject, &resDesc, &texDesc, NULL));
|
||||
|
||||
/* Resize once */
|
||||
const uint slot = mem.slot;
|
||||
if (slot >= texture_info.size()) {
|
||||
@@ -1267,51 +1299,8 @@ void CUDADevice::tex_alloc(device_texture &mem)
|
||||
|
||||
/* Set Mapping and tag that we need to (re-)upload to device */
|
||||
texture_info[slot] = mem.info;
|
||||
texture_info[slot].data = (uint64_t)cmem->texobject;
|
||||
need_texture_info = true;
|
||||
|
||||
if (mem.info.data_type != IMAGE_DATA_TYPE_NANOVDB_FLOAT &&
|
||||
mem.info.data_type != IMAGE_DATA_TYPE_NANOVDB_FLOAT3) {
|
||||
/* Kepler+, bindless textures. */
|
||||
CUDA_RESOURCE_DESC resDesc;
|
||||
memset(&resDesc, 0, sizeof(resDesc));
|
||||
|
||||
if (array_3d) {
|
||||
resDesc.resType = CU_RESOURCE_TYPE_ARRAY;
|
||||
resDesc.res.array.hArray = array_3d;
|
||||
resDesc.flags = 0;
|
||||
}
|
||||
else if (mem.data_height > 0) {
|
||||
resDesc.resType = CU_RESOURCE_TYPE_PITCH2D;
|
||||
resDesc.res.pitch2D.devPtr = mem.device_pointer;
|
||||
resDesc.res.pitch2D.format = format;
|
||||
resDesc.res.pitch2D.numChannels = mem.data_elements;
|
||||
resDesc.res.pitch2D.height = mem.data_height;
|
||||
resDesc.res.pitch2D.width = mem.data_width;
|
||||
resDesc.res.pitch2D.pitchInBytes = dst_pitch;
|
||||
}
|
||||
else {
|
||||
resDesc.resType = CU_RESOURCE_TYPE_LINEAR;
|
||||
resDesc.res.linear.devPtr = mem.device_pointer;
|
||||
resDesc.res.linear.format = format;
|
||||
resDesc.res.linear.numChannels = mem.data_elements;
|
||||
resDesc.res.linear.sizeInBytes = mem.device_size;
|
||||
}
|
||||
|
||||
CUDA_TEXTURE_DESC texDesc;
|
||||
memset(&texDesc, 0, sizeof(texDesc));
|
||||
texDesc.addressMode[0] = address_mode;
|
||||
texDesc.addressMode[1] = address_mode;
|
||||
texDesc.addressMode[2] = address_mode;
|
||||
texDesc.filterMode = filter_mode;
|
||||
texDesc.flags = CU_TRSF_NORMALIZED_COORDINATES;
|
||||
|
||||
cuda_assert(cuTexObjectCreate(&cmem->texobject, &resDesc, &texDesc, NULL));
|
||||
|
||||
texture_info[slot].data = (uint64_t)cmem->texobject;
|
||||
}
|
||||
else {
|
||||
texture_info[slot].data = (uint64_t)mem.device_pointer;
|
||||
}
|
||||
}
|
||||
|
||||
void CUDADevice::tex_free(device_texture &mem)
|
||||
|
@@ -166,8 +166,6 @@ device_texture::device_texture(Device *device,
|
||||
data_elements = 4;
|
||||
break;
|
||||
case IMAGE_DATA_TYPE_BYTE:
|
||||
case IMAGE_DATA_TYPE_NANOVDB_FLOAT:
|
||||
case IMAGE_DATA_TYPE_NANOVDB_FLOAT3:
|
||||
data_type = TYPE_UCHAR;
|
||||
data_elements = 1;
|
||||
break;
|
||||
|
@@ -2035,10 +2035,6 @@ string OpenCLDevice::kernel_build_options(const string *debug_src)
|
||||
build_options += "-D__KERNEL_DEBUG__ ";
|
||||
# endif
|
||||
|
||||
# ifdef WITH_NANOVDB
|
||||
build_options += "-DWITH_NANOVDB ";
|
||||
# endif
|
||||
|
||||
return build_options;
|
||||
}
|
||||
|
||||
|
@@ -1172,20 +1172,9 @@ bool OpenCLInfo::get_device_extensions(cl_device_id device_id,
|
||||
string *device_extensions,
|
||||
cl_int *error)
|
||||
{
|
||||
size_t extension_length = 0;
|
||||
char buffer[1024];
|
||||
cl_int err;
|
||||
/* Determine the size of the extension string*/
|
||||
if ((err = clGetDeviceInfo(device_id, CL_DEVICE_EXTENSIONS, 0, 0, &extension_length)) !=
|
||||
CL_SUCCESS) {
|
||||
if (error != NULL) {
|
||||
*error = err;
|
||||
}
|
||||
*device_extensions = "";
|
||||
return false;
|
||||
}
|
||||
vector<char> buffer(extension_length);
|
||||
if ((err = clGetDeviceInfo(
|
||||
device_id, CL_DEVICE_EXTENSIONS, extension_length, buffer.data(), NULL)) !=
|
||||
if ((err = clGetDeviceInfo(device_id, CL_DEVICE_EXTENSIONS, sizeof(buffer), &buffer, NULL)) !=
|
||||
CL_SUCCESS) {
|
||||
if (error != NULL) {
|
||||
*error = err;
|
||||
@@ -1196,7 +1185,7 @@ bool OpenCLInfo::get_device_extensions(cl_device_id device_id,
|
||||
if (error != NULL) {
|
||||
*error = CL_SUCCESS;
|
||||
}
|
||||
*device_extensions = string(buffer.data());
|
||||
*device_extensions = buffer;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@@ -35,7 +35,6 @@ Node::Node(const NodeType *type_, ustring name_) : name(name_), type(type_)
|
||||
assert(type);
|
||||
|
||||
owner = nullptr;
|
||||
socket_modified = ~0;
|
||||
|
||||
/* assign non-empty name, convenient for debugging */
|
||||
if (name.empty()) {
|
||||
@@ -75,37 +74,37 @@ static bool is_socket_array_float3(const SocketType &socket)
|
||||
void Node::set(const SocketType &input, bool value)
|
||||
{
|
||||
assert(input.type == SocketType::BOOLEAN);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<bool>(this, input) = value;
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, int value)
|
||||
{
|
||||
assert((input.type == SocketType::INT || input.type == SocketType::ENUM));
|
||||
set_if_different(input, value);
|
||||
get_socket_value<int>(this, input) = value;
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, uint value)
|
||||
{
|
||||
assert(input.type == SocketType::UINT);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<uint>(this, input) = value;
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, float value)
|
||||
{
|
||||
assert(input.type == SocketType::FLOAT);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<float>(this, input) = value;
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, float2 value)
|
||||
{
|
||||
assert(input.type == SocketType::FLOAT);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<float2>(this, input) = value;
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, float3 value)
|
||||
{
|
||||
assert(is_socket_float3(input));
|
||||
set_if_different(input, value);
|
||||
get_socket_value<float3>(this, input) = value;
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, const char *value)
|
||||
@@ -116,12 +115,12 @@ void Node::set(const SocketType &input, const char *value)
|
||||
void Node::set(const SocketType &input, ustring value)
|
||||
{
|
||||
if (input.type == SocketType::STRING) {
|
||||
set_if_different(input, value);
|
||||
get_socket_value<ustring>(this, input) = value;
|
||||
}
|
||||
else if (input.type == SocketType::ENUM) {
|
||||
const NodeEnum &enm = *input.enum_values;
|
||||
if (enm.exists(value)) {
|
||||
set_if_different(input, enm[value]);
|
||||
get_socket_value<int>(this, input) = enm[value];
|
||||
}
|
||||
else {
|
||||
assert(0);
|
||||
@@ -135,62 +134,62 @@ void Node::set(const SocketType &input, ustring value)
|
||||
void Node::set(const SocketType &input, const Transform &value)
|
||||
{
|
||||
assert(input.type == SocketType::TRANSFORM);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<Transform>(this, input) = value;
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, Node *value)
|
||||
{
|
||||
assert(input.type == SocketType::NODE);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<Node *>(this, input) = value;
|
||||
}
|
||||
|
||||
/* set array values */
|
||||
void Node::set(const SocketType &input, array<bool> &value)
|
||||
{
|
||||
assert(input.type == SocketType::BOOLEAN_ARRAY);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<array<bool>>(this, input).steal_data(value);
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, array<int> &value)
|
||||
{
|
||||
assert(input.type == SocketType::INT_ARRAY);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<array<int>>(this, input).steal_data(value);
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, array<float> &value)
|
||||
{
|
||||
assert(input.type == SocketType::FLOAT_ARRAY);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<array<float>>(this, input).steal_data(value);
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, array<float2> &value)
|
||||
{
|
||||
assert(input.type == SocketType::FLOAT_ARRAY);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<array<float2>>(this, input).steal_data(value);
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, array<float3> &value)
|
||||
{
|
||||
assert(is_socket_array_float3(input));
|
||||
set_if_different(input, value);
|
||||
get_socket_value<array<float3>>(this, input).steal_data(value);
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, array<ustring> &value)
|
||||
{
|
||||
assert(input.type == SocketType::STRING_ARRAY);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<array<ustring>>(this, input).steal_data(value);
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, array<Transform> &value)
|
||||
{
|
||||
assert(input.type == SocketType::TRANSFORM_ARRAY);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<array<Transform>>(this, input).steal_data(value);
|
||||
}
|
||||
|
||||
void Node::set(const SocketType &input, array<Node *> &value)
|
||||
{
|
||||
assert(input.type == SocketType::TRANSFORM_ARRAY);
|
||||
set_if_different(input, value);
|
||||
get_socket_value<array<Node *>>(this, input).steal_data(value);
|
||||
}
|
||||
|
||||
/* get values */
|
||||
@@ -697,56 +696,4 @@ void Node::set_owner(const NodeOwner *owner_)
|
||||
owner = owner_;
|
||||
}
|
||||
|
||||
bool Node::socket_is_modified(const SocketType &input) const
|
||||
{
|
||||
return (socket_modified & input.modified_flag_bit) != 0;
|
||||
}
|
||||
|
||||
bool Node::is_modified()
|
||||
{
|
||||
return socket_modified != 0;
|
||||
}
|
||||
|
||||
void Node::tag_modified()
|
||||
{
|
||||
socket_modified = ~0u;
|
||||
}
|
||||
|
||||
void Node::clear_modified()
|
||||
{
|
||||
socket_modified = 0;
|
||||
}
|
||||
|
||||
template<typename T> void Node::set_if_different(const SocketType &input, T value)
|
||||
{
|
||||
if (get_socket_value<T>(this, input) == value) {
|
||||
return;
|
||||
}
|
||||
|
||||
get_socket_value<T>(this, input) = value;
|
||||
socket_modified |= input.modified_flag_bit;
|
||||
}
|
||||
|
||||
template<typename T> void Node::set_if_different(const SocketType &input, array<T> &value)
|
||||
{
|
||||
if (!socket_is_modified(input)) {
|
||||
if (get_socket_value<array<T>>(this, input) == value) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
get_socket_value<array<T>>(this, input).steal_data(value);
|
||||
socket_modified |= input.modified_flag_bit;
|
||||
}
|
||||
|
||||
void Node::print_modified_sockets() const
|
||||
{
|
||||
printf("Node : %s\n", name.c_str());
|
||||
for (auto &socket : type->inputs) {
|
||||
if (socket_is_modified(socket)) {
|
||||
printf("-- socket modified : %s\n", socket.name.c_str());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
CCL_NAMESPACE_END
|
||||
|
@@ -101,15 +101,6 @@ struct Node {
|
||||
/* Type testing, taking into account base classes. */
|
||||
bool is_a(const NodeType *type);
|
||||
|
||||
bool socket_is_modified(const SocketType &input) const;
|
||||
|
||||
bool is_modified();
|
||||
|
||||
void tag_modified();
|
||||
void clear_modified();
|
||||
|
||||
void print_modified_sockets() const;
|
||||
|
||||
ustring name;
|
||||
const NodeType *type;
|
||||
|
||||
@@ -118,12 +109,6 @@ struct Node {
|
||||
|
||||
protected:
|
||||
const NodeOwner *owner;
|
||||
|
||||
SocketModifiedFlags socket_modified;
|
||||
|
||||
template<typename T> void set_if_different(const SocketType &input, T value);
|
||||
|
||||
template<typename T> void set_if_different(const SocketType &input, array<T> &value);
|
||||
};
|
||||
|
||||
CCL_NAMESPACE_END
|
||||
|
@@ -167,8 +167,6 @@ void NodeType::register_input(ustring name,
|
||||
socket.enum_values = enum_values;
|
||||
socket.node_type = node_type;
|
||||
socket.flags = flags | extra_flags;
|
||||
assert(inputs.size() < std::numeric_limits<SocketModifiedFlags>::digits);
|
||||
socket.modified_flag_bit = (1ull << inputs.size());
|
||||
inputs.push_back(socket);
|
||||
}
|
||||
|
||||
|
@@ -28,8 +28,6 @@ CCL_NAMESPACE_BEGIN
|
||||
struct Node;
|
||||
struct NodeType;
|
||||
|
||||
typedef uint64_t SocketModifiedFlags;
|
||||
|
||||
/* Socket Type */
|
||||
|
||||
struct SocketType {
|
||||
@@ -90,7 +88,6 @@ struct SocketType {
|
||||
const NodeType **node_type;
|
||||
int flags;
|
||||
ustring ui_name;
|
||||
SocketModifiedFlags modified_flag_bit;
|
||||
|
||||
size_t size() const;
|
||||
bool is_array() const;
|
||||
@@ -165,12 +162,12 @@ struct NodeType {
|
||||
|
||||
/* Sock Definition Macros */
|
||||
|
||||
#define SOCKET_OFFSETOF(T, name) offsetof(T, name)
|
||||
#define SOCKET_SIZEOF(T, name) (sizeof(T::name))
|
||||
#define SOCKET_OFFSETOF(T, name) (((char *)&(((T *)1)->name)) - (char *)1)
|
||||
#define SOCKET_SIZEOF(T, name) (sizeof(((T *)1)->name))
|
||||
#define SOCKET_DEFINE(name, ui_name, default_value, datatype, TYPE, flags, ...) \
|
||||
{ \
|
||||
static datatype defval = default_value; \
|
||||
CHECK_TYPE(T::name, datatype); \
|
||||
CHECK_TYPE(((T *)1)->name, datatype); \
|
||||
type->register_input(ustring(#name), \
|
||||
ustring(ui_name), \
|
||||
TYPE, \
|
||||
|
@@ -442,12 +442,6 @@ if(WITH_CYCLES_CUDA_BINARIES)
|
||||
set(cuda_flags ${cuda_flags} -D __KERNEL_DEBUG__)
|
||||
endif()
|
||||
|
||||
if(WITH_NANOVDB)
|
||||
set(cuda_flags ${cuda_flags}
|
||||
-D WITH_NANOVDB
|
||||
-I "${NANOVDB_INCLUDE_DIR}")
|
||||
endif()
|
||||
|
||||
if(WITH_CYCLES_CUBIN_COMPILER)
|
||||
string(SUBSTRING ${arch} 3 -1 CUDA_ARCH)
|
||||
|
||||
@@ -533,14 +527,8 @@ if(WITH_CYCLES_DEVICE_OPTIX AND WITH_CYCLES_CUDA_BINARIES)
|
||||
set(cuda_flags ${cuda_flags}
|
||||
-D __KERNEL_DEBUG__)
|
||||
endif()
|
||||
|
||||
if(WITH_NANOVDB)
|
||||
set(cuda_flags ${cuda_flags}
|
||||
-D WITH_NANOVDB
|
||||
-I "${NANOVDB_INCLUDE_DIR}")
|
||||
endif()
|
||||
|
||||
if(WITH_CYCLES_CUBIN_COMPILER)
|
||||
|
||||
# Needed to find libnvrtc-builtins.so. Can't do it from inside
|
||||
# cycles_cubin_cc since the env variable is read before main()
|
||||
if(APPLE)
|
||||
@@ -721,17 +709,3 @@ delayed_install(${CMAKE_CURRENT_SOURCE_DIR} "${SRC_SVM_HEADERS}" ${CYCLES_INSTAL
|
||||
delayed_install(${CMAKE_CURRENT_SOURCE_DIR} "${SRC_GEOM_HEADERS}" ${CYCLES_INSTALL_PATH}/source/kernel/geom)
|
||||
delayed_install(${CMAKE_CURRENT_SOURCE_DIR} "${SRC_UTIL_HEADERS}" ${CYCLES_INSTALL_PATH}/source/util)
|
||||
delayed_install(${CMAKE_CURRENT_SOURCE_DIR} "${SRC_SPLIT_HEADERS}" ${CYCLES_INSTALL_PATH}/source/kernel/split)
|
||||
|
||||
|
||||
if(WITH_NANOVDB)
|
||||
set(SRC_NANOVDB_HEADERS
|
||||
nanovdb/NanoVDB.h
|
||||
nanovdb/CNanoVDB.h
|
||||
)
|
||||
set(SRC_NANOVDB_UTIL_HEADERS
|
||||
nanovdb/util/CSampleFromVoxels.h
|
||||
nanovdb/util/SampleFromVoxels.h
|
||||
)
|
||||
delayed_install(${NANOVDB_INCLUDE_DIR} "${SRC_NANOVDB_HEADERS}" ${CYCLES_INSTALL_PATH}/source/nanovdb)
|
||||
delayed_install(${NANOVDB_INCLUDE_DIR} "${SRC_NANOVDB_UTIL_HEADERS}" ${CYCLES_INSTALL_PATH}/source/nanovdb/util)
|
||||
endif()
|
||||
|
@@ -438,8 +438,7 @@ ccl_device void bssrdf_sample(const ShaderClosure *sc, float xi, float *r, float
|
||||
}
|
||||
else if (xi < 2.0f) {
|
||||
xi -= 1.0f;
|
||||
radius = (bssrdf->radius.x > 0.0f && bssrdf->radius.y > 0.0f) ? bssrdf->radius.y :
|
||||
bssrdf->radius.z;
|
||||
radius = (bssrdf->radius.x > 0.0f) ? bssrdf->radius.y : bssrdf->radius.z;
|
||||
}
|
||||
else {
|
||||
xi -= 2.0f;
|
||||
|
@@ -195,7 +195,7 @@ ccl_device_inline void path_radiance_init(KernelGlobals *kg, PathRadiance *L)
|
||||
L->emission = make_float3(0.0f, 0.0f, 0.0f);
|
||||
L->background = make_float3(0.0f, 0.0f, 0.0f);
|
||||
L->ao = make_float3(0.0f, 0.0f, 0.0f);
|
||||
L->shadow = make_float3(0.0f, 0.0f, 0.0f);
|
||||
L->shadow = make_float4(0.0f, 0.0f, 0.0f, 0.0f);
|
||||
L->mist = 0.0f;
|
||||
|
||||
L->state.diffuse = make_float3(0.0f, 0.0f, 0.0f);
|
||||
@@ -439,7 +439,9 @@ ccl_device_inline void path_radiance_accum_light(KernelGlobals *kg,
|
||||
L->direct_volume += shaded_throughput * bsdf_eval->volume;
|
||||
|
||||
if (is_lamp) {
|
||||
L->shadow += shadow * shadow_fac;
|
||||
L->shadow.x += shadow.x * shadow_fac;
|
||||
L->shadow.y += shadow.y * shadow_fac;
|
||||
L->shadow.z += shadow.z * shadow_fac;
|
||||
}
|
||||
}
|
||||
else {
|
||||
|
@@ -383,7 +383,7 @@ ccl_device void kernel_bake_evaluate(
|
||||
break;
|
||||
}
|
||||
case SHADER_EVAL_SHADOW: {
|
||||
out = L.shadow;
|
||||
out = make_float3(L.shadow.x, L.shadow.y, L.shadow.z);
|
||||
break;
|
||||
}
|
||||
case SHADER_EVAL_DIFFUSE: {
|
||||
|
@@ -35,7 +35,6 @@ typedef unsigned int uint32_t;
|
||||
typedef unsigned long long uint64_t;
|
||||
typedef unsigned short half;
|
||||
typedef unsigned long long CUtexObject;
|
||||
|
||||
#ifdef CYCLES_CUBIN_CC
|
||||
# define FLT_MIN 1.175494350822287507969e-38f
|
||||
# define FLT_MAX 340282346638528859811704183484516925440.0f
|
||||
|
@@ -326,10 +326,9 @@ ccl_device_inline void kernel_write_light_passes(KernelGlobals *kg,
|
||||
kernel_write_pass_float3(buffer + kernel_data.film.pass_transmission_color,
|
||||
L->color_transmission);
|
||||
if (light_flag & PASSMASK(SHADOW)) {
|
||||
float3 shadow = L->shadow;
|
||||
kernel_write_pass_float4(
|
||||
buffer + kernel_data.film.pass_shadow,
|
||||
make_float4(shadow.x, shadow.y, shadow.z, kernel_data.film.pass_shadow_scale));
|
||||
float4 shadow = L->shadow;
|
||||
shadow.w = kernel_data.film.pass_shadow_scale;
|
||||
kernel_write_pass_float4(buffer + kernel_data.film.pass_shadow, shadow);
|
||||
}
|
||||
if (light_flag & PASSMASK(MIST))
|
||||
kernel_write_pass_float(buffer + kernel_data.film.pass_mist, 1.0f - L->mist);
|
||||
|
@@ -522,7 +522,7 @@ typedef ccl_addr_space struct PathRadiance {
|
||||
float3 indirect_transmission;
|
||||
float3 indirect_volume;
|
||||
|
||||
float3 shadow;
|
||||
float4 shadow;
|
||||
float mist;
|
||||
#endif
|
||||
|
||||
|
@@ -23,7 +23,7 @@
|
||||
#ifndef WITH_CYCLES_OPTIMIZED_KERNEL_AVX
|
||||
# define KERNEL_STUB
|
||||
#else
|
||||
/* SSE optimization disabled for now on 32 bit, see bug T36316. */
|
||||
/* SSE optimization disabled for now on 32 bit, see bug #36316 */
|
||||
# if !(defined(__GNUC__) && (defined(i386) || defined(_M_IX86)))
|
||||
# define __KERNEL_SSE__
|
||||
# define __KERNEL_SSE2__
|
||||
|
@@ -23,7 +23,7 @@
|
||||
#ifndef WITH_CYCLES_OPTIMIZED_KERNEL_AVX2
|
||||
# define KERNEL_STUB
|
||||
#else
|
||||
/* SSE optimization disabled for now on 32 bit, see bug T36316. */
|
||||
/* SSE optimization disabled for now on 32 bit, see bug #36316 */
|
||||
# if !(defined(__GNUC__) && (defined(i386) || defined(_M_IX86)))
|
||||
# define __KERNEL_SSE__
|
||||
# define __KERNEL_SSE2__
|
||||
|
@@ -23,7 +23,7 @@
|
||||
#ifndef WITH_CYCLES_OPTIMIZED_KERNEL_SSE2
|
||||
# define KERNEL_STUB
|
||||
#else
|
||||
/* SSE optimization disabled for now on 32 bit, see bug T36316. */
|
||||
/* SSE optimization disabled for now on 32 bit, see bug #36316 */
|
||||
# if !(defined(__GNUC__) && (defined(i386) || defined(_M_IX86)))
|
||||
# define __KERNEL_SSE2__
|
||||
# endif
|
||||
Some files were not shown because too many files have changed in this diff.