Compare commits

2 Commits

Author SHA1 Message Date
9f2a381841 Updated based on feedback by @mano-wii
---

Best result so far!
**Benchmarking**

| | master | temp branch |
|---|---|---|
| large_mesh_editing | Average: 6.973309 FPS | Average: 11.112296 FPS |
| | rdata 9ms iter 33ms (frame 144ms) | rdata 8ms iter 32ms (frame 88ms) |
| large_mesh_editing_ledge | Average: 6.943300 FPS | Average: 10.853841 FPS |
| | rdata 9ms iter 36ms (frame 144ms) | rdata 8ms iter 35ms (frame 92ms) |
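
For reference, a minimal sketch of how per-stage timings such as the `rdata`/`iter` numbers above can be collected once `DEBUG_TIME` is enabled (see the toggle in the diff below). The helper is hypothetical; it only assumes that `PIL_check_seconds_timer()` from `PIL_time.h` returns a time in seconds, which is what the `TIMEIT_*` macros in `PIL_time_utildefines.h` build on.

```c
/* Hypothetical timing sketch -- the real code uses the TIMEIT_* macros from
 * "PIL_time_utildefines.h"; this shows the equivalent manual measurement. */
#include <stdio.h>

#include "PIL_time.h" /* PIL_check_seconds_timer() */

static void time_extraction_step(void (*step_fn)(void), const char *label)
{
  const double t_start = PIL_check_seconds_timer();
  step_fn(); /* e.g. render-data build ("rdata") or the extraction loop ("iter") */
  const double t_end = PIL_check_seconds_timer();
  printf("%s: %.1f ms\n", label, (t_end - t_start) * 1000.0);
}
```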
Here are some things I will change before testing:

```diff
diff --git a/source/blender/draw/intern/draw_cache_extract_mesh.cc b/source/blender/draw/intern/draw_cache_extract_mesh.cc
index 622ea191a0a..4d7c2e16e3a 100644
--- a/source/blender/draw/intern/draw_cache_extract_mesh.cc
+++ b/source/blender/draw/intern/draw_cache_extract_mesh.cc
@@ -40,7 +40,7 @@
 #include "draw_cache_extract_mesh_private.h"
 #include "draw_cache_inline.h"

-// #define DEBUG_TIME
+#define DEBUG_TIME

 #ifdef DEBUG_TIME
 #  include "PIL_time_utildefines.h"
diff --git a/source/blender/editors/transform/transform_convert_mesh.c b/source/blender/editors/transform/transform_convert_mesh.c
index 26d844d4316..2e9109c17ed 100644
--- a/source/blender/editors/transform/transform_convert_mesh.c
+++ b/source/blender/editors/transform/transform_convert_mesh.c
@@ -1698,7 +1698,7 @@ static struct BMesh_PartialInfo *tc_mesh_ensure_partial_update(TransInfo *t,
       tcmd->editmesh_partial_info_prop_size = t->prop_size;
       recalc = false;
     }
-    else if (tcmd->editmesh_partial_info_prop_size != t->prop_size) {
+    else if (tcmd->editmesh_partial_info_prop_size_stored != t->prop_size) {
       BLI_assert(tcmd->editmesh_partial_info_prop_size_stored >
                  tcmd->editmesh_partial_info_prop_size);
       recalc = true;
@@ -1738,7 +1738,8 @@ static struct BMesh_PartialInfo *tc_mesh_ensure_partial_update(TransInfo *t,
   TransDataMirror *td_mirror = tc->data_mirror;
   for (i = 0; i < tc->data_mirror_len; i++, td_mirror++) {
     BMVert *v_mirr = (BMVert *)POINTER_OFFSET(td_mirror->loc_src, -offsetof(BMVert, co));
-    if (BM_elem_flag_test(v_mirr, BM_ELEM_TAG)) {
+    if (BM_elem_flag_test(v_mirr, BM_ELEM_TAG) ||
+        !compare_v3v3(td_mirror->loc, td_mirror->iloc, FLT_EPSILON)) {
       BMVert *v_mirr_other = (BMVert *)td_mirror->extra;
       if (!BM_elem_flag_test(v_mirr_other, BM_ELEM_TAG)) {
         BM_elem_flag_enable(v_mirr_other, BM_ELEM_TAG);

```
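
For context, a hedged restatement of the condition the second hunk introduces: the mirror counterpart is scheduled for a partial update not only when its source vertex is tagged, but also whenever the mirrored location has actually moved away from its initial value. The wrapper function below is illustrative only; `BM_elem_flag_test()` and `compare_v3v3()` are the BMesh/BLI calls used in the hunk itself.

```c
/* Hypothetical wrapper restating the test added above: update the mirror
 * counterpart when the source vertex is tagged OR its location has changed. */
#include <float.h>
#include <stdbool.h>

#include "BLI_math_vector.h" /* compare_v3v3() */
#include "bmesh.h"           /* BM_elem_flag_test(), BM_ELEM_TAG */

static bool mirror_needs_partial_update(BMVert *v_mirr,
                                        const float loc[3],
                                        const float iloc[3])
{
  return BM_elem_flag_test(v_mirr, BM_ELEM_TAG) ||
         !compare_v3v3(loc, iloc, FLT_EPSILON);
}
```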
2021-06-03 15:25:40 +10:00
97e63f5945 Partial updates 2021-06-03 15:19:43 +10:00
3355 changed files with 67370 additions and 138268 deletions

View File

@@ -265,5 +265,4 @@ ForEachMacros:
- VECTOR_SET_SLOT_PROBING_BEGIN
StatementMacros:
- PyObject_HEAD
- PyObject_VAR_HEAD

View File

@@ -110,10 +110,6 @@ if(POLICY CMP0074)
cmake_policy(SET CMP0074 NEW)
endif()
# Install CODE|SCRIPT allow the use of generator expressions.
if(POLICY CMP0087)
cmake_policy(SET CMP0087 NEW)
endif()
#-----------------------------------------------------------------------------
# Load some macros.
include(build_files/cmake/macros.cmake)
@@ -353,7 +349,7 @@ mark_as_advanced(WITH_SYSTEM_GLOG)
option(WITH_FREESTYLE "Enable Freestyle (advanced edges rendering)" ON)
# Misc
if(WIN32 OR APPLE)
if(WIN32)
option(WITH_INPUT_IME "Enable Input Method Editor (IME) for complex Asian character input" ON)
endif()
option(WITH_INPUT_NDOF "Enable NDOF input devices (SpaceNavigator and friends)" ON)
@@ -407,14 +403,14 @@ set(CYCLES_TEST_DEVICES CPU CACHE STRING "Run regression tests on the specified
set(CYCLES_CUDA_BINARIES_ARCH sm_30 sm_35 sm_37 sm_50 sm_52 sm_60 sm_61 sm_70 sm_75 sm_86 compute_75 CACHE STRING "CUDA architectures to build binaries for")
mark_as_advanced(CYCLES_CUDA_BINARIES_ARCH)
unset(PLATFORM_DEFAULT)
option(WITH_CYCLES_LOGGING "Build Cycles with logging support" ON)
option(WITH_CYCLES_DEBUG_NAN "Build Cycles with additional asserts for detecting NaNs and invalid values" OFF)
option(WITH_CYCLES_LOGGING "Build Cycles with logging support" ON)
option(WITH_CYCLES_DEBUG "Build Cycles with extra debug capabilities" OFF)
option(WITH_CYCLES_NATIVE_ONLY "Build Cycles with native kernel only (which fits current CPU, use for development only)" OFF)
option(WITH_CYCLES_KERNEL_ASAN "Build Cycles kernels with address sanitizer when WITH_COMPILER_ASAN is on, even if it's very slow" OFF)
mark_as_advanced(WITH_CYCLES_KERNEL_ASAN)
mark_as_advanced(WITH_CYCLES_CUBIN_COMPILER)
mark_as_advanced(WITH_CYCLES_LOGGING)
mark_as_advanced(WITH_CYCLES_DEBUG_NAN)
mark_as_advanced(WITH_CYCLES_DEBUG)
mark_as_advanced(WITH_CYCLES_NATIVE_ONLY)
option(WITH_CYCLES_DEVICE_CUDA "Enable Cycles CUDA compute support" ON)
@@ -428,10 +424,6 @@ mark_as_advanced(WITH_CYCLES_NETWORK)
option(WITH_CUDA_DYNLOAD "Dynamically load CUDA libraries at runtime" ON)
mark_as_advanced(WITH_CUDA_DYNLOAD)
# Draw Manager
option(WITH_DRAW_DEBUG "Add extra debug capabilities to Draw Manager" OFF)
mark_as_advanced(WITH_DRAW_DEBUG)
# LLVM
option(WITH_LLVM "Use LLVM" OFF)
if(APPLE)
@@ -612,6 +604,12 @@ if(WIN32)
option(WITH_WINDOWS_FIND_MODULES "Use find_package to locate libraries" OFF)
mark_as_advanced(WITH_WINDOWS_FIND_MODULES)
option(WINDOWS_USE_VISUAL_STUDIO_PROJECT_FOLDERS "Organize the visual studio projects according to source folder structure." ON)
mark_as_advanced(WINDOWS_USE_VISUAL_STUDIO_PROJECT_FOLDERS)
option(WINDOWS_USE_VISUAL_STUDIO_SOURCE_FOLDERS "Organize the source files in filters matching the source folders." ON)
mark_as_advanced(WINDOWS_USE_VISUAL_STUDIO_SOURCE_FOLDERS)
option(WINDOWS_PYTHON_DEBUG "Include the files needed for debugging python scripts with visual studio 2017+." OFF)
mark_as_advanced(WINDOWS_PYTHON_DEBUG)
@@ -629,18 +627,6 @@ if(WIN32)
endif()
if(WIN32 OR XCODE)
option(IDE_GROUP_SOURCES_IN_FOLDERS "Organize the source files in filters matching the source folders." ON)
mark_as_advanced(IDE_GROUP_SOURCES_IN_FOLDERS)
option(IDE_GROUP_PROJECTS_IN_FOLDERS "Organize the projects according to source folder structure." ON)
mark_as_advanced(IDE_GROUP_PROJECTS_IN_FOLDERS)
if (IDE_GROUP_PROJECTS_IN_FOLDERS)
set_property(GLOBAL PROPERTY USE_FOLDERS ON)
endif()
endif()
if(UNIX)
# See WITH_WINDOWS_SCCACHE for Windows.
option(WITH_COMPILER_CCACHE "Use ccache to improve rebuild times (Works with Ninja, Makefiles and Xcode)" OFF)
@@ -850,7 +836,7 @@ if(WITH_PYTHON)
# because UNIX will search for the old Python paths which may not exist.
# giving errors about missing paths before this case is met.
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.9")
message(FATAL_ERROR "At least Python 3.9 is required to build, but found Python ${PYTHON_VERSION}")
message(FATAL_ERROR "At least Python 3.9 is required to build")
endif()
file(GLOB RESULT "${CMAKE_SOURCE_DIR}/release/scripts/addons")
@@ -1598,9 +1584,6 @@ elseif(CMAKE_C_COMPILER_ID MATCHES "Clang")
ADD_CHECK_C_COMPILER_FLAG(C_WARNINGS C_WARN_UNUSED_PARAMETER -Wunused-parameter)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_WARNINGS CXX_WARN_ALL -Wall)
# Using C++20 features while having C++17 as the project language isn't allowed by MSVC.
ADD_CHECK_CXX_COMPILER_FLAG(CXX_WARNINGS CXX_CXX20_DESIGNATOR -Wc++20-designator)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_WARNINGS CXX_WARN_NO_AUTOLOGICAL_COMPARE -Wno-tautological-compare)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_WARNINGS CXX_WARN_NO_UNKNOWN_PRAGMAS -Wno-unknown-pragmas)
ADD_CHECK_CXX_COMPILER_FLAG(CXX_WARNINGS CXX_WARN_NO_CHAR_SUBSCRIPTS -Wno-char-subscripts)
@@ -1722,18 +1705,22 @@ if(WITH_PYTHON)
endif()
endif()
# Select C++17 as the standard for C++ projects.
set(CMAKE_CXX_STANDARD 17)
# If C++17 is not available, downgrading to an earlier standard is NOT OK.
set(CMAKE_CXX_STANDARD_REQUIRED ON)
# Do not enable compiler specific language extentions.
set(CMAKE_CXX_EXTENSIONS OFF)
# Make MSVC properly report the value of the __cplusplus preprocessor macro
# Available MSVC 15.7 (1914) and up, without this it reports 199711L regardless
# of the C++ standard chosen above.
if(MSVC AND MSVC_VERSION GREATER 1913)
string(APPEND CMAKE_CXX_FLAGS " /Zc:__cplusplus")
if(MSVC)
string(APPEND CMAKE_CXX_FLAGS " /std:c++17")
# Make MSVC properly report the value of the __cplusplus preprocessor macro
# Available MSVC 15.7 (1914) and up, without this it reports 199711L regardless
# of the C++ standard chosen above
if(MSVC_VERSION GREATER 1913)
string(APPEND CMAKE_CXX_FLAGS " /Zc:__cplusplus")
endif()
elseif(
CMAKE_COMPILER_IS_GNUCC OR
CMAKE_C_COMPILER_ID MATCHES "Clang" OR
CMAKE_C_COMPILER_ID MATCHES "Intel"
)
string(APPEND CMAKE_CXX_FLAGS " -std=c++17")
else()
message(FATAL_ERROR "Unknown compiler ${CMAKE_C_COMPILER_ID}, can't enable C++17 build")
endif()
# Visual Studio has all standards it supports available by default
@@ -1928,7 +1915,6 @@ if(FIRST_RUN)
info_cfg_option(WITH_IK_ITASC)
info_cfg_option(WITH_IK_SOLVER)
info_cfg_option(WITH_INPUT_NDOF)
info_cfg_option(WITH_INPUT_IME)
info_cfg_option(WITH_INTERNATIONAL)
info_cfg_option(WITH_OPENCOLLADA)
info_cfg_option(WITH_OPENCOLORIO)

View File

@@ -63,7 +63,7 @@ Package Targets
* package_debian: Build a debian package.
* package_pacman: Build an arch linux pacman package.
* package_archive: Build an archive package.
* package_archive: Build an archive package.
Testing Targets
Not associated with building Blender.
@@ -167,7 +167,7 @@ endef
# This makefile is not meant for Windows
ifeq ($(OS),Windows_NT)
$(error On Windows, use "cmd //c make.bat" instead of "make")
$(error On Windows, use "cmd //c make.bat" instead of "make")
endif
# System Vars
@@ -379,7 +379,7 @@ deps: .FORCE
@cmake -H"$(DEPS_SOURCE_DIR)" \
-B"$(DEPS_BUILD_DIR)" \
-DHARVEST_TARGET=$(DEPS_INSTALL_DIR)
-DHARVEST_TARGET=$(DEPS_INSTALL_DIR)
@echo
@echo Building dependencies ...
@@ -456,8 +456,7 @@ project_eclipse: .FORCE
check_cppcheck: .FORCE
$(CMAKE_CONFIG)
cd "$(BUILD_DIR)" ; \
$(PYTHON) \
"$(BLENDER_DIR)/build_files/cmake/cmake_static_check_cppcheck.py" 2> \
$(PYTHON) "$(BLENDER_DIR)/build_files/cmake/cmake_static_check_cppcheck.py" 2> \
"$(BLENDER_DIR)/check_cppcheck.txt"
@echo "written: check_cppcheck.txt"
@@ -519,9 +518,8 @@ source_archive: .FORCE
python3 ./build_files/utils/make_source_archive.py
source_archive_complete: .FORCE
cmake \
-S "$(BLENDER_DIR)/build_files/build_environment" -B"$(BUILD_DIR)/source_archive" \
-DCMAKE_BUILD_TYPE_INIT:STRING=$(BUILD_TYPE) -DPACKAGE_USE_UPSTREAM_SOURCES=OFF
cmake -S "$(BLENDER_DIR)/build_files/build_environment" -B"$(BUILD_DIR)/source_archive" \
-DCMAKE_BUILD_TYPE_INIT:STRING=$(BUILD_TYPE) -DPACKAGE_USE_UPSTREAM_SOURCES=OFF
# This assumes CMake is still using a default `PACKAGE_DIR` variable:
python3 ./build_files/utils/make_source_archive.py --include-packages "$(BUILD_DIR)/source_archive/packages"
@@ -529,11 +527,9 @@ source_archive_complete: .FORCE
INKSCAPE_BIN?="inkscape"
icons: .FORCE
BLENDER_BIN=$(BLENDER_BIN) INKSCAPE_BIN=$(INKSCAPE_BIN) \
"$(BLENDER_DIR)/release/datafiles/blender_icons_update.py"
INKSCAPE_BIN=$(INKSCAPE_BIN) \
"$(BLENDER_DIR)/release/datafiles/prvicons_update.py"
INKSCAPE_BIN=$(INKSCAPE_BIN) \
"$(BLENDER_DIR)/release/datafiles/alert_icons_update.py"
"$(BLENDER_DIR)/release/datafiles/blender_icons_update.py"
BLENDER_BIN=$(BLENDER_BIN) INKSCAPE_BIN=$(INKSCAPE_BIN) \
"$(BLENDER_DIR)/release/datafiles/prvicons_update.py"
icons_geom: .FORCE
BLENDER_BIN=$(BLENDER_BIN) \
@@ -547,7 +543,7 @@ update_code: .FORCE
format: .FORCE
PATH="../lib/${OS_NCASE}_${CPU}/llvm/bin/:../lib/${OS_NCASE}_centos7_${CPU}/llvm/bin/:../lib/${OS_NCASE}/llvm/bin/:$(PATH)" \
$(PYTHON) source/tools/utils_maintenance/clang_format_paths.py $(PATHS)
$(PYTHON) source/tools/utils_maintenance/clang_format_paths.py $(PATHS)
# -----------------------------------------------------------------------------
@@ -557,9 +553,8 @@ format: .FORCE
# Simple version of ./doc/python_api/sphinx_doc_gen.sh with no PDF generation.
doc_py: .FORCE
ASAN_OPTIONS=halt_on_error=0:${ASAN_OPTIONS} \
$(BLENDER_BIN) \
--background -noaudio --factory-startup \
--python doc/python_api/sphinx_doc_gen.py
$(BLENDER_BIN) --background -noaudio --factory-startup \
--python doc/python_api/sphinx_doc_gen.py
sphinx-build -b html -j $(NPROCS) doc/python_api/sphinx-in doc/python_api/sphinx-out
@echo "docs written into: '$(BLENDER_DIR)/doc/python_api/sphinx-out/index.html'"
@@ -568,9 +563,8 @@ doc_doxy: .FORCE
@echo "docs written into: '$(BLENDER_DIR)/doc/doxygen/html/index.html'"
doc_dna: .FORCE
$(BLENDER_BIN) \
--background -noaudio --factory-startup \
--python doc/blender_file_format/BlendFileDnaExporter_25.py
$(BLENDER_BIN) --background -noaudio --factory-startup \
--python doc/blender_file_format/BlendFileDnaExporter_25.py
@echo "docs written into: '$(BLENDER_DIR)/doc/blender_file_format/dna.html'"
doc_man: .FORCE

View File

@@ -56,7 +56,6 @@ else()
endif()
include(cmake/zlib.cmake)
include(cmake/zstd.cmake)
include(cmake/openal.cmake)
include(cmake/png.cmake)
include(cmake/jpeg.cmake)
@@ -82,11 +81,7 @@ if(UNIX)
endif()
include(cmake/openimageio.cmake)
include(cmake/tiff.cmake)
if(WIN32)
include(cmake/flexbison.cmake)
elseif(UNIX AND NOT APPLE)
include(cmake/flex.cmake)
endif()
include(cmake/flexbison.cmake)
include(cmake/osl.cmake)
include(cmake/tbb.cmake)
include(cmake/openvdb.cmake)
@@ -169,7 +164,6 @@ endif()
if(UNIX AND NOT APPLE)
include(cmake/libglu.cmake)
include(cmake/mesa.cmake)
include(cmake/wayland_protocols.cmake)
endif()
include(cmake/harvest.cmake)

View File

@@ -87,10 +87,7 @@ download_source(LIBGLU)
download_source(MESA)
download_source(NASM)
download_source(XR_OPENXR_SDK)
download_source(WL_PROTOCOLS)
download_source(ISPC)
download_source(GMP)
download_source(POTRACE)
download_source(HARU)
download_source(ZSTD)
download_source(FLEX)

View File

@@ -43,12 +43,6 @@ endif()
if(WIN32)
set(EMBREE_BUILD_DIR ${BUILD_MODE}/)
if(BUILD_MODE STREQUAL Debug)
list(APPEND EMBREE_EXTRA_ARGS
-DEMBREE_TBBMALLOC_LIBRARY_NAME=tbbmalloc_debug
-DEMBREE_TBB_LIBRARY_NAME=tbb_debug
)
endif()
else()
set(EMBREE_BUILD_DIR)
endif()

View File

@@ -30,7 +30,6 @@ if(WIN32)
--enable-w32threads
--disable-pthreads
--enable-libopenjpeg
--disable-mediafoundation
)
if("${CMAKE_SIZEOF_VOID_P}" EQUAL "4")
set(FFMPEG_EXTRA_FLAGS

View File

@@ -1,28 +0,0 @@
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENSE BLOCK *****
ExternalProject_Add(external_flex
URL file://${PACKAGE_DIR}/${FLEX_FILE}
URL_HASH ${FLEX_HASH_TYPE}=${FLEX_HASH}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
PREFIX ${BUILD_DIR}/flex
CONFIGURE_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/flex/src/external_flex/ && ${CONFIGURE_COMMAND} --prefix=${LIBDIR}/flex
BUILD_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/flex/src/external_flex/ && make -j${MAKE_THREADS}
INSTALL_COMMAND ${CONFIGURE_ENV} && cd ${BUILD_DIR}/flex/src/external_flex/ && make install
INSTALL_DIR ${LIBDIR}/flex
)

View File

@@ -106,7 +106,6 @@ harvest(llvm/include llvm/include "*")
harvest(llvm/bin llvm/bin "llvm-config")
harvest(llvm/lib llvm/lib "libLLVM*.a")
harvest(llvm/lib llvm/lib "libclang*.a")
harvest(llvm/lib/clang llvm/lib/clang "*.h")
if(APPLE)
harvest(openmp/lib openmp/lib "*")
harvest(openmp/include openmp/include "*.h")
@@ -127,8 +126,6 @@ if(UNIX AND NOT APPLE)
harvest(xml2/include xml2/include "*.h")
harvest(xml2/lib xml2/lib "*.a")
harvest(wayland-protocols/share/wayland-protocols wayland-protocols/share/wayland-protocols/ "*.xml")
else()
harvest(blosc/lib openvdb/lib "*.a")
harvest(xml2/lib opencollada/lib "*.a")
@@ -193,8 +190,6 @@ harvest(potrace/include potrace/include "*.h")
harvest(potrace/lib potrace/lib "*.a")
harvest(haru/include haru/include "*.h")
harvest(haru/lib haru/lib "*.a")
harvest(zstd/include zstd/include "*.h")
harvest(zstd/lib zstd/lib "*.a")
if(UNIX AND NOT APPLE)
harvest(libglu/lib mesa/lib "*.so*")

View File

@@ -35,7 +35,6 @@ elseif(APPLE)
else()
set(ISPC_EXTRA_ARGS_APPLE
-DBISON_EXECUTABLE=/usr/local/opt/bison/bin/bison
-DFLEX_EXECUTABLE=/usr/local/opt/flex/bin/flex
-DARM_ENABLED=Off
)
endif()
@@ -44,7 +43,6 @@ elseif(UNIX)
-DCMAKE_C_COMPILER=${LIBDIR}/llvm/bin/clang
-DCMAKE_CXX_COMPILER=${LIBDIR}/llvm/bin/clang++
-DARM_ENABLED=Off
-DFLEX_EXECUTABLE=${LIBDIR}/flex/bin/flex
)
endif()
@@ -84,9 +82,4 @@ if(WIN32)
external_ispc
external_flexbison
)
elseif(UNIX AND NOT APPLE)
add_dependencies(
external_ispc
external_flex
)
endif()

View File

@@ -66,11 +66,7 @@ ExternalProject_Add(ll
if(MSVC)
if(BUILD_MODE STREQUAL Release)
set(LLVM_HARVEST_COMMAND
${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/llvm/lib ${HARVEST_TARGET}/llvm/lib &&
${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/llvm/include ${HARVEST_TARGET}/llvm/include &&
${CMAKE_COMMAND} -E copy ${LIBDIR}/llvm/bin/clang-format.exe ${HARVEST_TARGET}/llvm/bin/clang-format.exe
)
set(LLVM_HARVEST_COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/llvm/ ${HARVEST_TARGET}/llvm/ )
else()
set(LLVM_HARVEST_COMMAND
${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/llvm/lib/ ${HARVEST_TARGET}/llvm/debug/lib/ &&

View File

@@ -45,6 +45,7 @@ ExternalProject_Add(external_openimagedenoise
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH ${OIDN_HASH_TYPE}=${OIDN_HASH}
PREFIX ${BUILD_DIR}/openimagedenoise
PATCH_COMMAND ${PATCH_CMD} -p 1 -N -d ${BUILD_DIR}/openimagedenoise/src/external_openimagedenoise < ${PATCH_DIR}/oidn.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/openimagedenoise ${DEFAULT_CMAKE_FLAGS} ${OIDN_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/openimagedenoise
)

View File

@@ -16,20 +16,15 @@
#
# ***** END GPL LICENSE BLOCK *****
if(APPLE)
set(OPENMP_PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/openmp/src/external_openmp < ${PATCH_DIR}/openmp.diff)
else()
set(OPENMP_PATCH_COMMAND)
endif()
ExternalProject_Add(external_openmp
URL file://${PACKAGE_DIR}/${OPENMP_FILE}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH ${OPENMP_HASH_TYPE}=${OPENMP_HASH}
PREFIX ${BUILD_DIR}/openmp
PATCH_COMMAND ${OPENMP_PATCH_COMMAND}
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/openmp/src/external_openmp < ${PATCH_DIR}/openmp.diff
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/openmp ${DEFAULT_CMAKE_FLAGS}
INSTALL_COMMAND cd ${BUILD_DIR}/openmp/src/external_openmp-build && install_name_tool -id @rpath/libomp.dylib runtime/src/libomp.dylib && make install
INSTALL_COMMAND cd ${BUILD_DIR}/openmp/src/external_openmp-build && install_name_tool -id @executable_path/../Resources/lib/libomp.dylib runtime/src/libomp.dylib && make install
INSTALL_DIR ${LIBDIR}/openmp
)

View File

@@ -20,10 +20,12 @@ if(WIN32)
set(OSL_CMAKE_CXX_STANDARD_LIBRARIES "kernel32${LIBEXT} user32${LIBEXT} gdi32${LIBEXT} winspool${LIBEXT} shell32${LIBEXT} ole32${LIBEXT} oleaut32${LIBEXT} uuid${LIBEXT} comdlg32${LIBEXT} advapi32${LIBEXT} psapi${LIBEXT}")
set(OSL_FLEX_BISON -DFLEX_EXECUTABLE=${LIBDIR}/flexbison/win_flex.exe -DBISON_EXECUTABLE=${LIBDIR}/flexbison/win_bison.exe)
set(OSL_SIMD_FLAGS -DOIIO_NOSIMD=1 -DOIIO_SIMD=sse2)
SET(OSL_PLATFORM_FLAGS -DLINKSTATIC=ON)
else()
set(OSL_CMAKE_CXX_STANDARD_LIBRARIES)
set(OSL_FLEX_BISON)
set(OSL_OPENIMAGEIO_LIBRARY "${LIBDIR}/openimageio/lib/${LIBPREFIX}OpenImageIO${LIBEXT};${LIBDIR}/openimageio/lib/${LIBPREFIX}OpenImageIO_Util${LIBEXT};${LIBDIR}/png/lib/${LIBPREFIX}png16${LIBEXT};${LIBDIR}/jpg/lib/${LIBPREFIX}jpeg${LIBEXT};${LIBDIR}/tiff/lib/${LIBPREFIX}tiff${LIBEXT};${LIBDIR}/openexr/lib/${LIBPREFIX}IlmImf${OPENEXR_VERSION_POSTFIX}${LIBEXT}")
SET(OSL_PLATFORM_FLAGS)
endif()
set(OSL_ILMBASE_CUSTOM_LIBRARIES "${LIBDIR}/openexr/lib/Imath${OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/Half{OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/IlmThread${OPENEXR_VERSION_POSTFIX}.lib^^${LIBDIR}/openexr/lib/Iex${OPENEXR_VERSION_POSTFIX}.lib")
@@ -49,13 +51,12 @@ set(OSL_EXTRA_ARGS
-DOpenImageIO_ROOT=${LIBDIR}/openimageio/
-DOSL_BUILD_TESTS=OFF
-DOSL_BUILD_MATERIALX=OFF
-DPNG_ROOT=${LIBDIR}/png
-DZLIB_LIBRARY=${LIBDIR}/zlib/lib/${ZLIB_LIBRARY}
-DZLIB_INCLUDE_DIR=${LIBDIR}/zlib/include/
${OSL_FLEX_BISON}
-DCMAKE_CXX_STANDARD_LIBRARIES=${OSL_CMAKE_CXX_STANDARD_LIBRARIES}
-DBUILD_SHARED_LIBS=OFF
-DLINKSTATIC=ON
${OSL_PLATFORM_FLAGS}
-DOSL_BUILD_PLUGINS=OFF
-DSTOP_ON_WARNING=OFF
-DUSE_LLVM_BITCODE=OFF
@@ -68,9 +69,13 @@ set(OSL_EXTRA_ARGS
${OSL_SIMD_FLAGS}
-Dpugixml_ROOT=${LIBDIR}/pugixml
-DUSE_PYTHON=OFF
-DCMAKE_CXX_STANDARD=14
)
# Apple arm64 uses LLVM 11, LLVM 10+ requires C++14
if (APPLE AND "${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
list(APPEND OSL_EXTRA_ARGS -DCMAKE_CXX_STANDARD=14)
endif()
ExternalProject_Add(external_osl
URL file://${PACKAGE_DIR}/${OSL_FILE}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
@@ -88,20 +93,10 @@ add_dependencies(
ll
external_openexr
external_zlib
external_flexbison
external_openimageio
external_pugixml
)
if(WIN32)
add_dependencies(
external_osl
external_flexbison
)
elseif(UNIX AND NOT APPLE)
add_dependencies(
external_osl
external_flex
)
endif()
if(WIN32)
if(BUILD_MODE STREQUAL Release)

View File

@@ -23,7 +23,7 @@ set(PNG_EXTRA_ARGS
)
if(BLENDER_PLATFORM_ARM)
set(PNG_EXTRA_ARGS ${PNG_EXTRA_ARGS} -DPNG_HARDWARE_OPTIMIZATIONS=ON -DPNG_ARM_NEON=on -DCMAKE_SYSTEM_PROCESSOR="aarch64")
set(PNG_EXTRA_ARGS ${PNG_EXTRA_ARGS} -DPNG_HARDWARE_OPTIMIZATIONS=ON -DPNG_ARM_NEON=ON -DCMAKE_SYSTEM_PROCESSOR="aarch64")
endif()
ExternalProject_Add(external_png

View File

@@ -22,7 +22,6 @@ if(WIN32)
-DTBB_BUILD_TBBMALLOC_PROXY=On
-DTBB_BUILD_STATIC=Off
-DTBB_BUILD_TESTS=Off
-DCMAKE_DEBUG_POSTFIX=_debug
)
set(TBB_LIBRARY tbb)
set(TBB_STATIC_LIBRARY Off)
@@ -56,17 +55,17 @@ if(WIN32)
ExternalProject_Add_Step(external_tbb after_install
# findtbb.cmake in some deps *NEEDS* to find tbb_debug.lib even if they are not going to use it
# to make that test pass, we place a copy with the right name in the lib folder.
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${LIBDIR}/tbb/lib/tbb_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb.dll ${LIBDIR}/tbb/bin/tbb_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc.dll ${LIBDIR}/tbb/bin/tbbmalloc_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${HARVEST_TARGET}/tbb/lib/tbb_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.dll ${HARVEST_TARGET}/tbb/lib/tbb_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.dll
# Normal collection of build artifacts
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${HARVEST_TARGET}/tbb/lib/tbb.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb.dll ${HARVEST_TARGET}/tbb/bin/tbb.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.dll ${HARVEST_TARGET}/tbb/lib/tbb.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_proxy.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy.dll
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/tbb/include/ ${HARVEST_TARGET}/tbb/include/
DEPENDEES install
)
@@ -77,12 +76,11 @@ if(WIN32)
# to make that test pass, we place a copy with the right name in the lib folder.
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${LIBDIR}/tbb/lib/tbb.lib
# Normal collection of build artifacts
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${HARVEST_TARGET}/tbb/lib/tbb_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb_debug.dll ${HARVEST_TARGET}/tbb/bin/tbb_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy_debug.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_debug.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_proxy_debug.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_proxy_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${HARVEST_TARGET}/tbb/lib/debug/tbb_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.dll ${HARVEST_TARGET}/tbb/lib/debug/tbb_debug.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy_debug.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/debug/tbbmalloc.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/lib/debug/tbbmalloc_proxy.dll
DEPENDEES install
)
endif()

View File

@@ -152,28 +152,35 @@ set(OPENCOLORIO_HASH 1a2e3478b6cd9a1549f24e1b2205e3f0)
set(OPENCOLORIO_HASH_TYPE MD5)
set(OPENCOLORIO_FILE OpenColorIO-${OPENCOLORIO_VERSION}.tar.gz)
set(LLVM_VERSION 12.0.0)
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-project-${LLVM_VERSION}.src.tar.xz)
set(LLVM_HASH 5a4fab4d7fc84aefffb118ac2c8a4fc0)
set(LLVM_HASH_TYPE MD5)
set(LLVM_FILE llvm-project-${LLVM_VERSION}.src.tar.xz)
if(BLENDER_PLATFORM_ARM)
# Newer version required by ISPC with arm support.
set(LLVM_VERSION 11.0.1)
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-project-${LLVM_VERSION}.src.tar.xz)
set(LLVM_HASH e700af40ab83463e4e9ab0ba3708312e)
set(LLVM_HASH_TYPE MD5)
set(LLVM_FILE llvm-project-${LLVM_VERSION}.src.tar.xz)
if(APPLE)
# Cloth physics test is crashing due to this bug:
# https://bugs.llvm.org/show_bug.cgi?id=50579
set(OPENMP_VERSION 9.0.1)
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${OPENMP_VERSION}/openmp-${OPENMP_VERSION}.src.tar.xz)
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
set(OPENMP_HASH_TYPE MD5)
set(OPENMP_FILE openmp-${OPENMP_VERSION}.src.tar.xz)
else()
set(OPENMP_VERSION ${LLVM_VERSION})
set(OPENMP_HASH ac48ce3e4582ccb82f81ab59eb3fc9dc)
endif()
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${OPENMP_VERSION}/openmp-${OPENMP_VERSION}.src.tar.xz)
set(OPENMP_HASH_TYPE MD5)
set(OPENMP_FILE openmp-${OPENMP_VERSION}.src.tar.xz)
set(LLVM_VERSION 9.0.1)
set(LLVM_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-project-${LLVM_VERSION}.tar.xz)
set(LLVM_HASH b4268e733dfe352960140dc07ef2efcb)
set(LLVM_HASH_TYPE MD5)
set(LLVM_FILE llvm-project-${LLVM_VERSION}.tar.xz)
set(OPENIMAGEIO_VERSION 2.2.15.1)
set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/openmp-${LLVM_VERSION}.src.tar.xz)
set(OPENMP_HASH 6eade16057edbdecb3c4eef9daa2bfcf)
set(OPENMP_HASH_TYPE MD5)
set(OPENMP_FILE openmp-${LLVM_VERSION}.src.tar.xz)
endif()
set(OPENIMAGEIO_VERSION 2.1.15.0)
set(OPENIMAGEIO_URI https://github.com/OpenImageIO/oiio/archive/Release-${OPENIMAGEIO_VERSION}.tar.gz)
set(OPENIMAGEIO_HASH 3db5c5f0b3dc91597c75e5df09eb9072)
set(OPENIMAGEIO_HASH f03aa5e3ac4795af04771ee4146e9832)
set(OPENIMAGEIO_HASH_TYPE MD5)
set(OPENIMAGEIO_FILE OpenImageIO-${OPENIMAGEIO_VERSION}.tar.gz)
@@ -183,9 +190,9 @@ set(TIFF_HASH 2165e7aba557463acc0664e71a3ed424)
set(TIFF_HASH_TYPE MD5)
set(TIFF_FILE tiff-${TIFF_VERSION}.tar.gz)
set(OSL_VERSION 1.11.14.1)
set(OSL_VERSION 1.11.10.0)
set(OSL_URI https://github.com/imageworks/OpenShadingLanguage/archive/Release-${OSL_VERSION}.tar.gz)
set(OSL_HASH 1abd7ce40481771a9fa937f19595d2f2)
set(OSL_HASH dfdc23597aeef083832cbada62211756)
set(OSL_HASH_TYPE MD5)
set(OSL_FILE OpenShadingLanguage-${OSL_VERSION}.tar.gz)
@@ -209,9 +216,9 @@ set(OPENVDB_HASH 01b490be16cc0e15c690f9a153c21461)
set(OPENVDB_HASH_TYPE MD5)
set(OPENVDB_FILE openvdb-${OPENVDB_VERSION}.tar.gz)
set(NANOVDB_GIT_UID dc37d8a631922e7bef46712947dc19b755f3e841)
set(NANOVDB_GIT_UID e62f7a0bf1e27397223c61ddeaaf57edf111b77f)
set(NANOVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/${NANOVDB_GIT_UID}.tar.gz)
set(NANOVDB_HASH e7b9e863ec2f3b04ead171dec2322807)
set(NANOVDB_HASH 90919510bc6ccd630fedc56f748cb199)
set(NANOVDB_HASH_TYPE MD5)
set(NANOVDB_FILE nano-vdb-${NANOVDB_GIT_UID}.tar.gz)
@@ -363,18 +370,12 @@ set(PUGIXML_HASH 0c208b0664c7fb822bf1b49ad035e8fd)
set(PUGIXML_HASH_TYPE MD5)
set(PUGIXML_FILE pugixml-${PUGIXML_VERSION}.tar.gz)
set(FLEXBISON_VERSION 2.5.24)
set(FLEXBISON_VERSION 2.5.5)
set(FLEXBISON_URI http://prdownloads.sourceforge.net/winflexbison/win_flex_bison-${FLEXBISON_VERSION}.zip)
set(FLEXBISON_HASH 6b549d43e34ece0e8ed05af92daa31c4)
set(FLEXBISON_HASH d87a3938194520d904013abef3df10ce)
set(FLEXBISON_HASH_TYPE MD5)
set(FLEXBISON_FILE win_flex_bison-${FLEXBISON_VERSION}.zip)
set(FLEX_VERSION 2.6.4)
set(FLEX_URI https://github.com/westes/flex/releases/download/v${FLEX_VERSION}/flex-${FLEX_VERSION}.tar.gz)
set(FLEX_HASH 2882e3179748cc9f9c23ec593d6adc8d)
set(FLEX_HASH_TYPE MD5)
set(FLEX_FILE flex-${FLEX_VERSION}.tar.gz)
# Libraries to keep Python modules static on Linux.
# NOTE: bzip.org domain does no longer belong to BZip 2 project, so we download
@@ -431,9 +432,9 @@ set(USD_HASH 1dd1e2092d085ed393c1f7c450a4155a)
set(USD_HASH_TYPE MD5)
set(USD_FILE usd-v${USD_VERSION}.tar.gz)
set(OIDN_VERSION 1.4.1)
set(OIDN_VERSION 1.3.0)
set(OIDN_URI https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz)
set(OIDN_HASH df4007b0ab93b1c41cdf223b075d01c0)
set(OIDN_HASH 301a5a0958d375a942014df0679b9270)
set(OIDN_HASH_TYPE MD5)
set(OIDN_FILE oidn-${OIDN_VERSION}.src.tar.gz)
@@ -443,10 +444,10 @@ set(LIBGLU_HASH 151aef599b8259efe9acd599c96ea2a3)
set(LIBGLU_HASH_TYPE MD5)
set(LIBGLU_FILE glu-${LIBGLU_VERSION}.tar.xz)
set(MESA_VERSION 21.1.5)
set(MESA_VERSION 20.3.4)
set(MESA_URI ftp://ftp.freedesktop.org/pub/mesa/mesa-${MESA_VERSION}.tar.xz)
set(MESA_HASH 022c7293074aeeced2278c872db4fa693147c70f8595b076cf3f1ef81520766d)
set(MESA_HASH_TYPE SHA256)
set(MESA_HASH 556338446aef8ae947a789b3e0b5e056)
set(MESA_HASH_TYPE MD5)
set(MESA_FILE mesa-${MESA_VERSION}.tar.xz)
set(NASM_VERSION 2.15.02)
@@ -455,23 +456,25 @@ set(NASM_HASH aded8b796c996a486a56e0515c83e414116decc3b184d88043480b32eb0a8589)
set(NASM_HASH_TYPE SHA256)
set(NASM_FILE nasm-${NASM_VERSION}.tar.gz)
set(XR_OPENXR_SDK_VERSION 1.0.17)
set(XR_OPENXR_SDK_VERSION 1.0.14)
set(XR_OPENXR_SDK_URI https://github.com/KhronosGroup/OpenXR-SDK/archive/release-${XR_OPENXR_SDK_VERSION}.tar.gz)
set(XR_OPENXR_SDK_HASH bf0fd8828837edff01047474e90013e1)
set(XR_OPENXR_SDK_HASH 0df6b2fd6045423451a77ff6bc3e1a75)
set(XR_OPENXR_SDK_HASH_TYPE MD5)
set(XR_OPENXR_SDK_FILE OpenXR-SDK-${XR_OPENXR_SDK_VERSION}.tar.gz)
set(WL_PROTOCOLS_VERSION 1.21)
set(WL_PROTOCOLS_FILE wayland-protocols-${WL_PROTOCOLS_VERSION}.tar.gz)
set(WL_PROTOCOLS_URI https://gitlab.freedesktop.org/wayland/wayland-protocols/-/archive/${WL_PROTOCOLS_VERSION}/${WL_PROTOCOLS_FILE})
set(WL_PROTOCOLS_HASH af5ca07e13517cdbab33504492cef54a)
set(WL_PROTOCOLS_HASH_TYPE MD5)
set(ISPC_VERSION v1.16.0)
set(ISPC_URI https://github.com/ispc/ispc/archive/${ISPC_VERSION}.tar.gz)
set(ISPC_HASH 2e3abedbc0ea9aaec17d6562c632454d)
set(ISPC_HASH_TYPE MD5)
set(ISPC_FILE ispc-${ISPC_VERSION}.tar.gz)
if(BLENDER_PLATFORM_ARM)
# Unreleased version with macOS arm support.
set(ISPC_URI https://github.com/ispc/ispc/archive/f5949c055eb9eeb93696978a3da4bfb3a6a30b35.zip)
set(ISPC_HASH d382fea18d01dbd0cd05d9e1ede36d7d)
set(ISPC_HASH_TYPE MD5)
set(ISPC_FILE f5949c055eb9eeb93696978a3da4bfb3a6a30b35.zip)
else()
set(ISPC_VERSION v1.14.1)
set(ISPC_URI https://github.com/ispc/ispc/archive/${ISPC_VERSION}.tar.gz)
set(ISPC_HASH 968fbc8dfd16a60ba4e32d2e0e03ea7a)
set(ISPC_HASH_TYPE MD5)
set(ISPC_FILE ispc-${ISPC_VERSION}.tar.gz)
endif()
set(GMP_VERSION 6.2.0)
set(GMP_URI https://gmplib.org/download/gmp/gmp-${GMP_VERSION}.tar.xz)
@@ -491,11 +494,5 @@ set(HARU_HASH 4f916aa49c3069b3a10850013c507460)
set(HARU_HASH_TYPE MD5)
set(HARU_FILE libharu-${HARU_VERSION}.tar.gz)
set(ZSTD_VERSION 1.5.0)
set(ZSTD_URI https://github.com/facebook/zstd/releases/download/v${ZSTD_VERSION}/zstd-${ZSTD_VERSION}.tar.gz)
set(ZSTD_HASH 5194fbfa781fcf45b98c5e849651aa7b3b0a008c6b72d4a0db760f3002291e94)
set(ZSTD_HASH_TYPE SHA256)
set(ZSTD_FILE zstd-${ZSTD_VERSION}.tar.gz)
set(SSE2NEON_GIT https://github.com/DLTcollab/sse2neon.git)
set(SSE2NEON_GIT_HASH fe5ff00bb8d19b327714a3c290f3e2ce81ba3525)

View File

@@ -1,27 +0,0 @@
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENSE BLOCK *****
ExternalProject_Add(external_wayland_protocols
URL file://${PACKAGE_DIR}/${WL_PROTOCOLS_FILE}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH ${WL_PROTOCOLS_HASH_TYPE}=${WL_PROTOCOLS_HASH}
PREFIX ${BUILD_DIR}/wayland-protocols
CONFIGURE_COMMAND meson --prefix ${LIBDIR}/wayland-protocols . ../external_wayland_protocols -Dtests=false
BUILD_COMMAND ninja
INSTALL_COMMAND ninja install
)

View File

@@ -1,51 +0,0 @@
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENSE BLOCK *****
set(ZSTD_EXTRA_ARGS
-DZSTD_BUILD_PROGRAMS=OFF
-DZSTD_BUILD_SHARED=OFF
-DZSTD_BUILD_STATIC=ON
-DZSTD_BUILD_TESTS=OFF
-DZSTD_LEGACY_SUPPORT=OFF
-DZSTD_LZ4_SUPPORT=OFF
-DZSTD_LZMA_SUPPORT=OFF
-DZSTD_MULTITHREAD_SUPPORT=ON
-DZSTD_PROGRAMS_LINK_SHARED=OFF
-DZSTD_USE_STATIC_RUNTIME=OFF
-DZSTD_ZLIB_SUPPORT=OFF
)
ExternalProject_Add(external_zstd
URL file://${PACKAGE_DIR}/${ZSTD_FILE}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH ${ZSTD_HASH_TYPE}=${ZSTD_HASH}
PREFIX ${BUILD_DIR}/zstd
SOURCE_SUBDIR build/cmake
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/zstd ${DEFAULT_CMAKE_FLAGS} ${ZSTD_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/zstd
)
if(WIN32)
if(BUILD_MODE STREQUAL Release)
ExternalProject_Add_Step(external_zstd after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/zstd/lib/zstd_static${LIBEXT} ${HARVEST_TARGET}/zstd/lib/zstd_static${LIBEXT}
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/zstd/include/ ${HARVEST_TARGET}/zstd/include/
DEPENDEES install
)
endif()
endif()

View File

@@ -474,25 +474,25 @@ OPENEXR_FORCE_REBUILD=false
OPENEXR_SKIP=false
_with_built_openexr=false
OIIO_VERSION="2.2.15.1"
OIIO_VERSION_SHORT="2.2"
OIIO_VERSION="2.1.15.0"
OIIO_VERSION_SHORT="2.1"
OIIO_VERSION_MIN="2.1.12"
OIIO_VERSION_MAX="2.3.0"
OIIO_VERSION_MAX="2.2.10"
OIIO_FORCE_BUILD=false
OIIO_FORCE_REBUILD=false
OIIO_SKIP=false
LLVM_VERSION="12.0.0"
LLVM_VERSION_SHORT="12.0"
LLVM_VERSION_MIN="11.0"
LLVM_VERSION_MAX="13.0"
LLVM_VERSION="9.0.1"
LLVM_VERSION_SHORT="9.0"
LLVM_VERSION_MIN="6.0"
LLVM_VERSION_MAX="12.0"
LLVM_VERSION_FOUND=""
LLVM_FORCE_BUILD=false
LLVM_FORCE_REBUILD=false
LLVM_SKIP=false
# OSL needs to be compiled for now!
OSL_VERSION="1.11.14.1"
OSL_VERSION="1.11.10.0"
OSL_VERSION_SHORT="1.11"
OSL_VERSION_MIN="1.11"
OSL_VERSION_MAX="2.0"
@@ -553,26 +553,26 @@ EMBREE_FORCE_BUILD=false
EMBREE_FORCE_REBUILD=false
EMBREE_SKIP=false
OIDN_VERSION="1.4.1"
OIDN_VERSION_SHORT="1.4"
OIDN_VERSION_MIN="1.4.0"
OIDN_VERSION_MAX="1.5"
OIDN_VERSION="1.3.0"
OIDN_VERSION_SHORT="1.3"
OIDN_VERSION_MIN="1.3.0"
OIDN_VERSION_MAX="1.4"
OIDN_FORCE_BUILD=false
OIDN_FORCE_REBUILD=false
OIDN_SKIP=false
ISPC_VERSION="1.16.0"
ISPC_VERSION="1.14.1"
FFMPEG_VERSION="4.4"
FFMPEG_VERSION_SHORT="4.4"
FFMPEG_VERSION_MIN="3.0"
FFMPEG_VERSION_MIN="4.4"
FFMPEG_VERSION_MAX="5.0"
FFMPEG_FORCE_BUILD=false
FFMPEG_FORCE_REBUILD=false
FFMPEG_SKIP=false
_ffmpeg_list_sep=";"
XR_OPENXR_VERSION="1.0.17"
XR_OPENXR_VERSION="1.0.14"
XR_OPENXR_VERSION_SHORT="1.0"
XR_OPENXR_VERSION_MIN="1.0.8"
XR_OPENXR_VERSION_MAX="2.0"
@@ -1019,7 +1019,7 @@ PRINT ""
PYTHON_SOURCE=( "https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tgz" )
_boost_version_nodots=`echo "$BOOST_VERSION" | sed -r 's/\./_/g'`
BOOST_SOURCE=( "https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_$_boost_version_nodots.tar.bz2" )
BOOST_SOURCE=( "https://dl.bintray.com/boostorg/release/$BOOST_VERSION/source/boost_$_boost_version_nodots.tar.bz2" )
BOOST_BUILD_MODULES="--with-system --with-filesystem --with-thread --with-regex --with-locale --with-date_time --with-wave --with-iostreams --with-python --with-program_options --with-serialization --with-atomic"
TBB_SOURCE=( "https://github.com/oneapi-src/oneTBB/archive/$TBB_VERSION$TBB_VERSION_UPDATE.tar.gz" )
@@ -1073,7 +1073,7 @@ OPENVDB_SOURCE=( "https://github.com/AcademySoftwareFoundation/openvdb/archive/v
#~ OPENVDB_SOURCE_REPO_BRANCH="dev"
NANOVDB_USE_REPO=false
NANOVDB_SOURCE_REPO_UID="dc37d8a631922e7bef46712947dc19b755f3e841"
NANOVDB_SOURCE_REPO_UID="e62f7a0bf1e27397223c61ddeaaf57edf111b77f"
NANOVDB_SOURCE=( "https://github.com/AcademySoftwareFoundation/openvdb/archive/${NANOVDB_SOURCE_REPO_UID}.tar.gz" )
ALEMBIC_USE_REPO=false
@@ -1108,9 +1108,9 @@ FFMPEG_SOURCE=( "http://ffmpeg.org/releases/ffmpeg-$FFMPEG_VERSION.tar.bz2" )
XR_OPENXR_USE_REPO=false
XR_OPENXR_SOURCE=("https://github.com/KhronosGroup/OpenXR-SDK/archive/release-${XR_OPENXR_VERSION}.tar.gz")
XR_OPENXR_SOURCE_REPO=("https://github.com/KhronosGroup/OpenXR-SDK.git")
XR_OPENXR_REPO_UID="bf21ccb1007bb531b45d9978919a56ea5059c245"
XR_OPENXR_REPO_BRANCH="master"
#~ XR_OPENXR_SOURCE_REPO=("https://github.com/KhronosGroup/OpenXR-SDK.git")
#~ XR_OPENXR_REPO_UID="5900c51562769b03bea699dc0352cae56acb6419d"
#~ XR_OPENXR_REPO_BRANCH="master"
# C++11 is required now
CXXFLAGS_BACK=$CXXFLAGS
@@ -1128,8 +1128,7 @@ Those libraries should be available as packages in all recent distributions (opt
* Basics of dev environment (cmake, gcc, svn , git, ...).
* libjpeg, libpng, libtiff, [openjpeg2], [libopenal].
* libx11, libxcursor, libxi, libxrandr, libxinerama (and other libx... as needed).
* libwayland-client0, libwayland-cursor0, libwayland-egl1, libxkbcommon0, libdbus-1-3, libegl1 (Wayland)
* libsqlite3, libzstd, libbz2, libssl, libfftw3, libxml2, libtinyxml, yasm, libyaml-cpp, flex.
* libsqlite3, libbz2, libssl, libfftw3, libxml2, libtinyxml, yasm, libyaml-cpp.
* libsdl2, libglew, libpugixml, libpotrace, [libgmp], [libglewmx], fontconfig, [libharu/libhpdf].\""
DEPS_SPECIFIC_INFO="\"BUILDABLE DEPENDENCIES:
@@ -1447,7 +1446,9 @@ compile_Python() {
make -j$THREADS && make install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "Python--$PYTHON_VERSION failed to compile, exiting"
exit 1
fi
@@ -1463,9 +1464,6 @@ compile_Python() {
INFO "If you want to force rebuild of this lib, use the --force-python option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "python-$PYTHON_VERSION_SHORT"
# Extra step: install required modules with pip.
@@ -1559,7 +1557,9 @@ compile_Boost() {
--prefix=$_inst --disable-icu boost.locale.icu=off install
./b2 --clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "Boost-$BOOST_VERSION failed to compile, exiting"
exit 1
fi
@@ -1573,9 +1573,7 @@ compile_Boost() {
INFO "If you want to force rebuild of this lib, use the --force-boost option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
# Just always run it, much simpler this way!
run_ldconfig "boost"
}
@@ -1688,7 +1686,9 @@ compile_TBB() {
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "TBB-$TBB_VERSION$TBB_VERSION_UPDATE failed to compile, exiting"
exit 1
fi
@@ -1702,9 +1702,6 @@ compile_TBB() {
INFO "If you want to force rebuild of this lib, use the --force-tbb option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "tbb"
}
@@ -1824,7 +1821,9 @@ compile_OCIO() {
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenColorIO-$OCIO_VERSION failed to compile, exiting"
exit 1
fi
@@ -1838,9 +1837,6 @@ compile_OCIO() {
INFO "If you want to force rebuild of this lib, use the --force-ocio option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "ocio"
}
@@ -1956,7 +1952,9 @@ compile_OPENEXR() {
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenEXR-$OPENEXR_VERSION failed to compile, exiting"
exit 1
fi
@@ -1972,9 +1970,7 @@ compile_OPENEXR() {
_with_built_openexr=true
if [ -d $_inst ]; then
_create_inst_shortcut
fi
# Just always run it, much simpler this way!
run_ldconfig "openexr"
}
@@ -2115,7 +2111,9 @@ compile_OIIO() {
make -j$THREADS && make install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenImageIO-$OIIO_VERSION failed to compile, exiting"
exit 1
fi
@@ -2129,9 +2127,7 @@ compile_OIIO() {
INFO "If you want to force rebuild of this lib, use the --force-oiio option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
# Just always run it, much simpler this way!
run_ldconfig "oiio"
}
@@ -2240,7 +2236,9 @@ compile_LLVM() {
make -j$THREADS && make install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "LLVM-$LLVM_VERSION failed to compile, exiting"
exit 1
fi
@@ -2253,10 +2251,6 @@ compile_LLVM() {
INFO "Own LLVM-$LLVM_VERSION (CLANG included) is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-llvm option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
}
# ----------------------------------------------------------------------------
@@ -2321,7 +2315,6 @@ compile_OSL() {
tar -C $SRC --transform "s,(.*/?)OpenShadingLanguage-[^/]*(.*),\1OpenShadingLanguage-$OSL_VERSION\2,x" \
-xf $_src.tar.gz
fi
patch -d $_src -p1 < $SCRIPT_DIR/patches/osl.diff
fi
cd $_src
@@ -2344,6 +2337,7 @@ compile_OSL() {
cmake_d="-D CMAKE_BUILD_TYPE=Release"
cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
cmake_d="$cmake_d -D BUILD_TESTING=OFF"
cmake_d="$cmake_d -D STOP_ON_WARNING=OFF"
cmake_d="$cmake_d -D OSL_BUILD_PLUGINS=OFF"
cmake_d="$cmake_d -D OSL_BUILD_TESTS=OFF"
@@ -2351,10 +2345,7 @@ compile_OSL() {
cmake_d="$cmake_d -D USE_PARTIO=OFF"
cmake_d="$cmake_d -D OSL_BUILD_MATERIALX=OFF"
cmake_d="$cmake_d -D USE_QT=OFF"
cmake_d="$cmake_d -D USE_Qt5=OFF"
cmake_d="$cmake_d -D USE_PYTHON=OFF"
cmake_d="$cmake_d -D USE_PARTIO=OFF"
cmake_d="$cmake_d -D INSTALL_DOCS=OFF"
if [ $(uname -m) != "aarch64" ]; then
cmake_d="$cmake_d -D USE_SIMD=sse2"
@@ -2376,7 +2367,10 @@ compile_OSL() {
fi
if [ -d $INST/oiio ]; then
cmake_d="$cmake_d -D OpenImageIO_ROOT=$INST/oiio"
cmake_d="$cmake_d -D OPENIMAGEIO_ROOT_DIR=$INST/oiio"
# HACK! SIC!!!!
# Quiet incredible, but if root dir is given, path to lib is found, but not path to include...
cmake_d="$cmake_d -D OPENIMAGEIO_INCLUDE_DIR=$INST/oiio/include"
fi
if [ ! -z $LLVM_VERSION_FOUND ]; then
@@ -2395,7 +2389,9 @@ compile_OSL() {
make -j$THREADS && make install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenShadingLanguage-$OSL_VERSION failed to compile, exiting"
exit 1
fi
@@ -2409,9 +2405,6 @@ compile_OSL() {
INFO "If you want to force rebuild of this lib, use the --force-osl option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "osl"
}
@@ -2512,7 +2505,9 @@ compile_OSD() {
make -j$THREADS && make install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenSubdiv-$OSD_VERSION failed to compile, exiting"
exit 1
fi
@@ -2526,9 +2521,6 @@ compile_OSD() {
INFO "If you want to force rebuild of this lib, use the --force-osd option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "osd"
}
@@ -2618,7 +2610,9 @@ compile_BLOSC() {
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "Blosc-$OPENVDB_BLOSC_VERSION failed to compile, exiting"
exit 1
fi
@@ -2631,9 +2625,6 @@ compile_BLOSC() {
magic_compile_set blosc-$OPENVDB_BLOSC_VERSION $blosc_magic
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "blosc"
}
@@ -2724,7 +2715,9 @@ install_NanoVDB() {
#~ mkdir -p $_inst
#~ cp -r $_src/include $_inst/include
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "NanoVDB-v$OPENVDB_VERSION failed to install, exiting"
exit 1
fi
@@ -2736,10 +2729,6 @@ install_NanoVDB() {
else
INFO "Own NanoVDB-v$OPENVDB_VERSION is up to date, nothing to do!"
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
}
@@ -2748,7 +2737,7 @@ _init_openvdb() {
_git=false
_inst=$INST/openvdb-$OPENVDB_VERSION_SHORT
_inst_shortcut=$INST/openvdb
_openvdb_source=$OPENVDB_SOURCE
if [ "$WITH_NANOVDB" = true ]; then
_openvdb_source=$NANOVDB_SOURCE
@@ -2853,13 +2842,15 @@ compile_OPENVDB() {
if [ -d $INST/blosc ]; then
cmake_d="$cmake_d -D Blosc_ROOT=$INST/blosc"
fi
cmake $cmake_d ..
make -j$THREADS install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenVDB-$OPENVDB_VERSION failed to compile, exiting"
exit 1
fi
@@ -2873,9 +2864,6 @@ compile_OPENVDB() {
INFO "If you want to force rebuild of this lib, use the --force-openvdb option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "openvdb"
if [ "$WITH_NANOVDB" = true ]; then
@@ -2973,7 +2961,9 @@ compile_ALEMBIC() {
make -j$THREADS install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "Alembic-$ALEMBIC_VERSION failed to compile, exiting"
exit 1
fi
@@ -2987,9 +2977,6 @@ compile_ALEMBIC() {
INFO "If you want to force rebuild of this lib, use the --force-alembic option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "alembic"
}
@@ -3074,7 +3061,9 @@ compile_USD() {
make -j$THREADS install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "USD-$USD_VERSION failed to compile, exiting"
exit 1
fi
@@ -3088,9 +3077,6 @@ compile_USD() {
INFO "If you want to force rebuild of this lib, use the --force-usd option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "usd"
}
@@ -3184,7 +3170,9 @@ compile_OpenCOLLADA() {
make -j$THREADS && make install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenCOLLADA-$OPENCOLLADA_VERSION failed to compile, exiting"
exit 1
fi
@@ -3197,10 +3185,6 @@ compile_OpenCOLLADA() {
INFO "Own OpenCOLLADA-$OPENCOLLADA_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-opencollada option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
}
# ----------------------------------------------------------------------------
@@ -3301,7 +3285,9 @@ compile_Embree() {
make -j$THREADS && make install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "Embree-$EMBREE_VERSION failed to compile, exiting"
exit 1
fi
@@ -3314,10 +3300,6 @@ compile_Embree() {
INFO "Own Embree-$EMBREE_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-embree option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
}
# ----------------------------------------------------------------------------
@@ -3380,7 +3362,9 @@ install_ISPC() {
mkdir -p $_inst
cp -r $_src/bin $_inst/bin
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "ISPC-v$ISPC_VERSION failed to install, exiting"
exit 1
fi
@@ -3393,10 +3377,6 @@ install_ISPC() {
INFO "Own ISPC-v$ISPC_VERSION is up to date, nothing to do!"
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
_ispc_path_bin=$_inst/bin
run_ldconfig "ispc"
}
@@ -3496,7 +3476,9 @@ compile_OIDN() {
make -j$THREADS && make install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenImageDenoise-$OIDN_VERSION failed to compile, exiting"
exit 1
fi
@@ -3510,9 +3492,6 @@ compile_OIDN() {
INFO "If you want to force rebuild of this lib, use the --force-oidn option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "oidn"
}
@@ -3629,7 +3608,9 @@ compile_FFmpeg() {
make -j$THREADS && make install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "FFmpeg-$FFMPEG_VERSION failed to compile, exiting"
exit 1
fi
@@ -3642,10 +3623,6 @@ compile_FFmpeg() {
INFO "Own ffmpeg-$FFMPEG_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-ffmpeg option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
}
# ----------------------------------------------------------------------------
@@ -3744,7 +3721,9 @@ compile_XR_OpenXR_SDK() {
make -j$THREADS && make install
make clean
if [ ! -d $_inst ]; then
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "XR-OpenXR-SDK-$XR_OPENXR_VERSION failed to compile, exiting"
exit 1
fi
@@ -3758,9 +3737,6 @@ compile_XR_OpenXR_SDK() {
INFO "If you want to force rebuild of this lib, use the --force-xr-openxr option."
fi
if [ -d $_inst ]; then
_create_inst_shortcut
fi
run_ldconfig "xr-openxr-sdk"
}
@@ -3863,11 +3839,10 @@ install_DEB() {
_packages="gawk cmake cmake-curses-gui build-essential libjpeg-dev libpng-dev libtiff-dev \
git libfreetype6-dev libfontconfig-dev libx11-dev flex bison libxxf86vm-dev \
libxcursor-dev libxi-dev wget libsqlite3-dev libxrandr-dev libxinerama-dev \
libwayland-dev wayland-protocols libegl-dev libxkbcommon-dev libdbus-1-dev linux-libc-dev \
libbz2-dev libncurses5-dev libssl-dev liblzma-dev libreadline-dev \
libopenal-dev libglew-dev yasm $THEORA_DEV $VORBIS_DEV $OGG_DEV \
libsdl2-dev libfftw3-dev patch bzip2 libxml2-dev libtinyxml-dev libjemalloc-dev \
libgmp-dev libpugixml-dev libpotrace-dev libhpdf-dev libzstd-dev"
libgmp-dev libpugixml-dev libpotrace-dev libhpdf-dev"
# libglewmx-dev (broken in deb testing currently...)
VORBIS_USE=true
@@ -4135,8 +4110,6 @@ install_DEB() {
PRINT ""
# Debian OIIO includes again libopencv, without even properly dealing with this dependency...
OIIO_FORCE_BUILD=true
if [ "$OIIO_SKIP" = true ]; then
WARNING "Skipping OpenImageIO installation, as requested..."
elif [ "$OIIO_FORCE_BUILD" = true ]; then
@@ -4535,11 +4508,10 @@ install_RPM() {
_packages="gcc gcc-c++ git make cmake tar bzip2 xz findutils flex bison fontconfig-devel \
libtiff-devel libjpeg-devel libpng-devel sqlite-devel fftw-devel SDL2-devel \
libX11-devel libXi-devel libXcursor-devel libXrandr-devel libXinerama-devel \
wayland-devel wayland-protocols-devel mesa-libEGL-devel libxkbcommon-devel dbus-devel kernel-headers \
wget ncurses-devel readline-devel $OPENJPEG_DEV openal-soft-devel \
glew-devel yasm $THEORA_DEV $VORBIS_DEV $OGG_DEV patch \
libxml2-devel yaml-cpp-devel tinyxml-devel jemalloc-devel \
gmp-devel pugixml-devel potrace-devel libharu-devel libzstd-devel"
gmp-devel pugixml-devel potrace-devel libharu-devel"
OPENJPEG_USE=true
VORBIS_USE=true
@@ -5117,11 +5089,10 @@ install_ARCH() {
BASE_DEVEL=`pacman -Sgq base-devel | sed -e 's/^gcc$/gcc-multilib/g' | paste -s -d' '`
fi
_packages="$BASE_DEVEL git cmake fontconfig flex \
_packages="$BASE_DEVEL git cmake fontconfig \
libxi libxcursor libxrandr libxinerama glew libpng libtiff wget openal \
$OPENJPEG_DEV $VORBIS_DEV $OGG_DEV $THEORA_DEV yasm sdl2 fftw \
libxml2 yaml-cpp tinyxml python-requests jemalloc gmp potrace pugixml libharu \
zstd"
libxml2 yaml-cpp tinyxml python-requests jemalloc gmp potrace pugixml libharu"
OPENJPEG_USE=true
VORBIS_USE=true

View File

@@ -68,32 +68,3 @@
+
return ret;
}
--- a/libavcodec/rl.c
+++ b/libavcodec/rl.c
@@ -71,7 +71,7 @@ av_cold void ff_rl_init(RLTable *rl,
av_cold void ff_rl_init_vlc(RLTable *rl, unsigned static_size)
{
int i, q;
- VLC_TYPE table[1500][2] = {{0}};
+ VLC_TYPE (*table)[2] = av_calloc(sizeof(VLC_TYPE), 1500 * 2);
VLC vlc = { .table = table, .table_allocated = static_size };
av_assert0(static_size <= FF_ARRAY_ELEMS(table));
init_vlc(&vlc, 9, rl->n + 1, &rl->table_vlc[0][1], 4, 2, &rl->table_vlc[0][0], 4, 2, INIT_VLC_USE_NEW_STATIC);
@@ -80,8 +80,10 @@ av_cold void ff_rl_init_vlc(RLTable *rl, unsigned static_size)
int qmul = q * 2;
int qadd = (q - 1) | 1;
- if (!rl->rl_vlc[q])
+ if (!rl->rl_vlc[q]){
+ av_free(table);
return;
+ }
if (q == 0) {
qmul = 1;
@@ -113,4 +115,5 @@ av_cold void ff_rl_init_vlc(RLTable *rl, unsigned static_size)
rl->rl_vlc[q][i].run = run;
}
}
+ av_free(table);
}

View File

@@ -0,0 +1,40 @@
diff -Naur oidn-1.3.0/cmake/FindTBB.cmake external_openimagedenoise/cmake/FindTBB.cmake
--- oidn-1.3.0/cmake/FindTBB.cmake 2021-02-04 16:20:26 -0700
+++ external_openimagedenoise/cmake/FindTBB.cmake 2021-02-12 09:35:53 -0700
@@ -332,20 +332,22 @@
${TBB_ROOT}/lib/${TBB_ARCH}/${TBB_VCVER}
${TBB_ROOT}/lib
)
-
# On Windows, also search the DLL so that the client may install it.
file(GLOB DLL_NAMES
${TBB_ROOT}/bin/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/bin/${LIB_NAME}.dll
+ ${TBB_ROOT}/lib/${LIB_NAME}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB1}.dll
${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB2}.dll
${TBB_ROOT}/../redist/${TBB_ARCH}/tbb/${TBB_VCVER}/${LIB_NAME}.dll
${TBB_ROOT}/../redist/${TBB_ARCH}_win/tbb/${TBB_VCVER}/${LIB_NAME}.dll
)
- list(GET DLL_NAMES 0 DLL_NAME)
- get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
- set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ if (DLL_NAMES)
+ list(GET DLL_NAMES 0 DLL_NAME)
+ get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
+ set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+ endif()
elseif(APPLE)
set(LIB_PATHS ${TBB_ROOT}/lib)
else()
--- external_openimagedenoise/cmake/oidn_ispc.cmake 2021-02-15 17:29:34.000000000 +0100
+++ external_openimagedenoise/cmake/oidn_ispc.cmake2 2021-02-15 17:29:28.000000000 +0100
@@ -98,7 +98,7 @@
elseif(OIDN_ARCH STREQUAL "ARM64")
set(ISPC_ARCHITECTURE "aarch64")
if(APPLE)
- set(ISPC_TARGET_OS "--target-os=ios")
+ set(ISPC_TARGET_OS "--target-os=macos")
endif()
endif()

View File

@@ -34,3 +34,24 @@ diff -Naur orig/src/include/OpenImageIO/platform.h external_openimageio/src/incl
# include <windows.h>
#endif
diff -Naur orig/src/libutil/ustring.cpp external_openimageio/src/libutil/ustring.cpp
--- orig/src/libutil/ustring.cpp 2020-05-11 05:43:52.000000000 +0200
+++ external_openimageio/src/libutil/ustring.cpp 2020-11-26 12:06:08.000000000 +0100
@@ -337,6 +337,8 @@
// the std::string to make it point to our chars! In such a case, the
// destructor will be careful not to allow a deallocation.
+ // Disable internal std::string for Apple silicon based Macs
+#if !(defined(__APPLE__) && defined(__arm64__))
#if defined(__GNUC__) && !defined(_LIBCPP_VERSION) \
&& defined(_GLIBCXX_USE_CXX11_ABI) && _GLIBCXX_USE_CXX11_ABI
// NEW gcc ABI
@@ -382,7 +384,7 @@
return;
}
#endif
-
+#endif
// Remaining cases - just assign the internal string. This may result
// in double allocation for the chars. If you care about that, do
// something special for your platform, much like we did for gcc and


@@ -1,3 +1,18 @@
diff -Naur OpenShadingLanguage-Release-1.9.9/src/cmake/flexbison.cmake.rej external_osl/src/cmake/flexbison.cmake.rej
--- OpenShadingLanguage-Release-1.9.9/src/cmake/flexbison.cmake.rej 1969-12-31 17:00:00 -0700
+++ external_osl/src/cmake/flexbison.cmake.rej 2018-08-24 17:42:11 -0600
@@ -0,0 +1,11 @@
+--- src/cmake/flexbison.cmake 2018-05-01 16:39:02 -0600
++++ src/cmake/flexbison.cmake 2018-08-24 10:24:03 -0600
+@@ -77,7 +77,7 @@
+ DEPENDS ${${compiler_headers}}
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} )
+ ADD_CUSTOM_COMMAND ( OUTPUT ${flexoutputcxx}
+- COMMAND ${FLEX_EXECUTABLE} -o ${flexoutputcxx} "${CMAKE_CURRENT_SOURCE_DIR}/${flexsrc}"
++ COMMAND ${FLEX_EXECUTABLE} ${FLEX_EXTRA_OPTIONS} -o ${flexoutputcxx} "${CMAKE_CURRENT_SOURCE_DIR}/${flexsrc}"
+ MAIN_DEPENDENCY ${flexsrc}
+ DEPENDS ${${compiler_headers}}
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} )
diff -Naur OpenShadingLanguage-Release-1.9.9/src/include/OSL/llvm_util.h external_osl/src/include/OSL/llvm_util.h
--- OpenShadingLanguage-Release-1.9.9/src/include/OSL/llvm_util.h 2018-05-01 16:39:02 -0600
+++ external_osl/src/include/OSL/llvm_util.h 2018-08-25 14:05:00 -0600
@@ -48,50 +63,19 @@ diff -Naur org/CMakeLists.txt external_osl/CMakeLists.txt
set (OSL_NO_DEFAULT_TEXTURESYSTEM OFF CACHE BOOL "Do not use create a raw OIIO::TextureSystem")
if (OSL_NO_DEFAULT_TEXTURESYSTEM)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 990f50d69..46ef7351d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -252,11 +252,9 @@ install (EXPORT OSL_EXPORTED_TARGETS
FILE ${OSL_TARGETS_EXPORT_NAME}
NAMESPACE ${PROJECT_NAME}::)
-
-
-
-osl_add_all_tests()
-
+if (${PROJECT_NAME}_BUILD_TESTS AND NOT ${PROJECT_NAME}_IS_SUBPROJECT)
+ osl_add_all_tests()
+endif ()
if (NOT ${PROJECT_NAME}_IS_SUBPROJECT)
include (packaging)
diff -Naur external_osl_orig/src/cmake/externalpackages.cmake external_osl/src/cmake/externalpackages.cmake
--- external_osl_orig/src/cmake/externalpackages.cmake 2021-06-01 13:44:18 -0600
+++ external_osl/src/cmake/externalpackages.cmake 2021-06-28 07:44:32 -0600
@@ -80,6 +80,7 @@
checked_find_package (ZLIB REQUIRED) # Needed by several packages
+checked_find_package (PNG REQUIRED) # Needed since OIIO needs it
# IlmBase & OpenEXR
checked_find_package (OpenEXR REQUIRED
diff -Naur external_osl_orig/src/liboslcomp/oslcomp.cpp external_osl/src/liboslcomp/oslcomp.cpp
--- external_osl_orig/src/liboslcomp/oslcomp.cpp 2021-06-01 13:44:18 -0600
+++ external_osl/src/liboslcomp/oslcomp.cpp 2021-06-28 09:11:06 -0600
@@ -21,6 +21,13 @@
#if !defined(__STDC_CONSTANT_MACROS)
# define __STDC_CONSTANT_MACROS 1
diff --git a/src/liboslexec/llvm_util.cpp b/src/liboslexec/llvm_util.cpp
index 445f6400..3d468de2 100644
--- a/src/liboslexec/llvm_util.cpp
+++ b/src/liboslexec/llvm_util.cpp
@@ -3430,8 +3430,9 @@ LLVM_Util::call_function (llvm::Value *func, cspan<llvm::Value *> args)
#endif
//llvm_gen_debug_printf (std::string("start ") + std::string(name));
#if OSL_LLVM_VERSION >= 110
- OSL_DASSERT(llvm::isa<llvm::Function>(func));
- llvm::Value *r = builder().CreateCall(llvm::cast<llvm::Function>(func), llvm::ArrayRef<llvm::Value *>(args.data(), args.size()));
+ llvm::Value* r = builder().CreateCall(
+ llvm::cast<llvm::FunctionType>(func->getType()->getPointerElementType()), func,
+ llvm::ArrayRef<llvm::Value*>(args.data(), args.size()));
#else
llvm::Value *r = builder().CreateCall (func, llvm::ArrayRef<llvm::Value *>(args.data(), args.size()));
#endif
+
+// clang uses CALLBACK in its templates which causes issues if it is already defined
+#ifdef _WIN32 && defined(CALLBACK)
+# undef CALLBACK
+#endif
+
+//
#include <clang/Basic/TargetInfo.h>
#include <clang/Frontend/CompilerInstance.h>
#include <clang/Frontend/TextDiagnosticPrinter.h>


@@ -20,24 +20,8 @@ if(NOT CLANG_ROOT_DIR AND NOT $ENV{CLANG_ROOT_DIR} STREQUAL "")
set(CLANG_ROOT_DIR $ENV{CLANG_ROOT_DIR})
endif()
if(NOT LLVM_ROOT_DIR)
if(DEFINED LLVM_VERSION)
message(running llvm-config-${LLVM_VERSION})
find_program(LLVM_CONFIG llvm-config-${LLVM_VERSION})
endif()
if(NOT LLVM_CONFIG)
find_program(LLVM_CONFIG llvm-config)
endif()
execute_process(COMMAND ${LLVM_CONFIG} --prefix
OUTPUT_VARIABLE LLVM_ROOT_DIR
OUTPUT_STRIP_TRAILING_WHITESPACE)
set(LLVM_ROOT_DIR ${LLVM_ROOT_DIR} CACHE PATH "Path to the LLVM installation")
endif()
set(_CLANG_SEARCH_DIRS
${CLANG_ROOT_DIR}
${LLVM_ROOT_DIR}
/opt/lib/clang
)


@@ -472,7 +472,8 @@ if(NOT GFLAGS_FOUND)
gflags_report_not_found(
"Could not find gflags include directory, set GFLAGS_INCLUDE_DIR "
"to directory containing gflags/gflags.h")
endif()
endif(NOT GFLAGS_INCLUDE_DIR OR
NOT EXISTS ${GFLAGS_INCLUDE_DIR})
find_library(GFLAGS_LIBRARY NAMES gflags
PATHS ${GFLAGS_LIBRARY_DIR_HINTS}
@@ -483,7 +484,8 @@ if(NOT GFLAGS_FOUND)
gflags_report_not_found(
"Could not find gflags library, set GFLAGS_LIBRARY "
"to full path to libgflags.")
endif()
endif(NOT GFLAGS_LIBRARY OR
NOT EXISTS ${GFLAGS_LIBRARY})
# gflags typically requires a threading library (which is OS dependent), note
# that this defines the CMAKE_THREAD_LIBS_INIT variable. If we are able to
@@ -558,7 +560,8 @@ if(NOT GFLAGS_FOUND)
gflags_report_not_found(
"Caller defined GFLAGS_INCLUDE_DIR:"
" ${GFLAGS_INCLUDE_DIR} does not contain gflags/gflags.h header.")
endif()
endif(GFLAGS_INCLUDE_DIR AND
NOT EXISTS ${GFLAGS_INCLUDE_DIR}/gflags/gflags.h)
# TODO: This regex for gflags library is pretty primitive, we use lowercase
# for comparison to handle Windows using CamelCase library names, could
# this check be better?
@@ -568,7 +571,8 @@ if(NOT GFLAGS_FOUND)
gflags_report_not_found(
"Caller defined GFLAGS_LIBRARY: "
"${GFLAGS_LIBRARY} does not match gflags.")
endif()
endif(GFLAGS_LIBRARY AND
NOT "${LOWERCASE_GFLAGS_LIBRARY}" MATCHES ".*gflags[^/]*")
gflags_reset_find_library_prefix()


@@ -40,7 +40,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(NanoVDB DEFAULT_MSG
IF(NANOVDB_FOUND)
SET(NANOVDB_INCLUDE_DIRS ${NANOVDB_INCLUDE_DIR})
ENDIF()
ENDIF(NANOVDB_FOUND)
MARK_AS_ADVANCED(
NANOVDB_INCLUDE_DIR


@@ -46,7 +46,7 @@ SET(_opencollada_FIND_COMPONENTS
)
# Fedora openCOLLADA package links these statically
# note that order is important here or it won't link
# note that order is important here ot it wont link
SET(_opencollada_FIND_STATIC_COMPONENTS
buffer
ftoa


@@ -44,7 +44,7 @@ SET(PYTHON_LINKFLAGS "-Xlinker -export-dynamic" CACHE STRING "Linker flags for p
MARK_AS_ADVANCED(PYTHON_LINKFLAGS)
# if the user passes these defines as args, we don't want to overwrite
# if the user passes these defines as args, we dont want to overwrite
SET(_IS_INC_DEF OFF)
SET(_IS_INC_CONF_DEF OFF)
SET(_IS_LIB_DEF OFF)
@@ -143,7 +143,7 @@ IF((NOT _IS_INC_DEF) OR (NOT _IS_INC_CONF_DEF) OR (NOT _IS_LIB_DEF) OR (NOT _IS_
SET(_PYTHON_ABI_FLAGS "${_CURRENT_ABI_FLAGS}")
break()
ELSE()
# ensure we don't find values from 2 different ABI versions
# ensure we dont find values from 2 different ABI versions
IF(NOT _IS_INC_DEF)
UNSET(PYTHON_INCLUDE_DIR CACHE)
ENDIF()


@@ -1,66 +0,0 @@
# - Find Zstd library
# Find the native Zstd includes and library
# This module defines
# ZSTD_INCLUDE_DIRS, where to find zstd.h, Set when
# ZSTD_INCLUDE_DIR is found.
# ZSTD_LIBRARIES, libraries to link against to use Zstd.
# ZSTD_ROOT_DIR, The base directory to search for Zstd.
# This can also be an environment variable.
# ZSTD_FOUND, If false, do not try to use Zstd.
#
# also defined, but not for general use are
# ZSTD_LIBRARY, where to find the Zstd library.
#=============================================================================
# Copyright 2019 Blender Foundation.
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# If ZSTD_ROOT_DIR was defined in the environment, use it.
IF(NOT ZSTD_ROOT_DIR AND NOT $ENV{ZSTD_ROOT_DIR} STREQUAL "")
SET(ZSTD_ROOT_DIR $ENV{ZSTD_ROOT_DIR})
ENDIF()
SET(_zstd_SEARCH_DIRS
${ZSTD_ROOT_DIR}
)
FIND_PATH(ZSTD_INCLUDE_DIR
NAMES
zstd.h
HINTS
${_zstd_SEARCH_DIRS}
PATH_SUFFIXES
include
)
FIND_LIBRARY(ZSTD_LIBRARY
NAMES
zstd
HINTS
${_zstd_SEARCH_DIRS}
PATH_SUFFIXES
lib64 lib
)
# handle the QUIETLY and REQUIRED arguments and set ZSTD_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(Zstd DEFAULT_MSG
ZSTD_LIBRARY ZSTD_INCLUDE_DIR)
IF(ZSTD_FOUND)
SET(ZSTD_LIBRARIES ${ZSTD_LIBRARY})
SET(ZSTD_INCLUDE_DIRS ${ZSTD_INCLUDE_DIR})
ENDIF()
MARK_AS_ADVANCED(
ZSTD_INCLUDE_DIR
ZSTD_LIBRARY
)


@@ -40,7 +40,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(sse2neon DEFAULT_MSG
IF(SSE2NEON_FOUND)
SET(SSE2NEON_INCLUDE_DIRS ${SSE2NEON_INCLUDE_DIR})
ENDIF()
ENDIF(SSE2NEON_FOUND)
MARK_AS_ADVANCED(
SSE2NEON_INCLUDE_DIR


@@ -305,7 +305,7 @@ def file_check_arg_sizes(tu):
for i, node_child in enumerate(children):
children = list(node_child.get_children())
# skip if we don't have an index...
# skip if we dont have an index...
size_def = args_size_definition.get(i, -1)
if size_def == -1:
@@ -354,7 +354,7 @@ def file_check_arg_sizes(tu):
filepath # always the same but useful when running threaded
))
# we don't really care what we are looking at, just scan entire file for
# we dont really care what we are looking at, just scan entire file for
# function calls.
def recursive_func_call_check(node):


@@ -8,9 +8,6 @@ IGNORE_SOURCE = (
# specific source files
"extern/audaspace/",
# Use for `WIN32` only.
"source/creator/blender_launcher_win32.c",
# specific source files
"extern/bullet2/src/BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp",


@@ -82,7 +82,7 @@ def create_nb_project_main():
make_exe = cmake_cache_var("CMAKE_MAKE_PROGRAM")
make_exe_basename = os.path.basename(make_exe)
# --------------- NetBeans specific.
# --------------- NB specific
defines = [("%s=%s" % cdef) if cdef[1] else cdef[0] for cdef in defines]
defines += [cdef.replace("#define", "").strip() for cdef in cmake_compiler_defines()]


@@ -7,6 +7,7 @@
set(WITH_ASSERT_ABORT ON CACHE BOOL "" FORCE)
set(WITH_BUILDINFO OFF CACHE BOOL "" FORCE)
set(WITH_COMPILER_ASAN ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_DEBUG ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_NATIVE_ONLY ON CACHE BOOL "" FORCE)
set(WITH_DOC_MANPAGE OFF CACHE BOOL "" FORCE)
set(WITH_GTESTS ON CACHE BOOL "" FORCE)


@@ -29,7 +29,6 @@ set(WITH_IMAGE_OPENEXR ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_OPENJPEG ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_TIFF ON CACHE BOOL "" FORCE)
set(WITH_INPUT_NDOF ON CACHE BOOL "" FORCE)
set(WITH_INPUT_IME ON CACHE BOOL "" FORCE)
set(WITH_INTERNATIONAL ON CACHE BOOL "" FORCE)
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)


@@ -30,7 +30,6 @@ set(WITH_IMAGE_OPENEXR ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_OPENJPEG ON CACHE BOOL "" FORCE)
set(WITH_IMAGE_TIFF ON CACHE BOOL "" FORCE)
set(WITH_INPUT_NDOF ON CACHE BOOL "" FORCE)
set(WITH_INPUT_IME ON CACHE BOOL "" FORCE)
set(WITH_INTERNATIONAL ON CACHE BOOL "" FORCE)
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)


@@ -208,7 +208,7 @@ function(blender_source_group
)
# if enabled, use the sources directories as filters.
if(IDE_GROUP_SOURCES_IN_FOLDERS)
if(WINDOWS_USE_VISUAL_STUDIO_SOURCE_FOLDERS)
foreach(_SRC ${sources})
# remove ../'s
get_filename_component(_SRC_DIR ${_SRC} REALPATH)
@@ -240,8 +240,8 @@ function(blender_source_group
endforeach()
endif()
# if enabled, set the FOLDER property for the projects
if(IDE_GROUP_PROJECTS_IN_FOLDERS)
# if enabled, set the FOLDER property for visual studio projects
if(WINDOWS_USE_VISUAL_STUDIO_PROJECT_FOLDERS)
get_filename_component(FolderDir ${CMAKE_CURRENT_SOURCE_DIR} DIRECTORY)
string(REPLACE ${CMAKE_SOURCE_DIR} "" FolderDir ${FolderDir})
set_target_properties(${name} PROPERTIES FOLDER ${FolderDir})
@@ -596,6 +596,14 @@ function(SETUP_LIBDIRS)
link_directories(${GMP_LIBPATH})
endif()
if(WITH_GHOST_WAYLAND)
link_directories(
${wayland-client_LIBRARY_DIRS}
${wayland-egl_LIBRARY_DIRS}
${xkbcommon_LIBRARY_DIRS}
${wayland-cursor_LIBRARY_DIRS})
endif()
if(WIN32 AND NOT UNIX)
link_directories(${PTHREADS_LIBPATH})
endif()
@@ -694,7 +702,7 @@ macro(message_first_run)
endmacro()
# when we have warnings as errors applied globally this
# needs to be removed for some external libs which we don't maintain.
# needs to be removed for some external libs which we dont maintain.
# utility macro
macro(remove_cc_flag
@@ -794,7 +802,7 @@ macro(remove_extra_strict_flags)
endmacro()
# note, we can only append flags on a single file so we need to negate the options.
# at the moment we can't shut up ffmpeg deprecations, so use this, but will
# at the moment we cant shut up ffmpeg deprecations, so use this, but will
# probably add more removals here.
macro(remove_strict_c_flags_file
filenames)
@@ -963,6 +971,14 @@ macro(blender_project_hack_post)
unset(_reset_standard_cflags_rel)
unset(_reset_standard_cxxflags_rel)
# ------------------------------------------------------------------
# workaround for omission in cmake 2.8.4's GNU.cmake, fixed in 2.8.5
if(CMAKE_COMPILER_IS_GNUCC)
if(NOT DARWIN)
set(CMAKE_INCLUDE_SYSTEM_FLAG_C "-isystem ")
endif()
endif()
endmacro()
# pair of macros to allow libraries to be specify files to install, but to


@@ -388,10 +388,6 @@ endif()
if(WITH_TBB)
find_package(TBB)
if(NOT TBB_FOUND)
message(WARNING "TBB not found, disabling WITH_TBB")
set(WITH_TBB OFF)
endif()
endif()
if(WITH_POTRACE)
@@ -404,16 +400,32 @@ endif()
# CMake FindOpenMP doesn't know about AppleClang before 3.12, so provide custom flags.
if(WITH_OPENMP)
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
if(CMAKE_C_COMPILER_ID MATCHES "Clang" AND CMAKE_C_COMPILER_VERSION VERSION_GREATER_EQUAL "7.0")
# Use OpenMP from our precompiled libraries.
message(STATUS "Using ${LIBDIR}/openmp for OpenMP")
set(OPENMP_CUSTOM ON)
set(OPENMP_FOUND ON)
set(OpenMP_C_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'")
set(OpenMP_CXX_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'")
set(OpenMP_LIBRARY_DIR "${LIBDIR}/openmp/lib/")
set(OpenMP_LINKER_FLAGS "-L'${OpenMP_LIBRARY_DIR}' -lomp")
set(OpenMP_LIBRARY "${OpenMP_LIBRARY_DIR}/libomp.dylib")
set(OpenMP_LINKER_FLAGS "-L'${LIBDIR}/openmp/lib' -lomp")
# Copy libomp.dylib to allow executables like datatoc and tests to work.
# `@executable_path/../Resources/lib/` `LC_ID_DYLIB` is added by the deps builder.
# For single config generator datatoc, tests etc.
execute_process(
COMMAND mkdir -p ${CMAKE_BINARY_DIR}/Resources/lib
COMMAND cp -p ${LIBDIR}/openmp/lib/libomp.dylib ${CMAKE_BINARY_DIR}/Resources/lib/libomp.dylib
)
# For multi-config generator datatoc, etc.
execute_process(
COMMAND mkdir -p ${CMAKE_BINARY_DIR}/bin/Resources/lib
COMMAND cp -p ${LIBDIR}/openmp/lib/libomp.dylib ${CMAKE_BINARY_DIR}/bin/Resources/lib/libomp.dylib
)
# For multi-config generator tests.
execute_process(
COMMAND mkdir -p ${CMAKE_BINARY_DIR}/bin/tests/Resources/lib
COMMAND cp -p ${LIBDIR}/openmp/lib/libomp.dylib ${CMAKE_BINARY_DIR}/bin/tests/Resources/lib/libomp.dylib
)
endif()
endif()
@@ -441,9 +453,6 @@ if(WITH_HARU)
endif()
endif()
set(ZSTD_ROOT_DIR ${LIBDIR}/zstd)
find_package(Zstd REQUIRED)
if(EXISTS ${LIBDIR})
without_system_libs_end()
endif()
@@ -467,8 +476,10 @@ else()
set(CMAKE_CXX_FLAGS_RELEASE "-O2 -mdynamic-no-pic")
endif()
# Clang has too low template depth of 128 for libmv.
string(APPEND CMAKE_CXX_FLAGS " -ftemplate-depth=1024")
if(${XCODE_VERSION} VERSION_EQUAL 5 OR ${XCODE_VERSION} VERSION_GREATER 5)
# Xcode 5 is always using CLANG, which has too low template depth of 128 for libmv
string(APPEND CMAKE_CXX_FLAGS " -ftemplate-depth=1024")
endif()
# Avoid conflicts with Luxrender, and other plug-ins that may use the same
# libraries as Blender with a different version or build options.
@@ -498,15 +509,3 @@ if(WITH_COMPILER_CCACHE)
endif()
endif()
endif()
# For binaries that are built but not installed (also not distributed) (datatoc,
# makesdna, tests, etc.), we add an rpath to the OpenMP library dir through
# CMAKE_BUILD_RPATH. This avoids having to make many copies of the dylib next to each binary.
#
# For the installed Python module and installed Blender executable, CMAKE_INSTALL_RPATH
# is modified to find the dylib in an adjacent folder. Install step puts the libraries there.
set(CMAKE_SKIP_BUILD_RPATH FALSE)
list(APPEND CMAKE_BUILD_RPATH "${OpenMP_LIBRARY_DIR}")
set(CMAKE_SKIP_INSTALL_RPATH FALSE)
list(APPEND CMAKE_INSTALL_RPATH "@loader_path/../Resources/${BLENDER_VERSION}/lib")


@@ -99,7 +99,6 @@ endif()
find_package_wrapper(JPEG REQUIRED)
find_package_wrapper(PNG REQUIRED)
find_package_wrapper(ZLIB REQUIRED)
find_package_wrapper(Zstd REQUIRED)
find_package_wrapper(Freetype REQUIRED)
if(WITH_PYTHON)
@@ -458,10 +457,6 @@ endif()
if(WITH_TBB)
find_package_wrapper(TBB)
if(NOT TBB_FOUND)
message(WARNING "TBB not found, disabling WITH_TBB")
set(WITH_TBB OFF)
endif()
endif()
if(WITH_XR_OPENXR)
@@ -580,17 +575,17 @@ if(WITH_GHOST_WAYLAND)
pkg_check_modules(wayland-scanner REQUIRED wayland-scanner)
pkg_check_modules(xkbcommon REQUIRED xkbcommon)
pkg_check_modules(wayland-cursor REQUIRED wayland-cursor)
pkg_check_modules(dbus REQUIRED dbus-1)
set(WITH_GL_EGL ON)
list(APPEND PLATFORM_LINKLIBS
${wayland-client_LINK_LIBRARIES}
${wayland-egl_LINK_LIBRARIES}
${xkbcommon_LINK_LIBRARIES}
${wayland-cursor_LINK_LIBRARIES}
${dbus_LINK_LIBRARIES}
)
if(WITH_GHOST_WAYLAND)
list(APPEND PLATFORM_LINKLIBS
${wayland-client_LIBRARIES}
${wayland-egl_LIBRARIES}
${xkbcommon_LIBRARIES}
${wayland-cursor_LIBRARIES}
)
endif()
endif()
if(WITH_GHOST_X11)


@@ -57,6 +57,8 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
endif()
endif()
set_property(GLOBAL PROPERTY USE_FOLDERS ${WINDOWS_USE_VISUAL_STUDIO_PROJECT_FOLDERS})
if(NOT WITH_PYTHON_MODULE)
set_property(DIRECTORY PROPERTY VS_STARTUP_PROJECT blender)
endif()
@@ -151,8 +153,8 @@ if(MSVC_CLANG) # Clangs version of cl doesn't support all flags
string(APPEND CMAKE_CXX_FLAGS " ${CXX_WARN_FLAGS} /nologo /J /Gd /EHsc -Wno-unused-command-line-argument -Wno-microsoft-enum-forward-reference ")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /nologo /J /Gd -Wno-unused-command-line-argument -Wno-microsoft-enum-forward-reference")
else()
string(APPEND CMAKE_CXX_FLAGS " /nologo /J /Gd /MP /EHsc /bigobj /Zc:inline")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /nologo /J /Gd /MP /bigobj /Zc:inline")
string(APPEND CMAKE_CXX_FLAGS " /nologo /J /Gd /MP /EHsc /bigobj")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /nologo /J /Gd /MP /bigobj")
endif()
# X64 ASAN is available and usable on MSVC 16.9 preview 4 and up)
@@ -215,8 +217,8 @@ else()
endif()
if(WITH_WINDOWS_PDB)
set(PDB_INFO_OVERRIDE_FLAGS "${SYMBOL_FORMAT_RELEASE}")
set(PDB_INFO_OVERRIDE_LINKER_FLAGS "/DEBUG /OPT:REF /OPT:ICF /INCREMENTAL:NO")
set(PDB_INFO_OVERRIDE_FLAGS "${SYMBOL_FORMAT_RELEASE}")
set(PDB_INFO_OVERRIDE_LINKER_FLAGS "/DEBUG /OPT:REF /OPT:ICF /INCREMENTAL:NO")
endif()
string(APPEND CMAKE_CXX_FLAGS_DEBUG " /MDd ${SYMBOL_FORMAT}")
@@ -259,10 +261,8 @@ if(NOT DEFINED LIBDIR)
else()
message(FATAL_ERROR "32 bit compiler detected, blender no longer provides pre-build libraries for 32 bit windows, please set the LIBDIR cmake variable to your own library folder")
endif()
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 19.30.30423)
message(STATUS "Visual Studio 2022 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
elseif(MSVC_VERSION GREATER 1919)
# Can be 1910..1912
if(MSVC_VERSION GREATER 1919)
message(STATUS "Visual Studio 2019 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
elseif(MSVC_VERSION GREATER 1909)
@@ -548,6 +548,7 @@ if(WITH_OPENIMAGEIO)
set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG})
set(OPENIMAGEIO_DEFINITIONS "-DUSE_TBB=0")
set(OPENCOLORIO_DEFINITIONS "-DDOpenColorIO_SKIP_IMPORTS")
set(OPENIMAGEIO_IDIFF "${OPENIMAGEIO}/bin/idiff.exe")
add_definitions(-DOIIO_STATIC_DEFINE)
add_definitions(-DOIIO_NO_SSE=1)
@@ -593,7 +594,7 @@ if(WITH_OPENCOLORIO)
debug ${OPENCOLORIO_LIBPATH}/libexpatdMD.lib
debug ${OPENCOLORIO_LIBPATH}/pystring_d.lib
)
set(OPENCOLORIO_DEFINITIONS "-DOpenColorIO_SKIP_IMPORTS")
set(OPENCOLORIO_DEFINITIONS)
endif()
if(WITH_OPENVDB)
@@ -674,11 +675,10 @@ if(WITH_SYSTEM_AUDASPACE)
endif()
if(WITH_TBB)
set(TBB_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbb.lib debug ${LIBDIR}/tbb/lib/tbb_debug.lib)
set(TBB_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbb.lib debug ${LIBDIR}/tbb/lib/debug/tbb_debug.lib)
set(TBB_INCLUDE_DIR ${LIBDIR}/tbb/include)
set(TBB_INCLUDE_DIRS ${TBB_INCLUDE_DIR})
if(WITH_TBB_MALLOC_PROXY)
set(TBB_MALLOC_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbbmalloc.lib debug ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib)
add_definitions(-DWITH_TBB_MALLOC)
endif()
endif()
@@ -873,6 +873,3 @@ if(WITH_HARU)
set(WITH_HARU OFF)
endif()
endif()
set(ZSTD_INCLUDE_DIRS ${LIBDIR}/zstd/include)
set(ZSTD_LIBRARIES ${LIBDIR}/zstd/lib/zstd_static.lib)


@@ -15,15 +15,6 @@ if(WITH_WINDOWS_BUNDLE_CRT)
include(InstallRequiredSystemLibraries)
# ucrtbase(d).dll cannot be in the manifest, due to the way windows 10 handles
# redirects for this dll, for details see T88813.
foreach(lib ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS})
string(FIND ${lib} "ucrtbase" pos)
if(NOT pos EQUAL -1)
list(REMOVE_ITEM CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS ${lib})
install(FILES ${lib} DESTINATION . COMPONENT Libraries)
endif()
endforeach()
# Install the CRT to the blender.crt Sub folder.
install(FILES ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS} DESTINATION ./blender.crt COMPONENT Libraries)


@@ -1,10 +1,8 @@
Pipeline Config
===============
The `yaml` configuration file is used by buildbot build pipeline `update-code` step.
This configuration file is used by buildbot new pipeline for the `update-code` step.
The file allows to set branches or specific commits for both git submodules and svn artifacts. Can also define various build package versions for use by build workers. Especially useful in experimental and release branches.
It will soon be used by the ../utils/make_update.py script.
NOTE:
* The configuration file is ```NOT``` used by the `../utils/make_update.py` script.
* That will implemented in the future.
Both buildbot and developers will eventually use the same configuration file.


@@ -0,0 +1,87 @@
{
"update-code":
{
"git" :
{
"submodules":
[
{ "path": "release/scripts/addons", "branch": "master", "commit_id": "HEAD" },
{ "path": "release/scripts/addons_contrib", "branch": "master", "commit_id": "HEAD" },
{ "path": "release/datafiles/locale", "branch": "master", "commit_id": "HEAD" },
{ "path": "source/tools", "branch": "master", "commit_id": "HEAD" }
]
},
"svn":
{
"tests": { "path": "lib/tests", "branch": "trunk", "commit_id": "HEAD" },
"libraries":
{
"darwin-x86_64": { "path": "lib/darwin", "branch": "trunk", "commit_id": "HEAD" },
"darwin-arm64": { "path": "lib/darwin_arm64", "branch": "trunk", "commit_id": "HEAD" },
"linux-x86_64": { "path": "lib/linux_centos7_x86_64", "branch": "trunk", "commit_id": "HEAD" },
"windows-amd64": { "path": "lib/win64_vc15", "branch": "trunk", "commit_id": "HEAD" }
}
}
},
"buildbot":
{
"gcc":
{
"version": "9.0"
},
"sdks":
{
"optix":
{
"version": "7.1.0"
},
"cuda10":
{
"version": "10.1"
},
"cuda11":
{
"version": "11.3"
}
},
"cmake":
{
"default":
{
"version": "any",
"overrides":
{
}
},
"darwin-x86_64":
{
"overrides":
{
}
},
"darwin-arm64":
{
"overrides":
{
}
},
"linux-x86_64":
{
"overrides":
{
}
},
"windows-amd64":
{
"overrides":
{
}
}
}
}
}


@@ -1,70 +0,0 @@
#
# Used by Buildbot build pipeline make_update.py script only for now
# We intended to update the make_update.py in the branches to use this file eventually
#
update-code:
git:
submodules:
- branch: master
commit_id: HEAD
path: release/scripts/addons
- branch: master
commit_id: HEAD
path: release/scripts/addons_contrib
- branch: master
commit_id: HEAD
path: release/datafiles/locale
- branch: master
commit_id: HEAD
path: source/tools
svn:
libraries:
darwin-arm64:
branch: trunk
commit_id: HEAD
path: lib/darwin_arm64
darwin-x86_64:
branch: trunk
commit_id: HEAD
path: lib/darwin
linux-x86_64:
branch: trunk
commit_id: HEAD
path: lib/linux_centos7_x86_64
windows-amd64:
branch: trunk
commit_id: HEAD
path: lib/win64_vc15
tests:
branch: trunk
commit_id: HEAD
path: lib/tests
benchmarks:
branch: trunk
commit_id: HEAD
path: lib/benchmarks
#
# Buildbot only configs
#
buildbot:
gcc:
version: '9.0.0'
cuda10:
version: '10.1.243'
cuda11:
version: '11.4.1'
optix:
version: '7.1.0'
cmake:
default:
version: any
overrides: {}
darwin-arm64:
overrides: {}
darwin-x86_64:
overrides: {}
linux-x86_64:
overrides: {}
windows-amd64:
overrides: {}


@@ -31,7 +31,6 @@ def parse_arguments():
parser.add_argument("--no-submodules", action="store_true")
parser.add_argument("--use-tests", action="store_true")
parser.add_argument("--svn-command", default="svn")
parser.add_argument("--svn-branch", default=None)
parser.add_argument("--git-command", default="git")
parser.add_argument("--use-centos-libraries", action="store_true")
return parser.parse_args()
@@ -47,7 +46,7 @@ def svn_update(args, release_version):
svn_non_interactive = [args.svn_command, '--non-interactive']
lib_dirpath = os.path.join(get_blender_git_root(), '..', 'lib')
svn_url = make_utils.svn_libraries_base_url(release_version, args.svn_branch)
svn_url = make_utils.svn_libraries_base_url(release_version)
# Checkout precompiled libraries
if sys.platform == 'darwin':
@@ -171,28 +170,26 @@ def submodules_update(args, release_version, branch):
sys.stderr.write("git not found, can't update code\n")
sys.exit(1)
# Update submodules to appropriate given branch,
# falling back to master if none is given and/or found in a sub-repository.
branch_fallback = "master"
if not branch:
branch = branch_fallback
# Update submodules to latest master or appropriate release branch.
if not release_version:
branch = "master"
submodules = [
("release/scripts/addons", branch, branch_fallback),
("release/scripts/addons_contrib", branch, branch_fallback),
("release/datafiles/locale", branch, branch_fallback),
("source/tools", branch, branch_fallback),
("release/scripts/addons", branch),
("release/scripts/addons_contrib", branch),
("release/datafiles/locale", branch),
("source/tools", branch),
]
# Initialize submodules only if needed.
for submodule_path, submodule_branch, submodule_branch_fallback in submodules:
for submodule_path, submodule_branch in submodules:
if not os.path.exists(os.path.join(submodule_path, ".git")):
call([args.git_command, "submodule", "update", "--init", "--recursive"])
break
# Checkout appropriate branch and pull changes.
skip_msg = ""
for submodule_path, submodule_branch, submodule_branch_fallback in submodules:
for submodule_path, submodule_branch in submodules:
cwd = os.getcwd()
try:
os.chdir(submodule_path)
@@ -204,11 +201,6 @@ def submodules_update(args, release_version, branch):
call([args.git_command, "fetch", "origin"])
call([args.git_command, "checkout", submodule_branch])
call([args.git_command, "pull", "--rebase", "origin", submodule_branch])
# If we cannot find the specified branch for this submodule, fallback to default one (aka master).
if make_utils.git_branch(args.git_command) != submodule_branch:
call([args.git_command, "fetch", "origin"])
call([args.git_command, "checkout", submodule_branch_fallback])
call([args.git_command, "pull", "--rebase", "origin", submodule_branch_fallback])
finally:
os.chdir(cwd)
@@ -222,10 +214,6 @@ if __name__ == "__main__":
# Test if we are building a specific release version.
branch = make_utils.git_branch(args.git_command)
if branch == 'HEAD':
sys.stderr.write('Blender git repository is in detached HEAD state, must be in a branch\n')
sys.exit(1)
tag = make_utils.git_tag(args.git_command)
release_version = make_utils.git_branch_release_version(branch, tag)


@@ -70,11 +70,9 @@ def git_branch_release_version(branch, tag):
return release_version
def svn_libraries_base_url(release_version, branch=None):
def svn_libraries_base_url(release_version):
if release_version:
svn_branch = "tags/blender-" + release_version + "-release"
elif branch:
svn_branch = "branches/" + branch
else:
svn_branch = "trunk"
return "https://svn.blender.org/svnroot/bf-blender/" + svn_branch + "/lib/"


@@ -1,12 +1,9 @@
echo No explicit msvc version requested, autodetecting version.
call "%~dp0\detect_msvc2019.cmd"
if %ERRORLEVEL% EQU 0 goto DetectionComplete
call "%~dp0\detect_msvc2017.cmd"
if %ERRORLEVEL% EQU 0 goto DetectionComplete
call "%~dp0\detect_msvc2022.cmd"
call "%~dp0\detect_msvc2019.cmd"
if %ERRORLEVEL% EQU 0 goto DetectionComplete
echo Compiler Detection failed. Use verbose switch for more information.


@@ -1,6 +1,5 @@
if "%BUILD_VS_YEAR%"=="2017" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15
set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"


@@ -19,10 +19,10 @@ if "%WITH_PYDEBUG%"=="1" (
set PYDEBUG_CMAKE_ARGS=-DWINDOWS_PYTHON_DEBUG=On
)
if "%BUILD_VS_YEAR%"=="2017" (
set BUILD_GENERATOR_POST=%WINDOWS_ARCH%
) else (
if "%BUILD_VS_YEAR%"=="2019" (
set BUILD_PLATFORM_SELECT=-A %MSBUILD_PLATFORM%
) else (
set BUILD_GENERATOR_POST=%WINDOWS_ARCH%
)
set BUILD_CMAKE_ARGS=%BUILD_CMAKE_ARGS% -G "Visual Studio %BUILD_VS_VER% %BUILD_VS_YEAR%%BUILD_GENERATOR_POST%" %BUILD_PLATFORM_SELECT% %TESTS_CMAKE_ARGS% %CLANG_CMAKE_ARGS% %ASAN_CMAKE_ARGS% %PYDEBUG_CMAKE_ARGS%


@@ -1,3 +0,0 @@
set BUILD_VS_VER=17
set BUILD_VS_YEAR=2022
call "%~dp0\detect_msvc_vswhere.cmd"


@@ -1,34 +0,0 @@
set SOURCEDIR=%BLENDER_DIR%/doc/python_api/sphinx-in
set BUILDDIR=%BLENDER_DIR%/doc/python_api/sphinx-out
if "%BF_LANG%" == "" set BF_LANG=en
set SPHINXOPTS=-j auto -D language=%BF_LANG%
call "%~dp0\find_sphinx.cmd"
if EXIST "%SPHINX_BIN%" (
goto detect_sphinx_done
)
echo unable to locate sphinx-build, run "set sphinx_BIN=full_path_to_sphinx-build.exe"
exit /b 1
:detect_sphinx_done
call "%~dp0\find_blender.cmd"
if EXIST "%BLENDER_BIN%" (
goto detect_blender_done
)
echo unable to locate blender, run "set BLENDER_BIN=full_path_to_blender.exe"
exit /b 1
:detect_blender_done
%BLENDER_BIN% ^
--background -noaudio --factory-startup ^
--python %BLENDER_DIR%/doc/python_api/sphinx_doc_gen.py
"%SPHINX_BIN%" -b html %SPHINXOPTS% %O% %SOURCEDIR% %BUILDDIR%
:EOF


@@ -1,28 +0,0 @@
REM First see if there is an environment variable set
if EXIST "%BLENDER_BIN%" (
goto detect_blender_done
)
REM Check the build folder next, if ninja was used there will be no
REM debug/release folder
set BLENDER_BIN=%BUILD_DIR%\bin\blender.exe
if EXIST "%BLENDER_BIN%" (
goto detect_blender_done
)
REM Check the release folder next
set BLENDER_BIN=%BUILD_DIR%\bin\release\blender.exe
if EXIST "%BLENDER_BIN%" (
goto detect_blender_done
)
REM Check the debug folder next
set BLENDER_BIN=%BUILD_DIR%\bin\debug\blender.exe
if EXIST "%BLENDER_BIN%" (
goto detect_blender_done
)
REM at this point, we don't know where blender is, clear the variable
set BLENDER_BIN=
:detect_blender_done


@@ -1,21 +0,0 @@
REM First see if there is an environment variable set
if EXIST "%INKSCAPE_BIN%" (
goto detect_inkscape_done
)
REM Then see if inkscape is available in the path
for %%X in (inkscape.exe) do (set INKSCAPE_BIN=%%~$PATH:X)
if EXIST "%INKSCAPE_BIN%" (
goto detect_inkscape_done
)
REM Finally see if it is perhaps installed at the default location
set INKSCAPE_BIN=%ProgramFiles%\Inkscape\bin\inkscape.exe
if EXIST "%INKSCAPE_BIN%" (
goto detect_inkscape_done
)
REM If still not found clear the variable
set INKSCAPE_BIN=
:detect_inkscape_done


@@ -1,23 +0,0 @@
REM First see if there is an environment variable set
if EXIST "%SPHINX_BIN%" (
goto detect_sphinx_done
)
REM Then see if inkscape is available in the path
for %%X in (sphinx-build.exe) do (set SPHINX_BIN=%%~$PATH:X)
if EXIST "%SPHINX_BIN%" (
goto detect_sphinx_done
)
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINX_BIN environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
REM If still not found clear the variable
set SPHINX_BIN=
:detect_sphinx_done


@@ -1,42 +0,0 @@
if EXIST "%PYTHON%" (
goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\39\bin\python.exe
if EXIST %PYTHON% (
goto detect_python_done
)
echo python not found at %PYTHON%
exit /b 1
:detect_python_done
echo found python (%PYTHON%)
call "%~dp0\find_inkscape.cmd"
if EXIST "%INKSCAPE_BIN%" (
goto detect_inkscape_done
)
echo unable to locate inkscape, run "set inkscape_BIN=full_path_to_inkscape.exe"
exit /b 1
:detect_inkscape_done
call "%~dp0\find_blender.cmd"
if EXIST "%BLENDER_BIN%" (
goto detect_blender_done
)
echo unable to locate blender, run "set BLENDER_BIN=full_path_to_blender.exe"
exit /b 1
:detect_blender_done
%PYTHON% -B %BLENDER_DIR%\release\datafiles\blender_icons_update.py
%PYTHON% -B %BLENDER_DIR%\release\datafiles\prvicons_update.py
%PYTHON% -B %BLENDER_DIR%\release\datafiles\alert_icons_update.py
:EOF


@@ -1,29 +0,0 @@
if EXIST %PYTHON% (
goto detect_python_done
)
set PYTHON=%BLENDER_DIR%\..\lib\win64_vc15\python\39\bin\python.exe
if EXIST %PYTHON% (
goto detect_python_done
)
echo python not found at %PYTHON%
exit /b 1
:detect_python_done
echo found python (%PYTHON%)
call "%~dp0\find_blender.cmd"
if EXIST "%BLENDER_BIN%" (
goto detect_blender_done
)
echo unable to locate blender, run "set BLENDER_BIN=full_path_to_blender.exe"
exit /b 1
:detect_blender_done
%PYTHON% -B %BLENDER_DIR%\release\datafiles\blender_icons_geom_update.py
:EOF


@@ -66,14 +66,6 @@ if NOT "%1" == "" (
) else if "%1" == "2019b" (
set BUILD_VS_YEAR=2019
set VSWHERE_ARGS=-products Microsoft.VisualStudio.Product.BuildTools
) else if "%1" == "2022" (
set BUILD_VS_YEAR=2022
) else if "%1" == "2022pre" (
set BUILD_VS_YEAR=2022
set VSWHERE_ARGS=-prerelease
) else if "%1" == "2022b" (
set BUILD_VS_YEAR=2022
set VSWHERE_ARGS=-products Microsoft.VisualStudio.Product.BuildTools
) else if "%1" == "packagename" (
set BUILD_CMAKE_ARGS=%BUILD_CMAKE_ARGS% -DCPACK_OVERRIDE_PACKAGENAME="%2"
shift /1
@@ -107,15 +99,6 @@ if NOT "%1" == "" (
set FORMAT=1
set FORMAT_ARGS=%2 %3 %4 %5 %6 %7 %8 %9
goto EOF
) else if "%1" == "icons" (
set ICONS=1
goto EOF
) else if "%1" == "icons_geom" (
set ICONS_GEOM=1
goto EOF
) else if "%1" == "doc_py" (
set DOC_PY=1
goto EOF
) else (
echo Command "%1" unknown, aborting!
goto ERR


@@ -31,6 +31,3 @@ set PYDEBUG_CMAKE_ARGS=
set FORMAT=
set TEST=
set BUILD_WITH_SCCACHE=
set ICONS=
set ICONS_GEOM=
set DOC_PY=


@@ -31,10 +31,6 @@ echo - 2019 ^(build with visual studio 2019^)
echo - 2019pre ^(build with visual studio 2019 pre-release^)
echo - 2019b ^(build with visual studio 2019 Build Tools^)
echo.
echo Documentation Targets ^(Not associated with building^)
echo - doc_py ^(Generate sphinx python api docs^)
echo.
echo Experimental options
echo - with_opengl_tests ^(enable both the render and draw opengl test suites^)


@@ -123,7 +123,7 @@ def Align(handle):
class BlendFile:
'''
Reads a blendfile and store the header, all the fileblocks, and catalogue
structs found in the DNA fileblock
structs foound in the DNA fileblock
- BlendFile.Header (BlendFileHeader instance)
- BlendFile.Blocks (list of BlendFileBlock instances)


@@ -1,4 +1,4 @@
# Doxyfile 1.9.1
# Doxyfile 1.8.16
# This file describes the settings to be used by the documentation system
# doxygen (www.doxygen.org) for a project.
@@ -38,7 +38,7 @@ PROJECT_NAME = Blender
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = V3.0
PROJECT_NUMBER = "V3.0"
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -227,14 +227,6 @@ QT_AUTOBRIEF = NO
MULTILINE_CPP_IS_BRIEF = NO
# By default Python docstrings are displayed as preformatted text and doxygen's
# special commands cannot be used. By setting PYTHON_DOCSTRING to NO the
# doxygen's special commands can be used and the contents of the docstring
# documentation blocks is shown as doxygen documentation.
# The default value is: YES.
PYTHON_DOCSTRING = YES
# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
# documentation from any documented member that it re-implements.
# The default value is: YES.
@@ -271,6 +263,12 @@ TAB_SIZE = 4
ALIASES =
# This tag can be used to specify a number of word-keyword mappings (TCL only).
# A mapping has the form "name=value". For example adding "class=itcl::class"
# will allow you to use the command class in the itcl::class meaning.
TCL_SUBST =
# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
# only. Doxygen will then generate output that is more tailored for C. For
# instance, some of the names that are used will be different. The list of all
@@ -311,22 +309,19 @@ OPTIMIZE_OUTPUT_SLICE = NO
# parses. With this tag you can assign which parser to use for a given
# extension. Doxygen has a built-in mapping, but you can override or extend it
# using this tag. The format is ext=language, where ext is a file extension, and
# language is one of the parsers supported by doxygen: IDL, Java, JavaScript,
# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL,
# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice,
# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser
# tries to guess whether the code is fixed or free formatted code, this is the
# default for Fortran type files). For instance to make doxygen treat .inc files
# as Fortran files (default is PHP), and .f files as C (default is Fortran),
# use: inc=Fortran f=C.
# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat
# .inc files as Fortran files (default is PHP), and .f files as C (default is
# Fortran), use: inc=Fortran f=C.
#
# Note: For files without extension you can use no_extension as a placeholder.
#
# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
# the files are not read by doxygen. When specifying no_extension you should add
# * to the FILE_PATTERNS.
#
# Note see also the list of default file extension mappings.
# the files are not read by doxygen.
EXTENSION_MAPPING =
@@ -460,19 +455,6 @@ TYPEDEF_HIDES_STRUCT = NO
LOOKUP_CACHE_SIZE = 3
# The NUM_PROC_THREADS specifies the number threads doxygen is allowed to use
# during processing. When set to 0 doxygen will based this on the number of
# cores available in the system. You can set it explicitly to a value larger
# than 0 to get more control over the balance between CPU load and processing
# speed. At this moment only the input processing can be done using multiple
# threads. Since this is still an experimental feature the default is set to 1,
# which efficively disables parallel processing. Please report any issues you
# encounter. Generating dot graphs in parallel is controlled by the
# DOT_NUM_THREADS setting.
# Minimum value: 0, maximum value: 32, default value: 1.
NUM_PROC_THREADS = 1
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
@@ -536,13 +518,6 @@ EXTRACT_LOCAL_METHODS = NO
EXTRACT_ANON_NSPACES = NO
# If this flag is set to YES, the name of an unnamed parameter in a declaration
# will be determined by the corresponding definition. By default unnamed
# parameters remain unnamed in the output.
# The default value is: YES.
RESOLVE_UNNAMED_PARAMS = YES
# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
# undocumented members inside documented classes or files. If set to NO these
# members will be included in the various overviews, but no documentation
@@ -560,8 +535,8 @@ HIDE_UNDOC_MEMBERS = NO
HIDE_UNDOC_CLASSES = NO
# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
# declarations. If set to NO, these declarations will be included in the
# documentation.
# (class|struct|union) declarations. If set to NO, these declarations will be
# included in the documentation.
# The default value is: NO.
HIDE_FRIEND_COMPOUNDS = NO
@@ -580,18 +555,11 @@ HIDE_IN_BODY_DOCS = NO
INTERNAL_DOCS = YES
# With the correct setting of option CASE_SENSE_NAMES doxygen will better be
# able to match the capabilities of the underlying filesystem. In case the
# filesystem is case sensitive (i.e. it supports files in the same directory
# whose names only differ in casing), the option must be set to YES to properly
# deal with such files in case they appear in the input. For filesystems that
# are not case sensitive the option should be be set to NO to properly deal with
# output files written for symbols that only differ in casing, such as for two
# classes, one named CLASS and the other named Class, and to also support
# references to files without having to specify the exact matching casing. On
# Windows (including Cygwin) and MacOS, users should typically set this option
# to NO, whereas on Linux or other Unix flavors it should typically be set to
# YES.
# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
# names in lower-case letters. If set to YES, upper-case letters are also
# allowed. This is useful if you have classes or files whose names only differ
# in case and if your file system supports case sensitive file names. Windows
# (including Cygwin) ands Mac users are advised to set this option to NO.
# The default value is: system dependent.
CASE_SENSE_NAMES = YES
@@ -830,10 +798,7 @@ WARN_IF_DOC_ERROR = YES
WARN_NO_PARAMDOC = NO
# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
# a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS
# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but
# at the end of the doxygen process doxygen will return with a non-zero status.
# Possible values are: NO, YES and FAIL_ON_WARNINGS.
# a warning is encountered.
# The default value is: NO.
WARN_AS_ERROR = NO
@@ -875,8 +840,8 @@ INPUT = doxygen.main.h \
# This tag can be used to specify the character encoding of the source files
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
# documentation (see:
# https://www.gnu.org/software/libiconv/) for the list of possible encodings.
# documentation (see: https://www.gnu.org/software/libiconv/) for the list of
# possible encodings.
# The default value is: UTF-8.
INPUT_ENCODING = UTF-8
@@ -889,15 +854,11 @@ INPUT_ENCODING = UTF-8
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# read by doxygen.
#
# Note the list of default checked file patterns might differ from the list of
# default file extension mappings.
#
# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment),
# *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, *.vhdl,
# *.ucf, *.qsf and *.ice.
# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice.
FILE_PATTERNS =
@@ -1125,6 +1086,13 @@ VERBATIM_HEADERS = YES
ALPHABETICAL_INDEX = YES
# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
# which the alphabetical index list will be split.
# Minimum value: 1, maximum value: 20, default value: 5.
# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
COLS_IN_ALPHA_INDEX = 5
# In case all classes in a project start with a common prefix, all classes will
# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
# can be used to specify a prefix (or a list of prefixes) that should be ignored
@@ -1263,9 +1231,9 @@ HTML_TIMESTAMP = YES
# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
# documentation will contain a main index with vertical navigation menus that
# are dynamically created via JavaScript. If disabled, the navigation index will
# are dynamically created via Javascript. If disabled, the navigation index will
# consists of multiple levels of tabs that are statically embedded in every HTML
# page. Disable this option to support browsers that do not have JavaScript,
# page. Disable this option to support browsers that do not have Javascript,
# like the Qt help browser.
# The default value is: YES.
# This tag requires that the tag GENERATE_HTML is set to YES.
@@ -1295,11 +1263,10 @@ HTML_INDEX_NUM_ENTRIES = 100
# If the GENERATE_DOCSET tag is set to YES, additional index files will be
# generated that can be used as input for Apple's Xcode 3 integrated development
# environment (see:
# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To
# create a documentation set, doxygen will generate a Makefile in the HTML
# output directory. Running make will produce the docset in that directory and
# running make install will install the docset in
# environment (see: https://developer.apple.com/xcode/), introduced with OSX
# 10.5 (Leopard). To create a documentation set, doxygen will generate a
# Makefile in the HTML output directory. Running make will produce the docset in
# that directory and running make install will install the docset in
# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy
# genXcode/_index.html for more information.
@@ -1341,8 +1308,8 @@ DOCSET_PUBLISHER_NAME = Publisher
# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
# (see:
# https://www.microsoft.com/en-us/download/details.aspx?id=21138) on Windows.
# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on
# Windows.
#
# The HTML Help Workshop contains a compiler that can convert all HTML output
# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
@@ -1372,7 +1339,7 @@ CHM_FILE = blender.chm
HHC_LOCATION = "C:/Program Files (x86)/HTML Help Workshop/hhc.exe"
# The GENERATE_CHI flag controls if a separate .chi index file is generated
# (YES) or that it should be included in the main .chm file (NO).
# (YES) or that it should be included in the master .chm file (NO).
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
@@ -1417,8 +1384,7 @@ QCH_FILE =
# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
# Project output. For more information please see Qt Help Project / Namespace
# (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1426,8 +1392,8 @@ QHP_NAMESPACE = org.doxygen.Project
# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
# Help Project output. For more information please see Qt Help Project / Virtual
# Folders (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders).
# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-
# folders).
# The default value is: doc.
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1435,16 +1401,16 @@ QHP_VIRTUAL_FOLDER = doc
# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
# filter to add. For more information please see Qt Help Project / Custom
# Filters (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_CUST_FILTER_NAME =
# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
# custom filter to add. For more information please see Qt Help Project / Custom
# Filters (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_CUST_FILTER_ATTRS =
@@ -1456,9 +1422,9 @@ QHP_CUST_FILTER_ATTRS =
QHP_SECT_FILTER_ATTRS =
# The QHG_LOCATION tag can be used to specify the location (absolute path
# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to
# run qhelpgenerator on the generated .qhp file.
# The QHG_LOCATION tag can be used to specify the location of Qt's
# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
# generated .qhp file.
# This tag requires that the tag GENERATE_QHP is set to YES.
QHG_LOCATION =
@@ -1535,17 +1501,6 @@ TREEVIEW_WIDTH = 246
EXT_LINKS_IN_WINDOW = NO
# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg
# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see
# https://inkscape.org) to generate formulas as SVG images instead of PNGs for
# the HTML output. These images will generally look nicer at scaled resolutions.
# Possible values are: png (the default) and svg (looks nicer but requires the
# pdf2svg or inkscape tool).
# The default value is: png.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_FORMULA_FORMAT = png
# Use this tag to change the font size of LaTeX formulas included as images in
# the HTML documentation. When you change the font size after a successful
# doxygen run you need to manually remove any form_*.png images from the HTML
@@ -1566,14 +1521,8 @@ FORMULA_FONTSIZE = 10
FORMULA_TRANSPARENT = YES
# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands
# to create new LaTeX commands to be used in formulas as building blocks. See
# the section "Including formulas" for details.
FORMULA_MACROFILE =
# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
# https://www.mathjax.org) which uses client side JavaScript for the rendering
# https://www.mathjax.org) which uses client side Javascript for the rendering
# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
# installed or if you want to formulas look prettier in the HTML output. When
# enabled you may also need to install MathJax separately and configure the path
@@ -1585,7 +1534,7 @@ USE_MATHJAX = NO
# When MathJax is enabled you can set the default output format to be used for
# the MathJax output. See the MathJax site (see:
# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details.
# http://docs.mathjax.org/en/latest/output.html) for more details.
# Possible values are: HTML-CSS (which is slower, but has the best
# compatibility), NativeMML (i.e. MathML) and SVG.
# The default value is: HTML-CSS.
@@ -1601,7 +1550,7 @@ MATHJAX_FORMAT = HTML-CSS
# Content Delivery Network so you can quickly see the result without installing
# MathJax. However, it is strongly recommended to install a local copy of
# MathJax from https://www.mathjax.org before deployment.
# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2.
# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/.
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_RELPATH = http://www.mathjax.org/mathjax
@@ -1615,8 +1564,7 @@ MATHJAX_EXTENSIONS =
# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
# of code that will be used on startup of the MathJax code. See the MathJax site
# (see:
# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an
# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
# example see the documentation.
# This tag requires that the tag USE_MATHJAX is set to YES.
@@ -1644,7 +1592,7 @@ MATHJAX_CODEFILE =
SEARCHENGINE = NO
# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
# implemented using a web server instead of a web client using JavaScript. There
# implemented using a web server instead of a web client using Javascript. There
# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
# setting. When disabled, doxygen will generate a PHP script for searching and
# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
@@ -1663,8 +1611,7 @@ SERVER_BASED_SEARCH = NO
#
# Doxygen ships with an example indexer (doxyindexer) and search engine
# (doxysearch.cgi) which are based on the open source search engine library
# Xapian (see:
# https://xapian.org/).
# Xapian (see: https://xapian.org/).
#
# See the section "External Indexing and Searching" for details.
# The default value is: NO.
@@ -1677,9 +1624,8 @@ EXTERNAL_SEARCH = NO
#
# Doxygen ships with an example indexer (doxyindexer) and search engine
# (doxysearch.cgi) which are based on the open source search engine library
# Xapian (see:
# https://xapian.org/). See the section "External Indexing and Searching" for
# details.
# Xapian (see: https://xapian.org/). See the section "External Indexing and
# Searching" for details.
# This tag requires that the tag SEARCHENGINE is set to YES.
SEARCHENGINE_URL =
@@ -1843,11 +1789,9 @@ LATEX_EXTRA_FILES =
PDF_HYPERLINKS = NO
# If the USE_PDFLATEX tag is set to YES, doxygen will use the engine as
# specified with LATEX_CMD_NAME to generate the PDF file directly from the LaTeX
# files. Set this option to YES, to get a higher quality PDF documentation.
#
# See also section LATEX_CMD_NAME for selecting the engine.
# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
# the PDF file directly from the LaTeX files. Set this option to YES, to get a
# higher quality PDF documentation.
# The default value is: YES.
# This tag requires that the tag GENERATE_LATEX is set to YES.
@@ -2182,8 +2126,7 @@ INCLUDE_FILE_PATTERNS =
# recursively expanded use the := operator instead of the = operator.
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
PREDEFINED = BUILD_DATE \
DOXYGEN=1
PREDEFINED = BUILD_DATE
# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
# tag can be used to specify a list of macro names that should be expanded. The
@@ -2360,31 +2303,9 @@ UML_LOOK = YES
# but if the number exceeds 15, the total amount of fields shown is limited to
# 10.
# Minimum value: 0, maximum value: 100, default value: 10.
# This tag requires that the tag UML_LOOK is set to YES.
UML_LIMIT_NUM_FIELDS = 10
# If the DOT_UML_DETAILS tag is set to NO, doxygen will show attributes and
# methods without types and arguments in the UML graphs. If the DOT_UML_DETAILS
# tag is set to YES, doxygen will add type and arguments for attributes and
# methods in the UML graphs. If the DOT_UML_DETAILS tag is set to NONE, doxygen
# will not generate fields with class member information in the UML graphs. The
# class diagrams will look similar to the default class diagrams but using UML
# notation for the relationships.
# Possible values are: NO, YES and NONE.
# The default value is: NO.
# This tag requires that the tag UML_LOOK is set to YES.
DOT_UML_DETAILS = NO
# The DOT_WRAP_THRESHOLD tag can be used to set the maximum number of characters
# to display on a single line. If the actual line length exceeds this threshold
# significantly it will wrapped across multiple lines. Some heuristics are apply
# to avoid ugly line breaks.
# Minimum value: 0, maximum value: 1000, default value: 17.
# This tag requires that the tag HAVE_DOT is set to YES.
DOT_WRAP_THRESHOLD = 17
UML_LIMIT_NUM_FIELDS = 10
# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
# collaboration graphs will show the relations between templates and their
@@ -2575,11 +2496,9 @@ DOT_MULTI_TARGETS = YES
GENERATE_LEGEND = YES
# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate
# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
# files that are used to generate the various graphs.
#
# Note: This setting is not only used for dot files but also for msc and
# plantuml temporary files.
# The default value is: YES.
# This tag requires that the tag HAVE_DOT is set to YES.
DOT_CLEANUP = YES


@@ -122,7 +122,7 @@ is a full-featured 3D application. It supports the entirety of the 3D pipeline -
'''modeling, rigging, animation, simulation, rendering, compositing, motion tracking, and video editing.
Use Blender to create 3D images and animations, films and commercials, content for games, '''
r'''architectural and industrial visualizations, and scientific visualizations.
r'''architectural and industrial visualizatons, and scientific visualizations.
https://www.blender.org''')


@@ -12,7 +12,6 @@ such cases, lock the interface (Render → Lock Interface or
Below is an example of a mesh that is altered from a handler:
"""
def frame_change_pre(scene):
# A triangle that shifts in the z direction
zshift = scene.frame_current * 0.1


@@ -16,12 +16,10 @@ execution_queue = queue.Queue()
def run_in_main_thread(function):
execution_queue.put(function)
def execute_queued_functions():
while not execution_queue.empty():
function = execution_queue.get()
function()
return 1.0
bpy.app.timers.register(execute_queued_functions)


@@ -31,13 +31,11 @@ owner = object()
subscribe_to = bpy.context.object.location
def msgbus_callback(*args):
# This will print:
# Something changed! (1, 2, 3)
print("Something changed!", args)
bpy.msgbus.subscribe_rna(
key=subscribe_to,
owner=owner,


@@ -44,7 +44,7 @@ class OBJECT_OT_object_to_curve(bpy.types.Operator):
# Remove temporary curve.
obj.to_curve_clear()
# Invoke to_curve() with applying modifiers.
curve_with_modifiers = obj.to_curve(depsgraph, apply_modifiers=True)
curve_with_modifiers = obj.to_curve(depsgraph, apply_modifiers = True)
self.report({'INFO'}, f"{len(curve_with_modifiers.splines)} splines in new curve with modifiers.")
# Remove temporary curve.
obj.to_curve_clear()


@@ -21,7 +21,6 @@ batch = batch_for_shader(
},
)
def draw():
shader.bind()
shader.uniform_sampler("image", texture)


@@ -29,7 +29,7 @@ with offscreen.bind():
amount = 10
for i in range(-amount, amount + 1):
x_pos = i / amount
draw_circle_2d((x_pos, 0.0), (1, 1, 1, 1), 0.5, segments=200)
draw_circle_2d((x_pos, 0.0), (1, 1, 1, 1), 0.5, 200)
# Drawing the generated texture in 3D space


@@ -34,9 +34,7 @@ with offscreen.bind():
for i in range(RING_AMOUNT):
draw_circle_2d(
(random.uniform(-1, 1), random.uniform(-1, 1)),
(1, 1, 1, 1), random.uniform(0.1, 1),
segments=20,
)
(1, 1, 1, 1), random.uniform(0.1, 1), 20)
buffer = fb.read_color(0, 0, WIDTH, HEIGHT, 4, 0, 'UBYTE')


@@ -31,8 +31,7 @@ def draw():
context.space_data,
context.region,
view_matrix,
projection_matrix,
True)
projection_matrix)
gpu.state.depth_mask_set(False)
draw_texture_2d(offscreen.texture_color, (10, 10), WIDTH, HEIGHT)


@@ -1,13 +1,2 @@
sphinx==4.1.1
# Sphinx dependencies that are important
Jinja2==3.0.1
Pygments==2.10.0
docutils==0.17.1
snowballstemmer==2.1.0
babel==2.9.1
requests==2.26.0
# Only needed to match the theme used for the official documentation.
# Without this theme, the default theme will be used.
sphinx_rtd_theme==1.0.0
Sphinx==3.5.3
sphinx_rtd_theme==0.5.2


@@ -254,8 +254,6 @@ else:
"gpu.shader",
"gpu.state",
"gpu.texture",
"gpu.platform",
"gpu.capabilities",
"gpu_extras",
"idprop.types",
"mathutils",
@@ -1039,21 +1037,16 @@ context_type_map = {
# context_member: (RNA type, is_collection)
"active_annotation_layer": ("GPencilLayer", False),
"active_bone": ("EditBone", False),
"active_file": ("FileSelectEntry", False),
"active_gpencil_frame": ("GreasePencilLayer", True),
"active_gpencil_layer": ("GPencilLayer", True),
"active_node": ("Node", False),
"active_object": ("Object", False),
"active_operator": ("Operator", False),
"active_pose_bone": ("PoseBone", False),
"active_sequence_strip": ("Sequence", False),
"active_editable_fcurve": ("FCurve", False),
"active_nla_strip": ("NlaStrip", False),
"active_nla_track": ("NlaTrack", False),
"annotation_data": ("GreasePencil", False),
"annotation_data_owner": ("ID", False),
"armature": ("Armature", False),
"asset_library_ref": ("AssetLibraryReference", False),
"bone": ("Bone", False),
"brush": ("Brush", False),
"camera": ("Camera", False),
@@ -1078,7 +1071,6 @@ context_type_map = {
"gpencil_data": ("GreasePencil", False),
"gpencil_data_owner": ("ID", False),
"hair": ("Hair", False),
"id": ("ID", False),
"image_paint_object": ("Object", False),
"lattice": ("Lattice", False),
"light": ("Light", False),
@@ -1107,7 +1099,6 @@ context_type_map = {
"selected_editable_keyframes": ("Keyframe", True),
"selected_editable_objects": ("Object", True),
"selected_editable_sequences": ("Sequence", True),
"selected_files": ("FileSelectEntry", True),
"selected_nla_strips": ("NlaStrip", True),
"selected_nodes": ("Node", True),
"selected_objects": ("Object", True),
@@ -1122,7 +1113,6 @@ context_type_map = {
"texture_slot": ("MaterialTextureSlot", False),
"texture_user": ("ID", False),
"texture_user_property": ("Property", False),
"ui_list": ("UIList", False),
"vertex_paint_object": ("Object", False),
"view_layer": ("ViewLayer", False),
"visible_bones": ("EditBone", True),
@@ -1204,7 +1194,6 @@ def pycontext2sphinx(basepath):
"text_context_dir",
"clip_context_dir",
"sequencer_context_dir",
"file_context_dir",
)
unique = set()
@@ -2002,15 +1991,13 @@ def write_rst_importable_modules(basepath):
"blf": "Font Drawing",
"imbuf": "Image Buffer",
"imbuf.types": "Image Buffer Types",
"gpu": "GPU Module",
"gpu": "GPU Shader Module",
"gpu.types": "GPU Types",
"gpu.matrix": "GPU Matrix Utilities",
"gpu.select": "GPU Select Utilities",
"gpu.shader": "GPU Shader Utilities",
"gpu.state": "GPU State Utilities",
"gpu.texture": "GPU Texture Utilities",
"gpu.platform": "GPU Platform Utilities",
"gpu.capabilities": "GPU Capabilities Utilities",
"bmesh": "BMesh Module",
"bmesh.ops": "BMesh Operators",
"bmesh.types": "BMesh Types",


@@ -111,5 +111,5 @@ if(WITH_MOD_FLUID)
endif()
if (WITH_COMPOSITOR)
add_subdirectory(smaa_areatex)
add_subdirectory(smaa_areatex)
endif()


@@ -86,7 +86,6 @@ AUD_API AUD_SoundInfo AUD_getInfo(AUD_Sound* sound)
info.specs.channels = AUD_CHANNELS_INVALID;
info.specs.rate = AUD_RATE_INVALID;
info.length = 0.0f;
info.start_offset = 0.0f;
try
{
@@ -96,7 +95,6 @@ AUD_API AUD_SoundInfo AUD_getInfo(AUD_Sound* sound)
{
info.specs = convSpecToC(reader->getSpecs());
info.length = reader->getLength() / (float) info.specs.rate;
info.start_offset = reader->getStartOffset();
}
}
catch(Exception&)
@@ -247,7 +245,7 @@ AUD_API int AUD_readSound(AUD_Sound* sound, float* buffer, int length, int sampl
buffer[i * 3] = min;
buffer[i * 3 + 1] = max;
buffer[i * 3 + 2] = sqrt(power / len); // RMS
buffer[i * 3 + 2] = sqrt(power) / len;
if(overallmax < max)
overallmax = max;
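The `AUD_readSound()` hunk above changes the per-bucket RMS term: with `power` accumulating the squared samples of a bucket of `len` samples, the root mean square is `sqrt(power / len)` (average first, then take the root), whereas `sqrt(power) / len` divides the root of the summed energy by the sample count. A minimal sketch of the formula under that reading of the variables; `window_rms` is an illustrative helper, not part of the Audaspace API:

```
#include <cmath>
#include <cstddef>

// Root mean square of a bucket of samples: square, average, then take the root.
// `power` mirrors the accumulator name used in the hunk above.
static float window_rms(const float *samples, std::size_t len)
{
  double power = 0.0;  // running sum of squared samples
  for (std::size_t i = 0; i < len; i++)
    power += double(samples[i]) * double(samples[i]);
  return len ? float(std::sqrt(power / double(len))) : 0.0f;
}
```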


@@ -176,5 +176,4 @@ typedef struct
{
AUD_Specs specs;
float length;
double start_offset;
} AUD_SoundInfo;


@@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
import sys
import os


@@ -70,12 +70,6 @@ public:
*/
virtual int getPosition() const=0;
/**
* Returns the start offset the sound should have to line up with related sources.
* \return The required start offset in seconds.
*/
virtual double getStartOffset() const { return 0.0;}
/**
* Returns the specification of the reader.
* \return The Specs structure.


@@ -265,12 +265,6 @@ protected:
*/
void setSpecs(Specs specs);
/**
* Sets the audio output specification of the device.
* \param specs The output specification.
*/
void setSpecs(DeviceSpecs specs);
/**
* Empty default constructor. To setup the device call the function create()
* and to uninitialize call destroy().


@@ -67,4 +67,4 @@ public:
virtual void read(int& length, bool& eos, sample_t* buffer);
};
AUD_NAMESPACE_END
AUD_NAMESPACE_END


@@ -87,12 +87,6 @@ public:
*/
void setSpecs(Specs specs);
/**
* Sets the target specification for superposing.
* \param specs The target specification.
*/
void setSpecs(DeviceSpecs specs);
/**
* Mixes a buffer.
* \param buffer The buffer to superpose.


@@ -68,7 +68,7 @@ int FFMPEGReader::decode(AVPacket& packet, Buffer& buffer)
for(int i = 0; i < m_frame->nb_samples; i++)
{
std::memcpy(((data_t*)buffer.getBuffer()) + buf_pos + ((m_codecCtx->channels * i) + channel) * single_size,
m_frame->data[channel] + i * single_size, single_size);
m_frame->data[channel] + i * single_size, single_size);
}
}
}
@@ -109,7 +109,7 @@ int FFMPEGReader::decode(AVPacket& packet, Buffer& buffer)
for(int i = 0; i < m_frame->nb_samples; i++)
{
std::memcpy(((data_t*)buffer.getBuffer()) + buf_pos + ((m_codecCtx->channels * i) + channel) * single_size,
m_frame->data[channel] + i * single_size, single_size);
m_frame->data[channel] + i * single_size, single_size);
}
}
}
@@ -126,10 +126,7 @@ int FFMPEGReader::decode(AVPacket& packet, Buffer& buffer)
void FFMPEGReader::init()
{
m_position = 0;
m_start_offset = 0.0f;
m_pkgbuf_left = 0;
m_st_time = 0;
m_duration = 0;
if(avformat_find_stream_info(m_formatCtx, nullptr) < 0)
AUD_THROW(FileException, "File couldn't be read, ffmpeg couldn't find the stream info.");
@@ -137,41 +134,15 @@ void FFMPEGReader::init()
// find audio stream and codec
m_stream = -1;
double dur_sec = 0;
for(unsigned int i = 0; i < m_formatCtx->nb_streams; i++)
{
#ifdef FFMPEG_OLD_CODE
if(m_formatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
if((m_formatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
#else
if(m_formatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
if((m_formatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
#endif
&& (m_stream < 0))
{
AVStream *audio_stream = m_formatCtx->streams[i];
double audio_timebase = av_q2d(audio_stream->time_base);
if (audio_stream->start_time != AV_NOPTS_VALUE)
{
m_st_time = audio_stream->start_time;
}
int64_t ctx_start_time = 0;
if (m_formatCtx->start_time != AV_NOPTS_VALUE) {
ctx_start_time = m_formatCtx->start_time;
}
m_start_offset = m_st_time * audio_timebase - (double)ctx_start_time / AV_TIME_BASE;
if(audio_stream->duration != AV_NOPTS_VALUE)
{
dur_sec = audio_stream->duration * audio_timebase;
}
else
{
/* If the audio starts after the stream start time, subract this from the total duration. */
dur_sec = (double)m_formatCtx->duration / AV_TIME_BASE - m_start_offset;
}
m_stream=i;
break;
}
@@ -242,7 +213,6 @@ void FFMPEGReader::init()
}
m_specs.rate = (SampleRate) m_codecCtx->sample_rate;
m_duration = lround(dur_sec * m_codecCtx->sample_rate);
}
FFMPEGReader::FFMPEGReader(std::string filename) :
@@ -368,17 +338,21 @@ void FFMPEGReader::seek(int position)
{
if(position >= 0)
{
double pts_time_base =
av_q2d(m_formatCtx->streams[m_stream]->time_base);
uint64_t st_time = m_formatCtx->start_time;
uint64_t seek_pos = ((uint64_t)position) * ((uint64_t)AV_TIME_BASE) / ((uint64_t)m_specs.rate);
uint64_t seek_pts = (((uint64_t)position) / ((uint64_t)m_specs.rate)) / pts_time_base;
if(m_st_time != AV_NOPTS_VALUE) {
seek_pts += m_st_time;
if(st_time != AV_NOPTS_VALUE) {
seek_pos += st_time;
}
double pts_time_base =
av_q2d(m_formatCtx->streams[m_stream]->time_base);
uint64_t pts_st_time =
((st_time != AV_NOPTS_VALUE) ? st_time : 0)
/ pts_time_base / (uint64_t) AV_TIME_BASE;
// a value < 0 tells us that seeking failed
if(av_seek_frame(m_formatCtx, m_stream, seek_pts,
if(av_seek_frame(m_formatCtx, -1, seek_pos,
AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY) >= 0)
{
avcodec_flush_buffers(m_codecCtx);
@@ -400,7 +374,7 @@ void FFMPEGReader::seek(int position)
if(packet.pts != AV_NOPTS_VALUE)
{
// calculate real position, and read to frame!
m_position = (packet.pts - m_st_time) * pts_time_base * m_specs.rate;
m_position = (packet.pts - pts_st_time) * pts_time_base * m_specs.rate;
if(m_position < position)
{
@@ -431,7 +405,8 @@ void FFMPEGReader::seek(int position)
int FFMPEGReader::getLength() const
{
// return approximated remaning size
return m_duration - m_position;
return (int)((m_formatCtx->duration * m_codecCtx->sample_rate)
/ AV_TIME_BASE)-m_position;
}
int FFMPEGReader::getPosition() const
@@ -439,11 +414,6 @@ int FFMPEGReader::getPosition() const
return m_position;
}
double FFMPEGReader::getStartOffset() const
{
return m_start_offset;
}
Specs FFMPEGReader::getSpecs() const
{
return m_specs.specs;
@@ -480,13 +450,11 @@ void FFMPEGReader::read(int& length, bool& eos, sample_t* buffer)
// decode the package
pkgbuf_pos = decode(packet, m_pkgbuf);
if (packet.pts >= m_st_time) {
// copy to output buffer
data_size = std::min(pkgbuf_pos, left * sample_size);
m_convert((data_t*) buf, (data_t*) m_pkgbuf.getBuffer(), data_size / AUD_FORMAT_SIZE(m_specs.format));
buf += data_size / AUD_FORMAT_SIZE(m_specs.format);
left -= data_size / sample_size;
}
// copy to output buffer
data_size = std::min(pkgbuf_pos, left * sample_size);
m_convert((data_t*) buf, (data_t*) m_pkgbuf.getBuffer(), data_size / AUD_FORMAT_SIZE(m_specs.format));
buf += data_size / AUD_FORMAT_SIZE(m_specs.format);
left -= data_size / sample_size;
}
av_packet_unref(&packet);
}
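One side of the `FFMPEGReader` hunks above derives `m_start_offset` from the audio stream's `start_time` (expressed in the stream's own time base) minus the container's `start_time` (expressed in `AV_TIME_BASE` units). A minimal sketch of that calculation against the plain FFmpeg API, assuming an already opened `AVFormatContext` and a chosen audio stream index; the helper name is illustrative:

```
#include <cstdint>

extern "C" {
#include <libavformat/avformat.h>
}

// Offset, in seconds, of an audio stream relative to the start of its container.
// Mirrors the hunk above: stream start time converted via its own time base,
// minus the container start time converted from AV_TIME_BASE units.
static double audio_start_offset(const AVFormatContext *fmt, unsigned int stream_index)
{
  const AVStream *st = fmt->streams[stream_index];
  const double time_base = av_q2d(st->time_base);

  const int64_t st_time = (st->start_time != AV_NOPTS_VALUE) ? st->start_time : 0;
  const int64_t ctx_start = (fmt->start_time != AV_NOPTS_VALUE) ? fmt->start_time : 0;

  return st_time * time_base - double(ctx_start) / AV_TIME_BASE;
}
```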


@@ -54,22 +54,6 @@ private:
*/
int m_position;
/**
* The start offset in seconds relative to the media container start time.
* IE how much the sound should be delayed to be kept in sync with the rest of the containter streams.
*/
double m_start_offset;
/**
* The start time pts of the stream. All packets before this timestamp shouldn't be played back (only decoded).
*/
int64_t m_st_time;
/**
* The duration of the audio stream in samples.
*/
int64_t m_duration;
/**
* The specification of the audio data.
*/
@@ -198,7 +182,6 @@ public:
virtual void seek(int position);
virtual int getLength() const;
virtual int getPosition() const;
virtual double getStartOffset() const;
virtual Specs getSpecs() const;
virtual void read(int& length, bool& eos, sample_t* buffer);
};


@@ -32,24 +32,17 @@ void PulseAudioDevice::PulseAudio_state_callback(pa_context *context, void *data
device->m_state = AUD_pa_context_get_state(context);
}
void PulseAudioDevice::PulseAudio_request(pa_stream *stream, size_t total_bytes, void *data)
void PulseAudioDevice::PulseAudio_request(pa_stream *stream, size_t num_bytes, void *data)
{
PulseAudioDevice* device = (PulseAudioDevice*)data;
void* buffer;
while(total_bytes > 0)
{
size_t num_bytes = total_bytes;
AUD_pa_stream_begin_write(stream, &buffer, &num_bytes);
AUD_pa_stream_begin_write(stream, &buffer, &num_bytes);
device->mix((data_t*)buffer, num_bytes / AUD_DEVICE_SAMPLE_SIZE(device->m_specs));
device->mix((data_t*)buffer, num_bytes / AUD_DEVICE_SAMPLE_SIZE(device->m_specs));
AUD_pa_stream_write(stream, buffer, num_bytes, nullptr, 0, PA_SEEK_RELATIVE);
total_bytes -= num_bytes;
}
AUD_pa_stream_write(stream, buffer, num_bytes, nullptr, 0, PA_SEEK_RELATIVE);
}
void PulseAudioDevice::PulseAudio_underflow(pa_stream *stream, void *data)
@@ -85,7 +78,6 @@ void PulseAudioDevice::runMixingThread()
if(shouldStop())
{
AUD_pa_stream_cork(m_stream, 1, nullptr, nullptr);
AUD_pa_stream_flush(m_stream, nullptr, nullptr);
doStop();
return;
}
@@ -94,15 +86,13 @@ void PulseAudioDevice::runMixingThread()
if(AUD_pa_stream_is_corked(m_stream))
AUD_pa_stream_cork(m_stream, 0, nullptr, nullptr);
// similar to AUD_pa_mainloop_iterate(m_mainloop, false, nullptr); except with a longer timeout
AUD_pa_mainloop_prepare(m_mainloop, 1 << 14);
AUD_pa_mainloop_poll(m_mainloop);
AUD_pa_mainloop_dispatch(m_mainloop);
AUD_pa_mainloop_iterate(m_mainloop, true, nullptr);
}
}
PulseAudioDevice::PulseAudioDevice(std::string name, DeviceSpecs specs, int buffersize) :
m_state(PA_CONTEXT_UNCONNECTED),
m_buffersize(buffersize),
m_underflows(0)
{
m_mainloop = AUD_pa_mainloop_new();
@@ -193,9 +183,6 @@ PulseAudioDevice::PulseAudioDevice(std::string name, DeviceSpecs specs, int buff
AUD_pa_stream_set_write_callback(m_stream, PulseAudio_request, this);
AUD_pa_stream_set_underflow_callback(m_stream, PulseAudio_underflow, this);
buffersize *= AUD_DEVICE_SAMPLE_SIZE(m_specs);
m_buffersize = buffersize;
pa_buffer_attr buffer_attr;
buffer_attr.fragsize = -1U;
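One version of the write callback above serves each request in chunks, because `pa_stream_begin_write()` can hand back a smaller buffer than the amount asked for, so the loop keeps writing until the requested byte count is covered. A minimal sketch of that loop against the plain PulseAudio API (the plugin itself goes through dynamically loaded `AUD_pa_*` wrappers); `fill_audio()` is a hypothetical stand-in for the device's `mix()` call:

```
#include <pulse/pulseaudio.h>
#include <cstring>

// Stand-in for the device's mix() call: write `bytes` bytes of audio into `dst`.
static void fill_audio(void *dst, size_t bytes)
{
  std::memset(dst, 0, bytes);  // silence, for illustration only
}

// pa_stream_request_cb_t: invoked when the server wants `total_bytes` more audio.
static void stream_request_cb(pa_stream *stream, size_t total_bytes, void * /*userdata*/)
{
  while (total_bytes > 0) {
    void *buffer = nullptr;
    size_t num_bytes = total_bytes;  // preferred size; may be reduced by the call below
    if (pa_stream_begin_write(stream, &buffer, &num_bytes) < 0 || num_bytes == 0)
      break;  // no writable buffer available
    fill_audio(buffer, num_bytes);
    pa_stream_write(stream, buffer, num_bytes, nullptr, 0, PA_SEEK_RELATIVE);
    total_bytes -= num_bytes;
  }
}
```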


@@ -59,7 +59,7 @@ private:
* \param num_bytes The length in bytes to be supplied.
* \param data The PulseAudio device.
*/
AUD_LOCAL static void PulseAudio_request(pa_stream* stream, size_t total_bytes, void* data);
AUD_LOCAL static void PulseAudio_request(pa_stream* stream, size_t num_bytes, void* data);
/**
* Reports an underflow from the PulseAudio server.


@@ -24,7 +24,6 @@ PULSEAUDIO_SYMBOL(pa_context_unref);
PULSEAUDIO_SYMBOL(pa_stream_begin_write);
PULSEAUDIO_SYMBOL(pa_stream_connect_playback);
PULSEAUDIO_SYMBOL(pa_stream_cork);
PULSEAUDIO_SYMBOL(pa_stream_flush);
PULSEAUDIO_SYMBOL(pa_stream_is_corked);
PULSEAUDIO_SYMBOL(pa_stream_new);
PULSEAUDIO_SYMBOL(pa_stream_set_buffer_attr);
@@ -36,6 +35,3 @@ PULSEAUDIO_SYMBOL(pa_mainloop_free);
PULSEAUDIO_SYMBOL(pa_mainloop_get_api);
PULSEAUDIO_SYMBOL(pa_mainloop_new);
PULSEAUDIO_SYMBOL(pa_mainloop_iterate);
PULSEAUDIO_SYMBOL(pa_mainloop_prepare);
PULSEAUDIO_SYMBOL(pa_mainloop_poll);
PULSEAUDIO_SYMBOL(pa_mainloop_dispatch);


@@ -31,81 +31,65 @@ template <class T> void SafeRelease(T **ppT)
}
}
HRESULT WASAPIDevice::setupRenderClient(IAudioRenderClient*& render_client, UINT32& buffer_size)
void WASAPIDevice::runMixingThread()
{
const IID IID_IAudioRenderClient = __uuidof(IAudioRenderClient);
UINT32 buffer_size;
UINT32 padding;
UINT32 length;
data_t* buffer;
HRESULT result;
IAudioRenderClient* render_client = nullptr;
if(FAILED(result = m_audio_client->GetBufferSize(&buffer_size)))
return result;
{
std::lock_guard<ILockable> lock(*this);
if(FAILED(result = m_audio_client->GetService(IID_IAudioRenderClient, reinterpret_cast<void**>(&render_client))))
return result;
const IID IID_IAudioRenderClient = __uuidof(IAudioRenderClient);
if(FAILED(result = m_audio_client->GetCurrentPadding(&padding)))
return result;
if(FAILED(m_audio_client->GetBufferSize(&buffer_size)))
goto init_error;
length = buffer_size - padding;
if(FAILED(m_audio_client->GetService(IID_IAudioRenderClient, reinterpret_cast<void**>(&render_client))))
goto init_error;
if(FAILED(result = render_client->GetBuffer(length, &buffer)))
return result;
if(FAILED(m_audio_client->GetCurrentPadding(&padding)))
goto init_error;
mix((data_t*)buffer, length);
length = buffer_size - padding;
if(FAILED(result = render_client->ReleaseBuffer(length, 0)))
return result;
if(FAILED(render_client->GetBuffer(length, &buffer)))
goto init_error;
mix((data_t*)buffer, length);
if(FAILED(render_client->ReleaseBuffer(length, 0)))
{
init_error:
SafeRelease(&render_client);
doStop();
return;
}
}
m_audio_client->Start();
return result;
}
void WASAPIDevice::runMixingThread()
{
UINT32 buffer_size;
IAudioRenderClient* render_client = nullptr;
std::chrono::milliseconds sleep_duration;
bool run_init = true;
auto sleepDuration = std::chrono::milliseconds(buffer_size * 1000 / int(m_specs.rate) / 2);
for(;;)
{
HRESULT result = S_OK;
{
UINT32 padding;
UINT32 length;
data_t* buffer;
std::lock_guard<ILockable> lock(*this);
if(run_init)
{
result = setupRenderClient(render_client, buffer_size);
if(FAILED(result))
goto stop_thread;
sleep_duration = std::chrono::milliseconds(buffer_size * 1000 / int(m_specs.rate) / 2);
}
if(FAILED(result = m_audio_client->GetCurrentPadding(&padding)))
if(FAILED(m_audio_client->GetCurrentPadding(&padding)))
goto stop_thread;
length = buffer_size - padding;
if(FAILED(result = render_client->GetBuffer(length, &buffer)))
if(FAILED(render_client->GetBuffer(length, &buffer)))
goto stop_thread;
mix((data_t*)buffer, length);
if(FAILED(result = render_client->ReleaseBuffer(length, 0)))
if(FAILED(render_client->ReleaseBuffer(length, 0)))
goto stop_thread;
// stop thread
@@ -114,51 +98,53 @@ void WASAPIDevice::runMixingThread()
stop_thread:
m_audio_client->Stop();
SafeRelease(&render_client);
if(result == AUDCLNT_E_DEVICE_INVALIDATED)
{
DeviceSpecs specs = m_specs;
if(!setupDevice(specs))
result = S_FALSE;
else
{
setSpecs(specs);
run_init = true;
}
}
if(result != AUDCLNT_E_DEVICE_INVALIDATED)
{
doStop();
return;
}
doStop();
return;
}
}
std::this_thread::sleep_for(sleep_duration);
std::this_thread::sleep_for(sleepDuration);
}
}
bool WASAPIDevice::setupDevice(DeviceSpecs &specs)
WASAPIDevice::WASAPIDevice(DeviceSpecs specs, int buffersize) :
m_imm_device_enumerator(nullptr),
m_imm_device(nullptr),
m_audio_client(nullptr),
m_wave_format_extensible({})
{
SafeRelease(&m_audio_client);
SafeRelease(&m_imm_device);
// initialize COM if it hasn't happened yet
CoInitializeEx(nullptr, COINIT_MULTITHREADED);
const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);
const IID IID_IAudioClient = __uuidof(IAudioClient);
if(FAILED(m_imm_device_enumerator->GetDefaultAudioEndpoint(eRender, eMultimedia, &m_imm_device)))
return false;
if(FAILED(m_imm_device->Activate(IID_IAudioClient, CLSCTX_ALL, nullptr, reinterpret_cast<void**>(&m_audio_client))))
return false;
WAVEFORMATEXTENSIBLE wave_format_extensible_closest_match;
WAVEFORMATEXTENSIBLE* closest_match_pointer = &wave_format_extensible_closest_match;
HRESULT result;
REFERENCE_TIME minimum_time = 0;
REFERENCE_TIME buffer_duration;
if(FAILED(CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_ALL, IID_IMMDeviceEnumerator, reinterpret_cast<void**>(&m_imm_device_enumerator))))
goto error;
if(FAILED(m_imm_device_enumerator->GetDefaultAudioEndpoint(eRender, eMultimedia, &m_imm_device)))
goto error;
if(FAILED(m_imm_device->Activate(IID_IAudioClient, CLSCTX_ALL, nullptr, reinterpret_cast<void**>(&m_audio_client))))
goto error;
if(specs.channels == CHANNELS_INVALID)
specs.channels = CHANNELS_STEREO;
if(specs.format == FORMAT_INVALID)
specs.format = FORMAT_FLOAT32;
if(specs.rate == RATE_INVALID)
specs.rate = RATE_48000;
switch(specs.format)
{
case FORMAT_U8:
@@ -217,14 +203,12 @@ bool WASAPIDevice::setupDevice(DeviceSpecs &specs)
m_wave_format_extensible.Format.cbSize = 22;
m_wave_format_extensible.Samples.wValidBitsPerSample = m_wave_format_extensible.Format.wBitsPerSample;
HRESULT result = m_audio_client->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, reinterpret_cast<const WAVEFORMATEX*>(&m_wave_format_extensible), reinterpret_cast<WAVEFORMATEX**>(&closest_match_pointer));
result = m_audio_client->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, reinterpret_cast<const WAVEFORMATEX*>(&m_wave_format_extensible), reinterpret_cast<WAVEFORMATEX**>(&closest_match_pointer));
if(result == S_FALSE)
{
bool errored = false;
if(closest_match_pointer->Format.wFormatTag != WAVE_FORMAT_EXTENSIBLE)
goto closest_match_error;
goto error;
specs.channels = Channels(closest_match_pointer->Format.nChannels);
specs.rate = closest_match_pointer->Format.nSamplesPerSec;
@@ -236,7 +220,7 @@ bool WASAPIDevice::setupDevice(DeviceSpecs &specs)
else if(closest_match_pointer->Format.wBitsPerSample == 64)
specs.format = FORMAT_FLOAT64;
else
goto closest_match_error;
goto error;
}
else if(closest_match_pointer->SubFormat == KSDATAFORMAT_SUBTYPE_PCM)
{
@@ -255,81 +239,44 @@ bool WASAPIDevice::setupDevice(DeviceSpecs &specs)
specs.format = FORMAT_S32;
break;
default:
goto closest_match_error;
goto error;
break;
}
}
else
goto closest_match_error;
goto error;
m_wave_format_extensible = *closest_match_pointer;
if(false)
{
closest_match_error:
errored = true;
}
if(closest_match_pointer != &wave_format_extensible_closest_match)
{
CoTaskMemFree(closest_match_pointer);
closest_match_pointer = &wave_format_extensible_closest_match;
}
if(errored)
return false;
}
else if(FAILED(result))
return false;
goto error;
if(FAILED(m_audio_client->GetDevicePeriod(nullptr, &minimum_time)))
return false;
goto error;
buffer_duration = REFERENCE_TIME(m_buffersize) * REFERENCE_TIME(10000000) / REFERENCE_TIME(specs.rate);
buffer_duration = REFERENCE_TIME(buffersize) * REFERENCE_TIME(10000000) / REFERENCE_TIME(specs.rate);
if(minimum_time > buffer_duration)
buffer_duration = minimum_time;
if(FAILED(m_audio_client->Initialize(AUDCLNT_SHAREMODE_SHARED, 0, buffer_duration, 0, reinterpret_cast<WAVEFORMATEX*>(&m_wave_format_extensible), nullptr)))
return false;
return true;
}
WASAPIDevice::WASAPIDevice(DeviceSpecs specs, int buffersize) :
m_buffersize(buffersize),
m_imm_device_enumerator(nullptr),
m_imm_device(nullptr),
m_audio_client(nullptr),
m_wave_format_extensible({})
{
// initialize COM if it hasn't happened yet
CoInitializeEx(nullptr, COINIT_MULTITHREADED);
const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);
if(specs.channels == CHANNELS_INVALID)
specs.channels = CHANNELS_STEREO;
if(specs.format == FORMAT_INVALID)
specs.format = FORMAT_FLOAT32;
if(specs.rate == RATE_INVALID)
specs.rate = RATE_48000;
if(FAILED(CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_ALL, IID_IMMDeviceEnumerator, reinterpret_cast<void**>(&m_imm_device_enumerator))))
goto error;
if(!setupDevice(specs))
goto error;
m_specs = specs;
if(FAILED(m_audio_client->Initialize(AUDCLNT_SHAREMODE_SHARED, 0, buffer_duration, 0, reinterpret_cast<WAVEFORMATEX*>(&m_wave_format_extensible), nullptr)))
goto error;
create();
return;
error:
if(closest_match_pointer != &wave_format_extensible_closest_match)
CoTaskMemFree(closest_match_pointer);
SafeRelease(&m_imm_device);
SafeRelease(&m_imm_device_enumerator);
SafeRelease(&m_audio_client);
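The WASAPI hunks above revolve around the usual shared-mode render pattern: only the part of the endpoint buffer that is not still queued (the "padding") may be filled and released on each pass. A minimal sketch of one such pass, assuming an already initialized `IAudioClient` and `IAudioRenderClient`; `fill_frames()` is a hypothetical stand-in for the device's `mix()` call, and error handling is reduced to returning the failing `HRESULT`:

```
#include <windows.h>
#include <audioclient.h>
#include <cstring>

// Stand-in for the device's mix() call: write `frames` frames of audio into `dst`.
static void fill_frames(BYTE *dst, UINT32 frames, UINT32 frame_bytes)
{
  std::memset(dst, 0, size_t(frames) * frame_bytes);  // silence, for illustration only
}

// One pass of a shared-mode render loop.
static HRESULT render_once(IAudioClient *client,
                           IAudioRenderClient *render,
                           UINT32 buffer_size,  // total buffer size in frames, from GetBufferSize()
                           UINT32 frame_bytes)  // bytes per frame of the negotiated format
{
  UINT32 padding = 0;
  HRESULT hr = client->GetCurrentPadding(&padding);
  if (FAILED(hr))
    return hr;

  const UINT32 frames = buffer_size - padding;  // frames currently free for writing
  BYTE *data = nullptr;
  if (FAILED(hr = render->GetBuffer(frames, &data)))
    return hr;

  fill_frames(data, frames, frame_bytes);
  return render->ReleaseBuffer(frames, 0);
}
```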


@@ -43,21 +43,16 @@ AUD_NAMESPACE_BEGIN
class AUD_PLUGIN_API WASAPIDevice : public ThreadedDevice
{
private:
int m_buffersize;
IMMDeviceEnumerator* m_imm_device_enumerator;
IMMDevice* m_imm_device;
IAudioClient* m_audio_client;
WAVEFORMATEXTENSIBLE m_wave_format_extensible;
AUD_LOCAL HRESULT setupRenderClient(IAudioRenderClient*& render_client, UINT32& buffer_size);
/**
* Streaming thread main function.
*/
AUD_LOCAL void runMixingThread();
AUD_LOCAL bool setupDevice(DeviceSpecs& specs);
// delete copy constructor and operator=
WASAPIDevice(const WASAPIDevice&) = delete;
WASAPIDevice& operator=(const WASAPIDevice&) = delete;


@@ -756,7 +756,6 @@ void SoftwareDevice::mix(data_t* buffer, int length)
// get the buffer from the source
pos = 0;
len = length;
eos = false;
// update 3D Info
sound->update();
@@ -843,27 +842,6 @@ void SoftwareDevice::setSpecs(Specs specs)
{
sound->setSpecs(specs);
}
for(auto& sound : m_pausedSounds)
{
sound->setSpecs(specs);
}
}
void SoftwareDevice::setSpecs(DeviceSpecs specs)
{
m_specs = specs;
m_mixer->setSpecs(specs);
for(auto& sound : m_playingSounds)
{
sound->setSpecs(specs.specs);
}
for(auto& sound : m_pausedSounds)
{
sound->setSpecs(specs.specs);
}
}
SoftwareDevice::SoftwareDevice()


@@ -57,4 +57,4 @@ void VolumeReader::read(int& length, bool& eos, sample_t* buffer)
buffer[i] = buffer[i] * m_volumeStorage->getVolume();
}
AUD_NAMESPACE_END
AUD_NAMESPACE_END


@@ -21,25 +21,9 @@
AUD_NAMESPACE_BEGIN
Mixer::Mixer(DeviceSpecs specs)
Mixer::Mixer(DeviceSpecs specs) :
m_specs(specs)
{
setSpecs(specs);
}
DeviceSpecs Mixer::getSpecs() const
{
return m_specs;
}
void Mixer::setSpecs(Specs specs)
{
m_specs.specs = specs;
}
void Mixer::setSpecs(DeviceSpecs specs)
{
m_specs = specs;
switch(m_specs.format)
{
case FORMAT_U8:
@@ -70,6 +54,16 @@ void Mixer::setSpecs(DeviceSpecs specs)
}
}
DeviceSpecs Mixer::getSpecs() const
{
return m_specs;
}
void Mixer::setSpecs(Specs specs)
{
m_specs.specs = specs;
}
void Mixer::clear(int length)
{
m_buffer.assureSize(length * AUD_SAMPLE_SIZE(m_specs));


@@ -7,4 +7,3 @@ Local modifications:
checks for functions and so are needed.
* Added special definitions of HAVE_SNPRINTF and HAVE_LIB_GFLAGS
in Windows' specific config.h.
* Silenced syscall deprecation warnings on macOS >= 10.12.


@@ -59,7 +59,7 @@
# include <unistd.h>
#endif
#if (defined(HAVE_SYSCALL_H) || defined(HAVE_SYS_SYSCALL_H)) && (!(defined OS_MACOSX))
#if defined(HAVE_SYSCALL_H) || defined(HAVE_SYS_SYSCALL_H)
# define safe_write(fd, s, len) syscall(SYS_write, fd, s, len)
#else
// Not so safe, but what can you do?


@@ -259,13 +259,7 @@ pid_t GetTID() {
#endif
static bool lacks_gettid = false;
if (!lacks_gettid) {
#ifdef OS_MACOSX
uint64_t tid64;
const int error = pthread_threadid_np(NULL, &tid64);
pid_t tid = error ? -1 : (pid_t)tid64;
#else
pid_t tid = syscall(__NR_gettid);
#endif
if (tid != -1) {
return tid;
}


@@ -8,7 +8,7 @@
# YOUR INSTALLATION PATHS GO HERE:
MANTA_INSTALLATION=/Users/sebbas/Developer/Mantaflow/mantaflowDevelop
BLENDER_INSTALLATION=/Users/sebbas/Developer/Blender
BLENDER_INSTALLATION=/Users/sebbas/Developer/Blender/fluid-mantaflow
# Try to check out Mantaflow repository before building?
CLEAN_REPOSITORY=0


@@ -28,13 +28,11 @@ extern PyTypeObject PbVec3Type;
extern PyTypeObject PbVec4Type;
struct PbVec3 {
PyObject_HEAD
float data[3];
PyObject_HEAD float data[3];
};
struct PbVec4 {
PyObject_HEAD
float data[4];
PyObject_HEAD float data[4];
};
PyObject *getPyNone()


@@ -25,8 +25,7 @@ namespace Manta {
extern PyTypeObject PbVec3Type;
struct PbVec3 {
PyObject_HEAD
float data[3];
PyObject_HEAD float data[3];
};
static void PbVec3Dealloc(PbVec3 *self)
@@ -294,8 +293,7 @@ inline PyObject *castPy(PyTypeObject *p)
extern PyTypeObject PbVec4Type;
struct PbVec4 {
PyObject_HEAD
float data[4];
PyObject_HEAD float data[4];
};
static PyMethodDef PbVec4Methods[] = {


@@ -76,8 +76,7 @@ struct ClassData {
};
struct PbObject {
PyObject_HEAD
Manta::PbClass *instance;
PyObject_HEAD Manta::PbClass *instance;
ClassData *classdef;
};


@@ -874,136 +874,6 @@ static const Vec3i nb[6] = {Vec3i(1, 0, 0),
Vec3i(0, 0, 1),
Vec3i(0, 0, -1)};
struct knMarkSkipCells : public KernelBase {
knMarkSkipCells(Grid<Real> &phi, Grid<int> &tmp, bool inside)
: KernelBase(&phi, 1), phi(phi), tmp(tmp), inside(inside)
{
runMessage();
run();
}
inline void op(int i, int j, int k, Grid<Real> &phi, Grid<int> &tmp, bool inside) const
{
if (!inside && phi(i, j, k) < 0.) {
tmp(i, j, k) = 1;
}
if (inside && phi(i, j, k) > 0.) {
tmp(i, j, k) = 1;
}
}
inline Grid<Real> &getArg0()
{
return phi;
}
typedef Grid<Real> type0;
inline Grid<int> &getArg1()
{
return tmp;
}
typedef Grid<int> type1;
inline bool &getArg2()
{
return inside;
}
typedef bool type2;
void runMessage()
{
debMsg("Executing kernel knMarkSkipCells ", 3);
debMsg("Kernel range"
<< " x " << maxX << " y " << maxY << " z " << minZ << " - " << maxZ << " ",
4);
};
void operator()(const tbb::blocked_range<IndexInt> &__r) const
{
const int _maxX = maxX;
const int _maxY = maxY;
if (maxZ > 1) {
for (int k = __r.begin(); k != (int)__r.end(); k++)
for (int j = 1; j < _maxY; j++)
for (int i = 1; i < _maxX; i++)
op(i, j, k, phi, tmp, inside);
}
else {
const int k = 0;
for (int j = __r.begin(); j != (int)__r.end(); j++)
for (int i = 1; i < _maxX; i++)
op(i, j, k, phi, tmp, inside);
}
}
void run()
{
if (maxZ > 1)
tbb::parallel_for(tbb::blocked_range<IndexInt>(minZ, maxZ), *this);
else
tbb::parallel_for(tbb::blocked_range<IndexInt>(1, maxY), *this);
}
Grid<Real> &phi;
Grid<int> &tmp;
bool inside;
};
struct knSetFirstLayer : public KernelBase {
knSetFirstLayer(Grid<int> &tmp, int dim) : KernelBase(&tmp, 1), tmp(tmp), dim(dim)
{
runMessage();
run();
}
inline void op(int i, int j, int k, Grid<int> &tmp, int dim) const
{
Vec3i p(i, j, k);
if (tmp(p))
return;
for (int n = 0; n < 2 * dim; ++n) {
if (tmp(p + nb[n]) == 1) {
tmp(i, j, k) = 2;
break;
}
}
}
inline Grid<int> &getArg0()
{
return tmp;
}
typedef Grid<int> type0;
inline int &getArg1()
{
return dim;
}
typedef int type1;
void runMessage()
{
debMsg("Executing kernel knSetFirstLayer ", 3);
debMsg("Kernel range"
<< " x " << maxX << " y " << maxY << " z " << minZ << " - " << maxZ << " ",
4);
};
void operator()(const tbb::blocked_range<IndexInt> &__r) const
{
const int _maxX = maxX;
const int _maxY = maxY;
if (maxZ > 1) {
for (int k = __r.begin(); k != (int)__r.end(); k++)
for (int j = 1; j < _maxY; j++)
for (int i = 1; i < _maxX; i++)
op(i, j, k, tmp, dim);
}
else {
const int k = 0;
for (int j = __r.begin(); j != (int)__r.end(); j++)
for (int i = 1; i < _maxX; i++)
op(i, j, k, tmp, dim);
}
}
void run()
{
if (maxZ > 1)
tbb::parallel_for(tbb::blocked_range<IndexInt>(minZ, maxZ), *this);
else
tbb::parallel_for(tbb::blocked_range<IndexInt>(1, maxY), *this);
}
Grid<int> &tmp;
int dim;
};
template<class S> struct knExtrapolateLsSimple : public KernelBase {
knExtrapolateLsSimple(Grid<S> &val, int distance, Grid<int> &tmp, const int d, S direction)
: KernelBase(&val, 1), val(val), distance(distance), tmp(tmp), d(d), direction(direction)
@@ -1173,12 +1043,39 @@ void extrapolateLsSimple(Grid<Real> &phi, int distance = 4, bool inside = false)
tmp.clear();
const int dim = (phi.is3D() ? 3 : 2);
// by default, march outside (ie mark all inside to be skipped)
Real direction = (inside) ? -1. : 1.;
knMarkSkipCells(phi, tmp, inside);
// by default, march outside
Real direction = 1.;
if (!inside) {
// mark all inside
FOR_IJK_BND(phi, 1)
{
if (phi(i, j, k) < 0.) {
tmp(i, j, k) = 1;
}
}
}
else {
direction = -1.;
FOR_IJK_BND(phi, 1)
{
if (phi(i, j, k) > 0.) {
tmp(i, j, k) = 1;
}
}
}
// + first layer around
knSetFirstLayer(tmp, dim);
FOR_IJK_BND(phi, 1)
{
Vec3i p(i, j, k);
if (tmp(p))
continue;
for (int n = 0; n < 2 * dim; ++n) {
if (tmp(p + nb[n]) == 1) {
tmp(i, j, k) = 2;
n = 2 * dim;
}
}
}
// extrapolate for distance
for (int d = 2; d < 1 + distance; ++d) {
@@ -1229,12 +1126,37 @@ void extrapolateVec3Simple(Grid<Vec3> &vel, Grid<Real> &phi, int distance = 4, b
tmp.clear();
const int dim = (vel.is3D() ? 3 : 2);
// mark initial cells, by default, march outside (ie mark all inside to be skipped)
Real direction = (inside) ? -1. : 1.;
knMarkSkipCells(phi, tmp, inside);
// mark initial cells, by default, march outside
if (!inside) {
// mark all inside
FOR_IJK_BND(phi, 1)
{
if (phi(i, j, k) < 0.) {
tmp(i, j, k) = 1;
}
}
}
else {
FOR_IJK_BND(phi, 1)
{
if (phi(i, j, k) > 0.) {
tmp(i, j, k) = 1;
}
}
}
// + first layer next to initial cells
knSetFirstLayer(tmp, dim);
FOR_IJK_BND(vel, 1)
{
Vec3i p(i, j, k);
if (tmp(p))
continue;
for (int n = 0; n < 2 * dim; ++n) {
if (tmp(p + nb[n]) == 1) {
tmp(i, j, k) = 2;
n = 2 * dim;
}
}
}
for (int d = 2; d < 1 + distance; ++d) {
knExtrapolateLsSimple<Vec3>(vel, distance, tmp, d, Vec3(0.));
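The `knMarkSkipCells` and `knSetFirstLayer` kernels in the hunks above follow Mantaflow's `KernelBase` pattern: the per-cell body is dispatched through `tbb::parallel_for` over a `blocked_range` of the outermost grid dimension, with a 2D fallback when `maxZ` is 1. A minimal sketch of the same dispatch using a plain lambda; `IntGrid` is an illustrative stand-in for Manta's `Grid<int>`, not its real layout:

```
#include <tbb/blocked_range.h>
#include <tbb/parallel_for.h>
#include <cstddef>
#include <vector>

// Illustrative stand-in for Manta's Grid<int>: dense 3D storage with (i, j, k) access.
struct IntGrid {
  int sx, sy, sz;
  std::vector<int> data;
  IntGrid(int x, int y, int z) : sx(x), sy(y), sz(z), data(std::size_t(x) * y * z, 0) {}
  int &operator()(int i, int j, int k) { return data[(std::size_t(k) * sy + j) * sx + i]; }
};

// Parallel sweep over interior cells, slicing the k dimension across threads,
// matching the shape of the operator()(blocked_range) bodies in the kernels above.
template<class Body> void for_interior(IntGrid &g, Body body)
{
  tbb::parallel_for(tbb::blocked_range<int>(1, g.sz - 1),
                    [&](const tbb::blocked_range<int> &r) {
                      for (int k = r.begin(); k != r.end(); ++k)
                        for (int j = 1; j < g.sy - 1; ++j)
                          for (int i = 1; i < g.sx - 1; ++i)
                            body(i, j, k);
                    });
}
```

Marking every cell that satisfies a predicate, as `knMarkSkipCells` does for the sign of the level set, then reduces to `for_interior(tmp, [&](int i, int j, int k) { if (negative(i, j, k)) tmp(i, j, k) = 1; });`, with `negative` standing in for the `phi` test.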


@@ -1,3 +1,3 @@
#define MANTA_GIT_VERSION "commit d5d9a6c28daa8f21426d7a285f48639c0d8fd13f"
#define MANTA_GIT_VERSION "commit 9c505cd22e289b98c9aa717efba8ef3201c7e458"

Some files were not shown because too many files have changed in this diff.