Merge branch 'main' into asset-browser-grid-view
@@ -130,7 +130,7 @@ PenaltyReturnTypeOnItsOwnLine: 10000
#
PenaltyBreakAssignment: 100

AllowShortFunctionsOnASingleLine: None
AllowShortFunctionsOnASingleLine: Empty

SortIncludes: true
@@ -2,4 +2,4 @@ ${CommitTitle}

${CommitBody}

Pull Request #${PullRequestIndex}
Pull Request: https://projects.blender.org/blender/blender/pulls/${PullRequestIndex}
@@ -1,3 +1,3 @@
${PullRequestTitle}

Pull Request #${PullRequestIndex}
Pull Request: https://projects.blender.org/blender/blender/pulls/${PullRequestIndex}
@@ -1,9 +1,9 @@
name: Bug Report
about: File a bug report
labels:
- "type::Report"
- "status::Needs Triage"
- "priority::Normal"
- "Type/Report"
- "Status/Needs Triage"
- "Priority/Normal"
body:
- type: markdown
attributes:
@@ -1,7 +1,7 @@
name: Design
about: Create a design task (for developers only)
labels:
- "type::Design"
- "Type/Design"
body:
- type: textarea
id: body
@@ -1,7 +1,7 @@
name: To Do
about: Create a to do task (for developers only)
labels:
- "type::To Do"
- "Type/To Do"
body:
- type: textarea
id: body
.gitignore (vendored): 27 changed lines
@@ -39,7 +39,7 @@ Desktop.ini
/doc/python_api/rst/bmesh.ops.rst

# in-source lib downloads
/build_files/build_environment/downloads
/build_files/build_environment/downloads/

# in-source buildbot signing configuration
/build_files/buildbot/codesign/config_server.py
@@ -48,4 +48,27 @@ Desktop.ini
waveletNoiseTile.bin

# testing environment
/Testing
/Testing/

# Translations.
/locale/user-config.py

# External repositories.
/scripts/addons/
/scripts/addons_contrib/

# Ignore old submodules directories.
# Eventually need to get rid of those, but for the first time of transition
# avoid indidents when the folders exists after bisect and developers staging
# them by accident.
/release/scripts/addons/
/release/datafiles/locale/
/release/scripts/addons_contrib/
/source/tools/

# Build files for VS and VS Code.
/build/
/out/
CMakeSettings.json
CMakePresets.json
CMakeUserPresets.json
.gitmodules (vendored): 20 changed lines
@@ -1,20 +0,0 @@
[submodule "release/scripts/addons"]
path = release/scripts/addons
url = ../blender-addons.git
branch = main
ignore = all
[submodule "release/scripts/addons_contrib"]
path = release/scripts/addons_contrib
url = ../blender-addons-contrib.git
branch = main
ignore = all
[submodule "release/datafiles/locale"]
path = release/datafiles/locale
url = ../blender-translations.git
branch = main
ignore = all
[submodule "source/tools"]
path = source/tools
url = ../blender-dev-tools.git
branch = main
ignore = all
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2006 Blender Foundation. All rights reserved.
# Copyright 2006 Blender Foundation

# -----------------------------------------------------------------------------
# Early Initialization
@@ -164,7 +164,7 @@ get_blender_version()
# Declare Options

# Blender internal features
option(WITH_BLENDER "Build blender (disable to build only the blender player)" ON)
option(WITH_BLENDER "Build blender (disable to build only Cycles stand-alone)." ON)
mark_as_advanced(WITH_BLENDER)

if(WIN32)
@@ -331,7 +331,6 @@ option(WITH_MOD_REMESH "Enable Remesh Modifier" ON)
option(WITH_MOD_OCEANSIM "Enable Ocean Modifier" ON)

# Image format support
option(WITH_OPENIMAGEIO "Enable OpenImageIO Support (http://www.openimageio.org)" ON)
option(WITH_IMAGE_OPENEXR "Enable OpenEXR Support (http://www.openexr.com)" ON)
option(WITH_IMAGE_OPENJPEG "Enable OpenJpeg Support (http://www.openjpeg.org)" ON)
option(WITH_IMAGE_TIFF "Enable LibTIFF Support" ON)
@@ -358,6 +357,7 @@ option(WITH_MATERIALX "Enable MaterialX Support" OFF)
# Disable opencollada when we don't have precompiled libs
option(WITH_OPENCOLLADA "Enable OpenCollada Support (http://www.opencollada.org)" ON)
option(WITH_IO_WAVEFRONT_OBJ "Enable Wavefront-OBJ 3D file format support (*.obj)" ON)
option(WITH_IO_PLY "Enable PLY 3D file format support (*.ply)" ON)
option(WITH_IO_STL "Enable STL 3D file format support (*.stl)" ON)
option(WITH_IO_GPENCIL "Enable grease-pencil file format IO (*.svg, *.pdf)" ON)

@@ -617,10 +617,12 @@ endif()

option(WITH_OPENGL "When off limits visibility of the opengl headers to just bf_gpu and gawain (temporary option for development purposes)" ON)
option(WITH_GPU_BUILDTIME_SHADER_BUILDER "Shader builder is a developer option enabling linting on GLSL during compilation" OFF)
option(WITH_RENDERDOC "Use Renderdoc API to capture frames" OFF)

mark_as_advanced(
WITH_OPENGL
WITH_GPU_BUILDTIME_SHADER_BUILDER
WITH_RENDERDOC
)

# Vulkan
@@ -884,16 +886,17 @@ set_and_warn_dependency(WITH_TBB WITH_MOD_FLUID OFF)
# NanoVDB requires OpenVDB to convert the data structure
set_and_warn_dependency(WITH_OPENVDB WITH_NANOVDB OFF)

# OpenVDB and OpenColorIO uses 'half' type from OpenEXR
# OpenVDB, Alembic and Vulkan, OSL uses 'half' or 'imath' from OpenEXR
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_OPENVDB OFF)
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_ALEMBIC OFF)
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_VULKAN_BACKEND OFF)
set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_CYCLES_OSL OFF)

# Haru needs `TIFFFaxBlackCodes` & `TIFFFaxWhiteCodes` symbols from TIFF.
set_and_warn_dependency(WITH_IMAGE_TIFF WITH_HARU OFF)

# auto enable openimageio for cycles
if(WITH_CYCLES)
set(WITH_OPENIMAGEIO ON)

# auto enable llvm for cycles_osl
if(WITH_CYCLES_OSL)
set(WITH_LLVM ON CACHE BOOL "" FORCE)
@@ -954,21 +957,6 @@ endif()
# -----------------------------------------------------------------------------
# Check if Sub-modules are Cloned

if(WITH_INTERNATIONAL)
file(GLOB RESULT "${CMAKE_SOURCE_DIR}/release/datafiles/locale")
list(LENGTH RESULT DIR_LEN)
if(DIR_LEN EQUAL 0)
message(
WARNING
"Translation path '${CMAKE_SOURCE_DIR}/release/datafiles/locale' is missing, "
"This is a 'git submodule', which are known not to work with bridges to other version "
"control systems."
)
set(TRANSLATIONS_FOUND OFF)
set_and_warn_library_found("Translations" TRANSLATIONS_FOUND WITH_INTERNATIONAL)
endif()
endif()

if(WITH_PYTHON)
# While we have this as an '#error' in 'bpy_capi_utils.h',
# upgrading Python tends to cause confusion for users who build.
@@ -984,14 +972,14 @@ if(WITH_PYTHON)
)
endif()

file(GLOB RESULT "${CMAKE_SOURCE_DIR}/release/scripts/addons")
file(GLOB RESULT "${CMAKE_SOURCE_DIR}/scripts/addons")
list(LENGTH RESULT DIR_LEN)
if(DIR_LEN EQUAL 0)
message(
WARNING
"Addons path '${CMAKE_SOURCE_DIR}/release/scripts/addons' is missing, "
"This is a 'git submodule', which are known not to work with bridges to other version "
"control systems: * CONTINUING WITHOUT ADDONS *"
"Addons path '${CMAKE_SOURCE_DIR}/scripts/addons' is missing. "
"This is an external repository which needs to be checked out. Use `make update` to do so. "
"* CONTINUING WITHOUT ADDONS *"
)
endif()
endif()
@@ -1100,13 +1088,6 @@ if(NOT WITH_FFTW3 AND WITH_MOD_OCEANSIM)
endif()

if(WITH_CYCLES)
if(NOT WITH_OPENIMAGEIO)
message(
FATAL_ERROR
"Cycles requires WITH_OPENIMAGEIO, the library may not have been found. "
"Configure OIIO or disable WITH_CYCLES"
)
endif()
if(WITH_CYCLES_OSL)
if(NOT WITH_LLVM)
message(
@@ -1579,6 +1560,9 @@ elseif(CMAKE_C_COMPILER_ID MATCHES "Clang")
# add_check_c_compiler_flag(C_WARNINGS C_WARN_UNUSED_MACROS -Wunused-macros)
# add_check_cxx_compiler_flag(CXX_WARNINGS CXX_WARN_UNUSED_MACROS -Wunused-macros)

add_check_c_compiler_flag(C_WARNINGS C_WARN_ERROR_UNGUARDED_AVAILABILITY_NEW -Werror=unguarded-availability-new)
add_check_c_compiler_flag(CXX_WARNINGS CXX_WARN_ERROR_UNGUARDED_AVAILABILITY_NEW -Werror=unguarded-availability-new)

# ---------------------
# Suppress Strict Flags

@@ -1631,6 +1615,7 @@ elseif(CMAKE_C_COMPILER_ID MATCHES "MSVC")
# warning level:
"/W3"
"/w34062" # switch statement contains 'default' but no 'case' labels
"/w34100" # 'identifier' : unreferenced formal parameter
"/w34115" # 'type' : named type definition in parentheses
"/w34189" # local variable is initialized but not referenced
# see https://docs.microsoft.com/en-us/cpp/error-messages/compiler-warnings/c5038?view=vs-2017
@@ -1957,7 +1942,6 @@ if(FIRST_RUN)
info_cfg_option(WITH_IMAGE_OPENEXR)
info_cfg_option(WITH_IMAGE_OPENJPEG)
info_cfg_option(WITH_IMAGE_TIFF)
info_cfg_option(WITH_OPENIMAGEIO)

info_cfg_text("Audio:")
info_cfg_option(WITH_CODEC_AVI)
GNUmakefile: 28 changed lines
@@ -69,7 +69,7 @@ Static Source Code Checking
* check_cmake: Runs our own cmake file checker which detects errors in the cmake file list definitions.
* check_pep8: Checks all Python script are pep8 which are tagged to use the stricter formatting.
* check_mypy: Checks all Python scripts using mypy,
see: source/tools/check_source/check_mypy_config.py scripts which are included.
see: tools/check_source/check_mypy_config.py scripts which are included.

Documentation Checking

@@ -85,7 +85,7 @@ Spell Checkers
* check_spelling_osl: Check for spelling errors (OSL only).
* check_spelling_py: Check for spelling errors (Python only).

Note: an additional word-list is maintained at: 'source/tools/check_source/check_spelling_c_config.py'
Note: an additional word-list is maintained at: 'tools/check_source/check_spelling_c_config.py'

Note: that spell checkers can take a 'CHECK_SPELLING_CACHE' filepath argument,
so re-running does not need to re-check unchanged files.
@@ -490,22 +490,22 @@ check_smatch: .FORCE
$(PYTHON) "$(BLENDER_DIR)/build_files/cmake/cmake_static_check_smatch.py"

check_mypy: .FORCE
@$(PYTHON) "$(BLENDER_DIR)/source/tools/check_source/check_mypy.py"
@$(PYTHON) "$(BLENDER_DIR)/tools/check_source/check_mypy.py"

check_wiki_file_structure: .FORCE
@PYTHONIOENCODING=utf_8 $(PYTHON) \
"$(BLENDER_DIR)/source/tools/check_wiki/check_wiki_file_structure.py"
"$(BLENDER_DIR)/tools/check_wiki/check_wiki_file_structure.py"

check_spelling_py: .FORCE
@cd "$(BUILD_DIR)" ; \
PYTHONIOENCODING=utf_8 $(PYTHON) \
"$(BLENDER_DIR)/source/tools/check_source/check_spelling.py" \
"$(BLENDER_DIR)/release/scripts"
"$(BLENDER_DIR)/tools/check_source/check_spelling.py" \
"$(BLENDER_DIR)/scripts"

check_spelling_c: .FORCE
@cd "$(BUILD_DIR)" ; \
PYTHONIOENCODING=utf_8 $(PYTHON) \
"$(BLENDER_DIR)/source/tools/check_source/check_spelling.py" \
"$(BLENDER_DIR)/tools/check_source/check_spelling.py" \
--cache-file=$(CHECK_SPELLING_CACHE) \
"$(BLENDER_DIR)/source" \
"$(BLENDER_DIR)/intern/cycles" \
@@ -515,21 +515,21 @@ check_spelling_c: .FORCE
check_spelling_osl: .FORCE
@cd "$(BUILD_DIR)" ; \
PYTHONIOENCODING=utf_8 $(PYTHON) \
"$(BLENDER_DIR)/source/tools/check_source/check_spelling.py" \
"$(BLENDER_DIR)/tools/check_source/check_spelling.py" \
--cache-file=$(CHECK_SPELLING_CACHE) \
"$(BLENDER_DIR)/intern/cycles/kernel/shaders"

check_descriptions: .FORCE
@$(BLENDER_BIN) --background -noaudio --factory-startup --python \
"$(BLENDER_DIR)/source/tools/check_source/check_descriptions.py"
"$(BLENDER_DIR)/tools/check_source/check_descriptions.py"

check_deprecated: .FORCE
@PYTHONIOENCODING=utf_8 $(PYTHON) \
source/tools/check_source/check_deprecated.py
tools/check_source/check_deprecated.py

check_licenses: .FORCE
@PYTHONIOENCODING=utf_8 $(PYTHON) \
"$(BLENDER_DIR)/source/tools/check_source/check_licenses.py" \
"$(BLENDER_DIR)/tools/check_source/check_licenses.py" \
"--show-headers=$(SHOW_HEADERS)"

check_pep8: .FORCE
@@ -538,7 +538,7 @@ check_pep8: .FORCE

check_cmake: .FORCE
@PYTHONIOENCODING=utf_8 $(PYTHON) \
source/tools/check_source/check_cmake_consistency.py
tools/check_source/check_cmake_consistency.py


# -----------------------------------------------------------------------------
@@ -576,8 +576,8 @@ update_code: .FORCE
@$(PYTHON) ./build_files/utils/make_update.py --no-libraries

format: .FORCE
@PATH="${LIBDIR}/llvm/bin/:$(PATH)" $(PYTHON) source/tools/utils_maintenance/clang_format_paths.py $(PATHS)
@$(PYTHON) source/tools/utils_maintenance/autopep8_format_paths.py --autopep8-command="$(AUTOPEP8)" $(PATHS)
@PATH="${LIBDIR}/llvm/bin/:$(PATH)" $(PYTHON) tools/utils_maintenance/clang_format_paths.py $(PATHS)
@$(PYTHON) tools/utils_maintenance/autopep8_format_paths.py --autopep8-command="$(AUTOPEP8)" $(PATHS)


# -----------------------------------------------------------------------------
@@ -78,12 +78,7 @@ include(cmake/tbb.cmake)
include(cmake/python.cmake)
include(cmake/llvm.cmake)
include(cmake/osl.cmake)
option(USE_PIP_NUMPY "Install NumPy using pip wheel instead of building from source" OFF)
if(APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64"))
set(USE_PIP_NUMPY ON)
else()
include(cmake/numpy.cmake)
endif()
include(cmake/numpy.cmake)
include(cmake/python_site_packages.cmake)
include(cmake/package_python.cmake)
include(cmake/openimageio.cmake)
@@ -6,15 +6,24 @@
set(EMBREE_EXTRA_ARGS
-DEMBREE_ISPC_SUPPORT=OFF
-DEMBREE_TUTORIALS=OFF
-DEMBREE_STATIC_LIB=ON
-DEMBREE_STATIC_LIB=OFF
-DEMBREE_RAY_MASK=ON
-DEMBREE_FILTER_FUNCTION=ON
-DEMBREE_BACKFACE_CULLING=OFF
-DEMBREE_BACKFACE_CULLING_CURVES=ON
-DEMBREE_BACKFACE_CULLING_SPHERES=ON
-DEMBREE_TASKING_SYSTEM=TBB
-DEMBREE_TBB_ROOT=${LIBDIR}/tbb
-DTBB_ROOT=${LIBDIR}/tbb
)

if(WIN32)
set(EMBREE_EXTRA_ARGS
${EMBREE_EXTRA_ARGS}
-DCMAKE_DEBUG_POSTFIX=_d
)
endif()

if(NOT BLENDER_PLATFORM_ARM)
set(EMBREE_EXTRA_ARGS
${EMBREE_EXTRA_ARGS}
@@ -45,25 +54,19 @@ add_dependencies(
)

if(WIN32)

if(BUILD_MODE STREQUAL Release)
ExternalProject_Add_Step(external_embree after_install
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/embree ${HARVEST_TARGET}/embree
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/embree/include ${HARVEST_TARGET}/embree/include
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/embree/lib ${HARVEST_TARGET}/embree/lib
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/embree/share ${HARVEST_TARGET}/embree/share
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/bin/embree4.dll ${HARVEST_TARGET}/embree/bin/embree4.dll
DEPENDEES install
)
else()
ExternalProject_Add_Step(external_embree after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/embree3.lib ${HARVEST_TARGET}/embree/lib/embree3_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/embree_avx.lib ${HARVEST_TARGET}/embree/lib/embree_avx_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/embree_avx2.lib ${HARVEST_TARGET}/embree/lib/embree_avx2_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/embree_sse42.lib ${HARVEST_TARGET}/embree/lib/embree_sse42_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/lexers.lib ${HARVEST_TARGET}/embree/lib/lexers_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/math.lib ${HARVEST_TARGET}/embree/lib/math_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/simd.lib ${HARVEST_TARGET}/embree/lib/simd_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/sys.lib ${HARVEST_TARGET}/embree/lib/sys_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/tasking.lib ${HARVEST_TARGET}/embree/lib/tasking_d.lib
DEPENDEES install
)
endif()

ExternalProject_Add_Step(external_embree after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/bin/embree4_d.dll ${HARVEST_TARGET}/embree/bin/embree4_d.dll
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/embree4_d.lib ${HARVEST_TARGET}/embree/lib/embree4_d.lib
DEPENDEES install
)
endif()
endif()
@@ -44,13 +44,21 @@ set(OPENVDB_EXTRA_ARGS
# -DLLVM_DIR=${LIBDIR}/llvm/lib/cmake/llvm
)

set(OPENVDB_PATCH ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/openvdb/src/openvdb < ${PATCH_DIR}/openvdb.diff)
if(APPLE)
set(OPENVDB_PATCH
${OPENVDB_PATCH} &&
${PATCH_CMD} -p 0 -d ${BUILD_DIR}/openvdb/src/openvdb < ${PATCH_DIR}/openvdb_metal.diff
)
endif()

ExternalProject_Add(openvdb
URL file://${PACKAGE_DIR}/${OPENVDB_FILE}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH ${OPENVDB_HASH_TYPE}=${OPENVDB_HASH}
CMAKE_GENERATOR ${PLATFORM_ALT_GENERATOR}
PREFIX ${BUILD_DIR}/openvdb
PATCH_COMMAND ${PATCH_CMD} -p 1 -d ${BUILD_DIR}/openvdb/src/openvdb < ${PATCH_DIR}/openvdb.diff
PATCH_COMMAND ${OPENVDB_PATCH}
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/openvdb ${DEFAULT_CMAKE_FLAGS} ${OPENVDB_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/openvdb
)
@@ -38,15 +38,6 @@ ExternalProject_Add(external_python_site_packages
--no-binary :all:
)

if(USE_PIP_NUMPY)
# Use only wheel (and not build from source) to stop NumPy from linking against buggy
# Accelerate framework backend on macOS. Official wheels are built with OpenBLAS.
ExternalProject_Add_Step(external_python_site_packages after_install
COMMAND ${PYTHON_BINARY} -m pip install --no-cache-dir numpy==${NUMPY_VERSION} --only-binary :all:
DEPENDEES install
)
endif()

add_dependencies(
external_python_site_packages
external_python
@@ -165,9 +165,9 @@ set(OPENMP_URI https://github.com/llvm/llvm-project/releases/download/llvmorg-${
set(OPENMP_HASH_TYPE MD5)
set(OPENMP_FILE openmp-${OPENMP_VERSION}.src.tar.xz)

set(OPENIMAGEIO_VERSION v2.4.6.0)
set(OPENIMAGEIO_VERSION v2.4.9.0)
set(OPENIMAGEIO_URI https://github.com/OpenImageIO/oiio/archive/refs/tags/${OPENIMAGEIO_VERSION}.tar.gz)
set(OPENIMAGEIO_HASH c7acc1b9a8fda04ef48f7de1feda4dae)
set(OPENIMAGEIO_HASH 7da92a7d6029921a8599a977ff1efa2a)
set(OPENIMAGEIO_HASH_TYPE MD5)
set(OPENIMAGEIO_FILE OpenImageIO-${OPENIMAGEIO_VERSION}.tar.gz)

@@ -478,9 +478,9 @@ set(SQLITE_HASH_TYPE SHA1)
set(SQLITE_FILE sqlite-autoconf-${SQLLITE_LONG_VERSION}.tar.gz)
set(SQLITE_CPE "cpe:2.3:a:sqlite:sqlite:${SQLITE_VERSION}:*:*:*:*:*:*:*")

set(EMBREE_VERSION 3.13.4)
set(EMBREE_VERSION 4.0.1)
set(EMBREE_URI https://github.com/embree/embree/archive/v${EMBREE_VERSION}.zip)
set(EMBREE_HASH 52d0be294d6c88ba7a6c9e046796e7be)
set(EMBREE_HASH dd26617719a587e126b341d1b32f7fd0)
set(EMBREE_HASH_TYPE MD5)
set(EMBREE_FILE embree-v${EMBREE_VERSION}.zip)

@@ -528,16 +528,16 @@ set(XR_OPENXR_SDK_HASH a2623ebab3d0b340bc16311b14f02075)
set(XR_OPENXR_SDK_HASH_TYPE MD5)
set(XR_OPENXR_SDK_FILE OpenXR-SDK-${XR_OPENXR_SDK_VERSION}.tar.gz)

set(WL_PROTOCOLS_VERSION 1.21)
set(WL_PROTOCOLS_VERSION 1.31)
set(WL_PROTOCOLS_FILE wayland-protocols-${WL_PROTOCOLS_VERSION}.tar.gz)
set(WL_PROTOCOLS_URI https://gitlab.freedesktop.org/wayland/wayland-protocols/-/archive/${WL_PROTOCOLS_VERSION}/${WL_PROTOCOLS_FILE})
set(WL_PROTOCOLS_HASH af5ca07e13517cdbab33504492cef54a)
set(WL_PROTOCOLS_HASH a28ff59a56e2ebb746048b6ef8d931d6)
set(WL_PROTOCOLS_HASH_TYPE MD5)

set(WAYLAND_VERSION 1.21.0)
set(WAYLAND_VERSION 1.22.0)
set(WAYLAND_FILE wayland-${WAYLAND_VERSION}.tar.xz)
set(WAYLAND_URI https://gitlab.freedesktop.org/wayland/wayland/-/releases/1.21.0/downloads/wayland-${WAYLAND_VERSION}.tar.xz)
set(WAYLAND_HASH f2653a2293bcd882d756c6a83d278903)
set(WAYLAND_URI https://gitlab.freedesktop.org/wayland/wayland/-/releases/${WAYLAND_VERSION}/downloads/wayland-${WAYLAND_VERSION}.tar.xz)
set(WAYLAND_HASH 7410ab549e3928fce9381455b17b0803)
set(WAYLAND_HASH_TYPE MD5)

set(WAYLAND_LIBDECOR_VERSION 0.1.0)
build_files/build_environment/install_linux_packages.py (executable file, 1735 lines): diff suppressed because it is too large
@@ -1,8 +1,8 @@
#!/usr/bin/env bash
# SPDX-License-Identifier: GPL-2.0-or-later

# This script is part of the official build environment, see WIKI page for details.
# https://wiki.blender.org/wiki/Building_Blender/Other/CentOS7ReleaseEnvironment
# This script is part of the official build environment, see wiki page for details.
# https://wiki.blender.org/wiki/Building_Blender/Other/Rocky8ReleaseEnvironment

set -e

@@ -59,7 +59,7 @@ PACKAGES_FOR_LIBS=(
automake
libtool

# TODO: why is this needed?
# Used to set rpath on shared libraries
patchelf

# Builds generated by meson use Ninja for the actual build.
@@ -1,19 +1,8 @@
diff -Naur org/kernels/rtcore_config.h.in embree-3.13.4/kernels/rtcore_config.h.in
--- org/kernels/rtcore_config.h.in 2022-06-14 22:13:52 -0600
+++ embree-3.13.4/kernels/rtcore_config.h.in 2022-06-24 15:20:12 -0600
@@ -14,6 +14,7 @@
#cmakedefine01 EMBREE_MIN_WIDTH
#define RTC_MIN_WIDTH EMBREE_MIN_WIDTH

+#cmakedefine EMBREE_STATIC_LIB
#cmakedefine EMBREE_API_NAMESPACE

#if defined(EMBREE_API_NAMESPACE)
diff --git a/kernels/CMakeLists.txt b/kernels/CMakeLists.txt
index 7c2f43d..106b1d5 100644
--- a/kernels/CMakeLists.txt
+++ b/kernels/CMakeLists.txt
@@ -201,6 +201,12 @@ embree_files(EMBREE_LIBRARY_FILES_AVX512 ${AVX512})
@@ -208,6 +208,12 @@ embree_files(EMBREE_LIBRARY_FILES_AVX512 ${AVX512})
#message("AVX2: ${EMBREE_LIBRARY_FILES_AVX2}")
#message("AVX512: ${EMBREE_LIBRARY_FILES_AVX512}")

@@ -26,7 +15,7 @@ index 7c2f43d..106b1d5 100644
# replaces all .cpp files with a dummy file that includes that .cpp file
# this is to work around an ICC name mangling issue related to lambda functions under windows
MACRO (CreateISADummyFiles list isa)
@@ -277,7 +283,7 @@ IF (EMBREE_ISA_AVX AND EMBREE_LIBRARY_FILES_AVX)
@@ -311,7 +317,7 @@ IF (EMBREE_ISA_AVX AND EMBREE_LIBRARY_FILES_AVX)
ENDIF()
ENDIF()

@@ -35,3 +24,128 @@ index 7c2f43d..106b1d5 100644
DISABLE_STACK_PROTECTOR_FOR_INTERSECTORS(${EMBREE_LIBRARY_FILES_AVX2})
ADD_LIBRARY(embree_avx2 STATIC ${EMBREE_LIBRARY_FILES_AVX2})
TARGET_LINK_LIBRARIES(embree_avx2 PRIVATE tasking)
diff --git a/include/embree4/rtcore_device.h b/include/embree4/rtcore_device.h
index 45bf95583..62ee7787d 100644
--- a/include/embree4/rtcore_device.h
+++ b/include/embree4/rtcore_device.h
@@ -55,6 +55,7 @@ enum RTCDeviceProperty
RTC_DEVICE_PROPERTY_FILTER_FUNCTION_SUPPORTED = 66,
RTC_DEVICE_PROPERTY_IGNORE_INVALID_RAYS_ENABLED = 67,
RTC_DEVICE_PROPERTY_COMPACT_POLYS_ENABLED = 68,
+ RTC_DEVICE_PROPERTY_BACKFACE_CULLING_SPHERES_ENABLED = 69,

RTC_DEVICE_PROPERTY_TRIANGLE_GEOMETRY_SUPPORTED = 96,
RTC_DEVICE_PROPERTY_QUAD_GEOMETRY_SUPPORTED = 97,
diff --git a/kernels/common/device.cpp b/kernels/common/device.cpp
index 3ffac7e37..215ccc961 100644
--- a/kernels/common/device.cpp
+++ b/kernels/common/device.cpp
@@ -170,6 +170,9 @@ namespace embree
#if defined (EMBREE_BACKFACE_CULLING_CURVES)
v += "backfacecullingcurves ";
#endif
+#if defined (EMBREE_BACKFACE_CULLING_SPHERES)
+ v += "backfacecullingspheres ";
+#endif
#if defined(EMBREE_FILTER_FUNCTION)
v += "intersection_filter ";
#endif
@@ -477,6 +480,12 @@ namespace embree
case RTC_DEVICE_PROPERTY_BACKFACE_CULLING_CURVES_ENABLED: return 0;
#endif

+#if defined(EMBREE_BACKFACE_CULLING_SPHERES)
+ case RTC_DEVICE_PROPERTY_BACKFACE_CULLING_SPHERES_ENABLED: return 1;
+#else
+ case RTC_DEVICE_PROPERTY_BACKFACE_CULLING_SPHERES_ENABLED: return 0;
+#endif
+
#if defined(EMBREE_COMPACT_POLYS)
case RTC_DEVICE_PROPERTY_COMPACT_POLYS_ENABLED: return 1;
#else
diff --git a/kernels/config.h.in b/kernels/config.h.in
index f02c90360..ba9acde56 100644
--- a/kernels/config.h.in
+++ b/kernels/config.h.in
@@ -5,6 +5,7 @@
#cmakedefine EMBREE_STAT_COUNTERS
#cmakedefine EMBREE_BACKFACE_CULLING
#cmakedefine EMBREE_BACKFACE_CULLING_CURVES
+#cmakedefine EMBREE_BACKFACE_CULLING_SPHERES
#cmakedefine EMBREE_FILTER_FUNCTION
#cmakedefine EMBREE_IGNORE_INVALID_RAYS
#cmakedefine EMBREE_GEOMETRY_TRIANGLE
diff --git a/kernels/geometry/sphere_intersector.h b/kernels/geometry/sphere_intersector.h
index 074f910a2..30f490818 100644
--- a/kernels/geometry/sphere_intersector.h
+++ b/kernels/geometry/sphere_intersector.h
@@ -106,8 +106,13 @@ namespace embree
const vbool<M> valid_front = valid & (ray.tnear() <= t_front) & (t_front <= ray.tfar);
const vbool<M> valid_back = valid & (ray.tnear() <= t_back ) & (t_back <= ray.tfar);

+#if defined (EMBREE_BACKFACE_CULLING_SPHERES)
+ /* check if there is a first hit */
+ const vbool<M> valid_first = valid_front;
+#else
/* check if there is a first hit */
const vbool<M> valid_first = valid_front | valid_back;
+#endif
if (unlikely(none(valid_first)))
return false;

@@ -120,7 +125,8 @@ namespace embree

/* invoke intersection filter for first hit */
const bool is_hit_first = epilog(valid_first, hit);
-
+
+#if !defined (EMBREE_BACKFACE_CULLING_SPHERES)
/* check for possible second hits before potentially accepted hit */
const vfloat<M> t_second = t_back;
const vbool<M> valid_second = valid_front & valid_back & (t_second <= ray.tfar);
@@ -131,7 +137,9 @@ namespace embree
const Vec3vf<M> Ng_second = td_back * ray_dir - perp;
hit = SphereIntersectorHitM<M> (t_second, Ng_second);
const bool is_hit_second = epilog(valid_second, hit);
-
+#else
+ constexpr bool is_hit_second = false;
+#endif
return is_hit_first | is_hit_second;
}

@@ -186,8 +194,13 @@ namespace embree
const vbool<M> valid_front = valid & (ray.tnear()[k] <= t_front) & (t_front <= ray.tfar[k]);
const vbool<M> valid_back = valid & (ray.tnear()[k] <= t_back ) & (t_back <= ray.tfar[k]);

+#if defined (EMBREE_BACKFACE_CULLING_SPHERES)
+ /* check if there is a first hit */
+ const vbool<M> valid_first = valid_front;
+#else
/* check if there is a first hit */
const vbool<M> valid_first = valid_front | valid_back;
+#endif
if (unlikely(none(valid_first)))
return false;

@@ -200,7 +213,8 @@ namespace embree

/* invoke intersection filter for first hit */
const bool is_hit_first = epilog(valid_first, hit);
-
+
+#if !defined (EMBREE_BACKFACE_CULLING_SPHERES)
/* check for possible second hits before potentially accepted hit */
const vfloat<M> t_second = t_back;
const vbool<M> valid_second = valid_front & valid_back & (t_second <= ray.tfar[k]);
@@ -211,7 +225,9 @@ namespace embree
const Vec3vf<M> Ng_second = td_back * ray_dir - perp;
hit = SphereIntersectorHitM<M> (t_second, Ng_second);
const bool is_hit_second = epilog(valid_second, hit);
-
+#else
+ constexpr bool is_hit_second = false;
+#endif
return is_hit_first | is_hit_second;
}
};
build_files/build_environment/patches/openvdb_metal.diff (normal file, 8007 lines): diff suppressed because it is too large
@@ -80,6 +80,7 @@ set(_CLANG_FIND_COMPONENTS
clangAST
clangLex
clangBasic
clangSupport
)

set(_CLANG_LIBRARIES)
@@ -94,7 +95,9 @@ foreach(COMPONENT ${_CLANG_FIND_COMPONENTS})
PATH_SUFFIXES
lib64 lib
)
list(APPEND _CLANG_LIBRARIES "${CLANG_${UPPERCOMPONENT}_LIBRARY}")
if(CLANG_${UPPERCOMPONENT}_LIBRARY)
list(APPEND _CLANG_LIBRARIES "${CLANG_${UPPERCOMPONENT}_LIBRARY}")
endif()
endforeach()
@@ -23,6 +23,7 @@ SET(_embree_SEARCH_DIRS

FIND_PATH(EMBREE_INCLUDE_DIR
NAMES
embree4/rtcore.h
embree3/rtcore.h
HINTS
${_embree_SEARCH_DIRS}
@@ -30,28 +31,67 @@ FIND_PATH(EMBREE_INCLUDE_DIR
include
)

IF(NOT (("${CMAKE_SYSTEM_PROCESSOR}" STREQUAL "aarch64") OR (APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))))
SET(_embree_SIMD_COMPONENTS
embree_sse42
embree_avx
embree_avx2
)
IF(EXISTS ${EMBREE_INCLUDE_DIR}/embree4/rtcore_config.h)
SET(EMBREE_MAJOR_VERSION 4)
ELSE()
SET(EMBREE_MAJOR_VERSION 3)
ENDIF()

SET(_embree_FIND_COMPONENTS
embree3
${_embree_SIMD_COMPONENTS}
lexers
math
simd
sys
tasking
)
IF(EMBREE_INCLUDE_DIR)
FILE(READ ${EMBREE_INCLUDE_DIR}/embree${EMBREE_MAJOR_VERSION}/rtcore_config.h _embree_config_header)
IF(_embree_config_header MATCHES "#define EMBREE_STATIC_LIB")
SET(EMBREE_STATIC_LIB TRUE)
ELSE()
SET(EMBREE_STATIC_LIB FALSE)
ENDIF()
IF(_embree_config_header MATCHES "#define EMBREE_SYCL_SUPPORT")
SET(EMBREE_SYCL_SUPPORT TRUE)
ELSE()
SET(EMBREE_SYCL_SUPPORT FALSE)
ENDIF()
UNSET(_embree_config_header)
ENDIF()

IF(EMBREE_STATIC_LIB)
IF(NOT (("${CMAKE_SYSTEM_PROCESSOR}" STREQUAL "aarch64") OR (APPLE AND ("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64"))))
SET(_embree_SIMD_COMPONENTS
embree_sse42
embree_avx
embree_avx2
)
ENDIF()

IF(EMBREE_SYCL_SUPPORT)
SET(_embree_GPU_COMPONENTS
embree4_sycl
embree_rthwif
)
ENDIF()

SET(_embree_FIND_COMPONENTS
embree${EMBREE_MAJOR_VERSION}
${_embree_SIMD_COMPONENTS}
${_embree_GPU_COMPONENTS}
lexers
math
simd
sys
tasking
)
ELSE()
SET(_embree_FIND_COMPONENTS
embree${EMBREE_MAJOR_VERSION}
)
IF(EMBREE_SYCL_SUPPORT)
LIST(APPEND _embree_FIND_COMPONENTS
embree4_sycl
)
ENDIF()
ENDIF()

SET(_embree_LIBRARIES)
FOREACH(COMPONENT ${_embree_FIND_COMPONENTS})
STRING(TOUPPER ${COMPONENT} UPPERCOMPONENT)

FIND_LIBRARY(EMBREE_${UPPERCOMPONENT}_LIBRARY
NAMES
${COMPONENT}
@@ -60,18 +100,9 @@ FOREACH(COMPONENT ${_embree_FIND_COMPONENTS})
PATH_SUFFIXES
lib64 lib
)
IF(NOT EMBREE_${UPPERCOMPONENT}_LIBRARY)
IF(EMBREE_EMBREE3_LIBRARY)
# If we can't find all the static libraries, try to fall back to the shared library if found.
# This allows building with a shared embree library
SET(_embree_LIBRARIES ${EMBREE_EMBREE3_LIBRARY})
BREAK()
ENDIF()
ENDIF()
LIST(APPEND _embree_LIBRARIES "${EMBREE_${UPPERCOMPONENT}_LIBRARY}")
ENDFOREACH()


# handle the QUIETLY and REQUIRED arguments and set EMBREE_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
@@ -85,6 +116,9 @@ ENDIF()

MARK_AS_ADVANCED(
EMBREE_INCLUDE_DIR
EMBREE_MAJOR_VERSION
EMBREE_SYCL_SUPPORT
EMBREE_STATIC_LIB
)

FOREACH(COMPONENT ${_embree_FIND_COMPONENTS})
@@ -120,9 +120,8 @@ UNSET(_openexr_libs_ver)

IF(OPENEXR_VERSION VERSION_GREATER_EQUAL "3.0.0")
# For OpenEXR 3.x, we also need to find the now separate Imath library.
# For simplicity we add it to the OpenEXR includes and libraries, as we
# have no direct dependency on Imath and it's simpler to support both
# 2.x and 3.x this way.
# For simplicity we also add it to the OpenEXR includes and libraries,
# as it's simpler to support both 2.x and 3.x this way.

# Find include directory
FIND_PATH(IMATH_INCLUDE_DIR
@@ -169,6 +168,12 @@ IF(OPENEXR_VERSION VERSION_GREATER_EQUAL "3.0.0")
UNSET(_imath_build_specification)
ENDIF()

IF(OPENEXR_VERSION VERSION_GREATER_EQUAL "3.0.0")
SET(IMATH_LIBRARIES ${IMATH_LIBRARY})
ELSE()
SET(IMATH_LIBRARIES ${OPENEXR_IMATH_LIBRARY})
ENDIF()

# handle the QUIETLY and REQUIRED arguments and set OPENEXR_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
@@ -196,6 +201,7 @@ MARK_AS_ADVANCED(
OPENEXR_VERSION
IMATH_INCLUDE_DIR
IMATH_LIBRARY
IMATH_LIBRARIES
)
FOREACH(COMPONENT ${_openexr_FIND_COMPONENTS})
STRING(TOUPPER ${COMPONENT} UPPERCOMPONENT)
@@ -108,6 +108,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(SYCL

IF(SYCL_FOUND)
SET(SYCL_INCLUDE_DIR ${SYCL_INCLUDE_DIR} ${SYCL_INCLUDE_DIR}/sycl)
SET(SYCL_LIBRARIES ${SYCL_LIBRARY})
ELSE()
SET(SYCL_SYCL_FOUND FALSE)
ENDIF()
@@ -36,6 +36,7 @@ set(WITH_IMAGE_WEBP OFF CACHE BOOL "" FORCE)
set(WITH_INPUT_IME OFF CACHE BOOL "" FORCE)
set(WITH_INPUT_NDOF OFF CACHE BOOL "" FORCE)
set(WITH_INTERNATIONAL OFF CACHE BOOL "" FORCE)
set(WITH_IO_PLY OFF CACHE BOOL "" FORCE)
set(WITH_IO_STL OFF CACHE BOOL "" FORCE)
set(WITH_IO_WAVEFRONT_OBJ OFF CACHE BOOL "" FORCE)
set(WITH_IO_GPENCIL OFF CACHE BOOL "" FORCE)
@@ -52,7 +53,6 @@ set(WITH_OPENAL OFF CACHE BOOL "" FORCE)
set(WITH_OPENCOLLADA OFF CACHE BOOL "" FORCE)
set(WITH_OPENCOLORIO OFF CACHE BOOL "" FORCE)
set(WITH_OPENIMAGEDENOISE OFF CACHE BOOL "" FORCE)
set(WITH_OPENIMAGEIO OFF CACHE BOOL "" FORCE)
set(WITH_OPENMP OFF CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV OFF CACHE BOOL "" FORCE)
set(WITH_OPENVDB OFF CACHE BOOL "" FORCE)
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2022 Blender Foundation. All rights reserved.
# Copyright 2022 Blender Foundation

# This file is used to test the system for headers & symbols.
# Variables should use the `HAVE_` prefix.
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2006 Blender Foundation. All rights reserved.
# Copyright 2006 Blender Foundation

macro(list_insert_after
list_id item_check item_add
@@ -702,6 +702,7 @@ macro(remove_strict_flags)
endif()

if(MSVC)
remove_cc_flag(/w34100) # Restore warn C4100 (unreferenced formal parameter) back to w4
remove_cc_flag(/w34189) # Restore warn C4189 (unused variable) back to w4
endif()

@@ -721,7 +722,7 @@ macro(remove_extra_strict_flags)
endif()

if(MSVC)
# TODO
remove_cc_flag(/w34100) # Restore warn C4100 (unreferenced formal parameter) back to w4
endif()
endmacro()

@@ -1090,7 +1091,7 @@ function(msgfmt_simple
add_custom_command(
OUTPUT ${_file_to}
COMMAND ${CMAKE_COMMAND} -E make_directory ${_file_to_path}
COMMAND "$<TARGET_FILE:msgfmt>" ${_file_from} ${_file_to}
COMMAND ${CMAKE_COMMAND} -E env ${PLATFORM_ENV_BUILD} "$<TARGET_FILE:msgfmt>" ${_file_from} ${_file_to}
DEPENDS msgfmt ${_file_from})

set_source_files_properties(${_file_to} PROPERTIES GENERATED TRUE)
@@ -1299,16 +1300,29 @@ macro(windows_install_shared_manifest)
endif()
if(WINDOWS_INSTALL_DEBUG)
set(WINDOWS_CONFIGURATIONS "${WINDOWS_CONFIGURATIONS};Debug")
list(APPEND WINDOWS_SHARED_MANIFEST_DEBUG ${WINDOWS_INSTALL_FILES})
endif()
if(WINDOWS_INSTALL_RELEASE)
list(APPEND WINDOWS_SHARED_MANIFEST_RELEASE ${WINDOWS_INSTALL_FILES})
set(WINDOWS_CONFIGURATIONS "${WINDOWS_CONFIGURATIONS};Release;RelWithDebInfo;MinSizeRel")
endif()
install(FILES ${WINDOWS_INSTALL_FILES}
CONFIGURATIONS ${WINDOWS_CONFIGURATIONS}
DESTINATION "./blender.shared"
)
if(NOT WITH_PYTHON_MODULE)
# Blender executable with manifest.
if(WINDOWS_INSTALL_DEBUG)
list(APPEND WINDOWS_SHARED_MANIFEST_DEBUG ${WINDOWS_INSTALL_FILES})
endif()
if(WINDOWS_INSTALL_RELEASE)
list(APPEND WINDOWS_SHARED_MANIFEST_RELEASE ${WINDOWS_INSTALL_FILES})
endif()
install(FILES ${WINDOWS_INSTALL_FILES}
CONFIGURATIONS ${WINDOWS_CONFIGURATIONS}
DESTINATION "./blender.shared"
)
else()
# Python module without manifest.
install(FILES ${WINDOWS_INSTALL_FILES}
CONFIGURATIONS ${WINDOWS_CONFIGURATIONS}
DESTINATION "./bpy"
)
endif()
endmacro()

macro(windows_generate_manifest)
@@ -1325,24 +1339,49 @@ macro(windows_generate_manifest)
endmacro()

macro(windows_generate_shared_manifest)
windows_generate_manifest(
FILES "${WINDOWS_SHARED_MANIFEST_DEBUG}"
OUTPUT "${CMAKE_BINARY_DIR}/Debug/blender.shared.manifest"
NAME "blender.shared"
)
windows_generate_manifest(
FILES "${WINDOWS_SHARED_MANIFEST_RELEASE}"
OUTPUT "${CMAKE_BINARY_DIR}/Release/blender.shared.manifest"
NAME "blender.shared"
)
install(
FILES ${CMAKE_BINARY_DIR}/Release/blender.shared.manifest
DESTINATION "./blender.shared"
CONFIGURATIONS Release;RelWithDebInfo;MinSizeRel
)
install(
FILES ${CMAKE_BINARY_DIR}/Debug/blender.shared.manifest
DESTINATION "./blender.shared"
CONFIGURATIONS Debug
)
if(WINDOWS_SHARED_MANIFEST_DEBUG)
windows_generate_manifest(
FILES "${WINDOWS_SHARED_MANIFEST_DEBUG}"
OUTPUT "${CMAKE_BINARY_DIR}/Debug/blender.shared.manifest"
NAME "blender.shared"
)
install(
FILES ${CMAKE_BINARY_DIR}/Debug/blender.shared.manifest
DESTINATION "./blender.shared"
CONFIGURATIONS Debug
)
endif()
if(WINDOWS_SHARED_MANIFEST_RELEASE)
windows_generate_manifest(
FILES "${WINDOWS_SHARED_MANIFEST_RELEASE}"
OUTPUT "${CMAKE_BINARY_DIR}/Release/blender.shared.manifest"
NAME "blender.shared"
)
install(
FILES ${CMAKE_BINARY_DIR}/Release/blender.shared.manifest
DESTINATION "./blender.shared"
CONFIGURATIONS Release;RelWithDebInfo;MinSizeRel
)
endif()
endmacro()

macro(windows_process_platform_bundled_libraries library_deps)
if(NOT "${library_deps}" STREQUAL "")
set(next_library_mode "ALL")
foreach(library ${library_deps})
string(TOUPPER "${library}" library_upper)
if(("${library_upper}" STREQUAL "RELEASE") OR
("${library_upper}" STREQUAL "DEBUG") OR
("${library_upper}" STREQUAL "ALL"))
set(next_library_mode "${library_upper}")
else()
windows_install_shared_manifest(
FILES ${library}
${next_library_mode}
)
set(next_library_mode "ALL")
endif()
endforeach()
endif()
endmacro()
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2016 Blender Foundation. All rights reserved.
# Copyright 2016 Blender Foundation

# Libraries configuration for Apple.

@@ -245,6 +245,7 @@ if(WITH_BOOST)
if(WITH_USD AND USD_PYTHON_SUPPORT)
list(APPEND _boost_FIND_COMPONENTS python${PYTHON_VERSION_NO_DOTS})
endif()
set(Boost_NO_WARN_NEW_VERSIONS ON)
find_package(Boost COMPONENTS ${_boost_FIND_COMPONENTS})

# Boost Python is separate to avoid linking Python into tests that don't need it.
@@ -270,19 +271,7 @@ if(WITH_PUGIXML)
find_package(PugiXML REQUIRED)
endif()

if(WITH_OPENIMAGEIO)
find_package(OpenImageIO)
list(APPEND OPENIMAGEIO_LIBRARIES
${PNG_LIBRARIES}
${JPEG_LIBRARIES}
${TIFF_LIBRARY}
${OPENEXR_LIBRARIES}
${OPENJPEG_LIBRARIES}
${ZLIB_LIBRARIES}
)
set(OPENIMAGEIO_DEFINITIONS "-DOIIO_STATIC_BUILD")
set(OPENIMAGEIO_IDIFF "${LIBDIR}/openimageio/bin/idiff")
endif()
find_package(OpenImageIO REQUIRED)
add_bundled_libraries(openimageio/lib)

if(WITH_OPENCOLORIO)
@@ -341,6 +330,7 @@ if(WITH_CYCLES AND WITH_CYCLES_EMBREE)
endforeach()
set(EMBREE_LIBRARIES ${_embree_libraries_force_load})
endif()
add_bundled_libraries(embree/lib)

if(WITH_OPENIMAGEDENOISE)
find_package(OpenImageDenoise REQUIRED)
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2016 Blender Foundation. All rights reserved.
# Copyright 2016 Blender Foundation

# Xcode and system configuration for Apple.
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2022 Blender Foundation. All rights reserved.
# Copyright 2022 Blender Foundation

# Auto update existing CMake caches for new libraries.
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2016 Blender Foundation. All rights reserved.
# Copyright 2016 Blender Foundation

# Libraries configuration for any *nix system including Linux and Unix (excluding APPLE).

@@ -317,7 +317,7 @@ if(WITH_CYCLES AND WITH_CYCLES_OSL)
endif()
endif()

if(WITH_CYCLES AND WITH_CYCLES_DEVICE_ONEAPI)
if(WITH_CYCLES AND (WITH_CYCLES_DEVICE_ONEAPI OR (WITH_CYCLES_EMBREE AND EMBREE_SYCL_SUPPORT)))
set(CYCLES_LEVEL_ZERO ${LIBDIR}/level-zero CACHE PATH "Path to Level Zero installation")
if(EXISTS ${CYCLES_LEVEL_ZERO} AND NOT LEVEL_ZERO_ROOT_DIR)
set(LEVEL_ZERO_ROOT_DIR ${CYCLES_LEVEL_ZERO})
@@ -394,6 +394,7 @@ if(WITH_BOOST)
list(APPEND __boost_packages python${PYTHON_VERSION_NO_DOTS})
endif()
list(APPEND __boost_packages system)
set(Boost_NO_WARN_NEW_VERSIONS ON)
find_package(Boost 1.48 COMPONENTS ${__boost_packages})
if(NOT Boost_FOUND)
# try to find non-multithreaded if -mt not found, this flag
@@ -438,32 +439,7 @@ if(WITH_IMAGE_WEBP)
set_and_warn_library_found("WebP" WEBP_FOUND WITH_IMAGE_WEBP)
endif()

if(WITH_OPENIMAGEIO)
find_package_wrapper(OpenImageIO)
set(OPENIMAGEIO_LIBRARIES
${OPENIMAGEIO_LIBRARIES}
${PNG_LIBRARIES}
${JPEG_LIBRARIES}
${ZLIB_LIBRARIES}
)

set(OPENIMAGEIO_DEFINITIONS "")

if(WITH_BOOST)
list(APPEND OPENIMAGEIO_LIBRARIES "${BOOST_LIBRARIES}")
endif()
if(WITH_IMAGE_TIFF)
list(APPEND OPENIMAGEIO_LIBRARIES "${TIFF_LIBRARY}")
endif()
if(WITH_IMAGE_OPENEXR)
list(APPEND OPENIMAGEIO_LIBRARIES "${OPENEXR_LIBRARIES}")
endif()
if(WITH_IMAGE_WEBP)
list(APPEND OPENIMAGEIO_LIBRARIES "${WEBP_LIBRARIES}")
endif()

set_and_warn_library_found("OPENIMAGEIO" OPENIMAGEIO_FOUND WITH_OPENIMAGEIO)
endif()
find_package_wrapper(OpenImageIO REQUIRED)
add_bundled_libraries(openimageio/lib)

if(WITH_OPENCOLORIO)
@@ -477,6 +453,7 @@ add_bundled_libraries(opencolorio/lib)
if(WITH_CYCLES AND WITH_CYCLES_EMBREE)
find_package(Embree 3.8.0 REQUIRED)
endif()
add_bundled_libraries(embree/lib)

if(WITH_OPENIMAGEDENOISE)
find_package_wrapper(OpenImageDenoise)
@@ -665,15 +642,29 @@ if(WITH_GHOST_WAYLAND)
pkg_check_modules(wayland-egl wayland-egl)
pkg_check_modules(wayland-scanner wayland-scanner)
pkg_check_modules(wayland-cursor wayland-cursor)
pkg_check_modules(wayland-protocols wayland-protocols>=1.15)
pkg_check_modules(wayland-protocols wayland-protocols>=1.31)
pkg_get_variable(WAYLAND_PROTOCOLS_DIR wayland-protocols pkgdatadir)
else()
# NOTE: this file must always refer to the newest API which is used, so older
# `wayland-protocols` are never found and used which then fail to locate required protocols.
set(_wayland_protocols_reference_file "staging/fractional-scale/fractional-scale-v1.xml")

# Reset the protocols directory the reference file from `wayland-protocols` is not found.
# This avoids developers having build failures when a cached directory is used that no
# longer contains the required file.
if(DEFINED WAYLAND_PROTOCOLS_DIR)
if(NOT EXISTS "${WAYLAND_PROTOCOLS_DIR}/${_wayland_protocols_reference_file}")
unset(WAYLAND_PROTOCOLS_DIR CACHE)
endif()
endif()

# Rocky8 packages have too old a version, a newer version exist in the pre-compiled libraries.
find_path(WAYLAND_PROTOCOLS_DIR
NAMES unstable/xdg-decoration/xdg-decoration-unstable-v1.xml
NAMES ${_wayland_protocols_reference_file}
PATH_SUFFIXES share/wayland-protocols
PATHS ${LIBDIR}/wayland-protocols
)
unset(_wayland_protocols_reference_file)

if(EXISTS ${WAYLAND_PROTOCOLS_DIR})
set(wayland-protocols_FOUND ON)
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2016 Blender Foundation. All rights reserved.
|
||||
# Copyright 2016 Blender Foundation
|
||||
|
||||
# Libraries configuration for Windows.
|
||||
|
||||
@@ -114,12 +114,13 @@ add_definitions(-D_WIN32_WINNT=0x603)
|
||||
# First generate the manifest for tests since it will not need the dependency on the CRT.
|
||||
configure_file(${CMAKE_SOURCE_DIR}/release/windows/manifest/blender.exe.manifest.in ${CMAKE_CURRENT_BINARY_DIR}/tests.exe.manifest @ONLY)
|
||||
|
||||
if(WITH_WINDOWS_BUNDLE_CRT)
|
||||
set(CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_SKIP TRUE)
|
||||
set(CMAKE_INSTALL_UCRT_LIBRARIES TRUE)
|
||||
set(CMAKE_INSTALL_OPENMP_LIBRARIES ${WITH_OPENMP})
|
||||
include(InstallRequiredSystemLibraries)
|
||||
# Always detect CRT paths, but only manually install with WITH_WINDOWS_BUNDLE_CRT.
|
||||
set(CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_SKIP TRUE)
|
||||
set(CMAKE_INSTALL_UCRT_LIBRARIES TRUE)
|
||||
set(CMAKE_INSTALL_OPENMP_LIBRARIES ${WITH_OPENMP})
|
||||
include(InstallRequiredSystemLibraries)
|
||||
|
||||
if(WITH_WINDOWS_BUNDLE_CRT)
|
||||
# ucrtbase(d).dll cannot be in the manifest, due to the way windows 10 handles
|
||||
# redirects for this dll, for details see #88813.
|
||||
foreach(lib ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS})
|
||||
@@ -141,7 +142,9 @@ if(WITH_WINDOWS_BUNDLE_CRT)
|
||||
install(FILES ${CMAKE_BINARY_DIR}/blender.crt.manifest DESTINATION ./blender.crt)
|
||||
set(BUNDLECRT "<dependency><dependentAssembly><assemblyIdentity type=\"win32\" name=\"blender.crt\" version=\"1.0.0.0\" /></dependentAssembly></dependency>")
|
||||
endif()
|
||||
set(BUNDLECRT "${BUNDLECRT}<dependency><dependentAssembly><assemblyIdentity type=\"win32\" name=\"blender.shared\" version=\"1.0.0.0\" /></dependentAssembly></dependency>")
|
||||
if(NOT WITH_PYTHON_MODULE)
|
||||
set(BUNDLECRT "${BUNDLECRT}<dependency><dependentAssembly><assemblyIdentity type=\"win32\" name=\"blender.shared\" version=\"1.0.0.0\" /></dependentAssembly></dependency>")
|
||||
endif()
|
||||
configure_file(${CMAKE_SOURCE_DIR}/release/windows/manifest/blender.exe.manifest.in ${CMAKE_CURRENT_BINARY_DIR}/blender.exe.manifest @ONLY)
|
||||
|
||||
|
||||
@@ -178,8 +181,8 @@ if(NOT MSVC_CLANG)
|
||||
endif()
|
||||
|
||||
if(WITH_WINDOWS_SCCACHE AND CMAKE_VS_MSBUILD_COMMAND)
|
||||
message(WARNING "Disabling sccache, sccache is not supported with msbuild")
|
||||
set(WITH_WINDOWS_SCCACHE OFF)
|
||||
message(WARNING "Disabling sccache, sccache is not supported with msbuild")
|
||||
set(WITH_WINDOWS_SCCACHE OFF)
|
||||
endif()
|
||||
|
||||
# Debug Symbol format
|
||||
@@ -522,6 +525,28 @@ if(WITH_PYTHON)
|
||||
set(PYTHON_LIBRARIES debug "${PYTHON_LIBRARY_DEBUG}" optimized "${PYTHON_LIBRARY}" )
|
||||
endif()
|
||||
|
||||
if(NOT WITH_WINDOWS_FIND_MODULES)
|
||||
# even if boost is off, we still need to install the dlls when we use our lib folder since
|
||||
# some of the other dependencies may need them. For this to work, BOOST_VERSION,
|
||||
# BOOST_POSTFIX, and BOOST_DEBUG_POSTFIX need to be set.
|
||||
set(BOOST ${LIBDIR}/boost)
|
||||
set(BOOST_INCLUDE_DIR ${BOOST}/include)
|
||||
set(BOOST_LIBPATH ${BOOST}/lib)
|
||||
set(BOOST_VERSION_HEADER ${BOOST_INCLUDE_DIR}/boost/version.hpp)
|
||||
if(EXISTS ${BOOST_VERSION_HEADER})
|
||||
file(STRINGS "${BOOST_VERSION_HEADER}" BOOST_LIB_VERSION REGEX "#define BOOST_LIB_VERSION ")
|
||||
if(BOOST_LIB_VERSION MATCHES "#define BOOST_LIB_VERSION \"([0-9_]+)\"")
|
||||
set(BOOST_VERSION "${CMAKE_MATCH_1}")
|
||||
endif()
|
||||
endif()
|
||||
if(NOT BOOST_VERSION)
|
||||
message(FATAL_ERROR "Unable to determine Boost version")
|
||||
endif()
|
||||
set(BOOST_POSTFIX "vc142-mt-x64-${BOOST_VERSION}")
|
||||
set(BOOST_DEBUG_POSTFIX "vc142-mt-gyd-x64-${BOOST_VERSION}")
|
||||
set(BOOST_PREFIX "")
|
||||
endif()
|
||||
|
||||
if(WITH_BOOST)
|
||||
if(WITH_CYCLES AND WITH_CYCLES_OSL)
|
||||
set(boost_extra_libs wave)
|
||||
@@ -537,22 +562,6 @@ if(WITH_BOOST)
|
||||
endif()
|
||||
if(NOT Boost_FOUND)
|
||||
warn_hardcoded_paths(BOOST)
|
||||
set(BOOST ${LIBDIR}/boost)
|
||||
set(BOOST_INCLUDE_DIR ${BOOST}/include)
|
||||
set(BOOST_LIBPATH ${BOOST}/lib)
|
||||
set(BOOST_VERSION_HEADER ${BOOST_INCLUDE_DIR}/boost/version.hpp)
|
||||
if(EXISTS ${BOOST_VERSION_HEADER})
|
||||
file(STRINGS "${BOOST_VERSION_HEADER}" BOOST_LIB_VERSION REGEX "#define BOOST_LIB_VERSION ")
|
||||
if(BOOST_LIB_VERSION MATCHES "#define BOOST_LIB_VERSION \"([0-9_]+)\"")
|
||||
set(BOOST_VERSION "${CMAKE_MATCH_1}")
|
||||
endif()
|
||||
endif()
|
||||
if(NOT BOOST_VERSION)
|
||||
message(FATAL_ERROR "Unable to determine Boost version")
|
||||
endif()
|
||||
set(BOOST_POSTFIX "vc142-mt-x64-${BOOST_VERSION}")
|
||||
set(BOOST_DEBUG_POSTFIX "vc142-mt-gyd-x64-${BOOST_VERSION}")
|
||||
set(BOOST_PREFIX "")
|
||||
# This is file new in 3.4 if it does not exist, assume we are building against 3.3 libs
|
||||
set(BOOST_34_TRIGGER_FILE ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_python310-${BOOST_DEBUG_POSTFIX}.lib)
|
||||
if(NOT EXISTS ${BOOST_34_TRIGGER_FILE})
|
||||
@@ -602,25 +611,18 @@ if(WITH_BOOST)
|
||||
set(BOOST_DEFINITIONS "-DBOOST_ALL_NO_LIB")
|
||||
endif()
|
||||
|
||||
if(WITH_OPENIMAGEIO)
|
||||
windows_find_package(OpenImageIO)
|
||||
if(NOT OpenImageIO_FOUND)
|
||||
set(OPENIMAGEIO ${LIBDIR}/OpenImageIO)
|
||||
set(OPENIMAGEIO_LIBPATH ${OPENIMAGEIO}/lib)
|
||||
set(OPENIMAGEIO_INCLUDE_DIR ${OPENIMAGEIO}/include)
|
||||
set(OPENIMAGEIO_INCLUDE_DIRS ${OPENIMAGEIO_INCLUDE_DIR})
|
||||
set(OIIO_OPTIMIZED optimized ${OPENIMAGEIO_LIBPATH}/OpenImageIO.lib optimized ${OPENIMAGEIO_LIBPATH}/OpenImageIO_Util.lib)
|
||||
set(OIIO_DEBUG debug ${OPENIMAGEIO_LIBPATH}/OpenImageIO_d.lib debug ${OPENIMAGEIO_LIBPATH}/OpenImageIO_Util_d.lib)
|
||||
set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG})
|
||||
endif()
|
||||
set(OPENIMAGEIO_DEFINITIONS "-DUSE_TBB=0")
|
||||
windows_find_package(OpenImageIO)
|
||||
if(NOT OpenImageIO_FOUND)
|
||||
set(OPENIMAGEIO ${LIBDIR}/OpenImageIO)
|
||||
set(OPENIMAGEIO_LIBPATH ${OPENIMAGEIO}/lib)
|
||||
set(OPENIMAGEIO_INCLUDE_DIR ${OPENIMAGEIO}/include)
|
||||
set(OPENIMAGEIO_INCLUDE_DIRS ${OPENIMAGEIO_INCLUDE_DIR})
|
||||
set(OIIO_OPTIMIZED optimized ${OPENIMAGEIO_LIBPATH}/OpenImageIO.lib optimized ${OPENIMAGEIO_LIBPATH}/OpenImageIO_Util.lib)
|
||||
set(OIIO_DEBUG debug ${OPENIMAGEIO_LIBPATH}/OpenImageIO_d.lib debug ${OPENIMAGEIO_LIBPATH}/OpenImageIO_Util_d.lib)
|
||||
set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG})
|
||||
set(OPENIMAGEIO_IDIFF "${OPENIMAGEIO}/bin/idiff.exe")
|
||||
# If the .dll does not exist, assume it is a static OIIO
|
||||
if(NOT EXISTS ${OPENIMAGEIO}/bin/OpenImageIO.dll)
|
||||
add_definitions(-DOIIO_STATIC_DEFINE)
|
||||
endif()
|
||||
add_definitions(-DOIIO_NO_SSE=1)
|
||||
endif()
|
||||
add_definitions(-DOIIO_NO_SSE=1)
|
||||
|
||||
if(WITH_LLVM)
|
||||
set(LLVM_ROOT_DIR ${LIBDIR}/llvm CACHE PATH "Path to the LLVM installation")
|
||||
@@ -848,27 +850,75 @@ endif()
|
||||
if(WITH_CYCLES AND WITH_CYCLES_EMBREE)
|
||||
windows_find_package(Embree)
|
||||
if(NOT Embree_FOUND)
|
||||
set(EMBREE_ROOT_DIR ${LIBDIR}/embree)
|
||||
set(EMBREE_INCLUDE_DIRS ${LIBDIR}/embree/include)
|
||||
set(EMBREE_LIBRARIES
|
||||
optimized ${LIBDIR}/embree/lib/embree3.lib
|
||||
optimized ${LIBDIR}/embree/lib/embree_avx2.lib
|
||||
optimized ${LIBDIR}/embree/lib/embree_avx.lib
|
||||
optimized ${LIBDIR}/embree/lib/embree_sse42.lib
|
||||
optimized ${LIBDIR}/embree/lib/lexers.lib
|
||||
optimized ${LIBDIR}/embree/lib/math.lib
|
||||
optimized ${LIBDIR}/embree/lib/simd.lib
|
||||
optimized ${LIBDIR}/embree/lib/sys.lib
|
||||
optimized ${LIBDIR}/embree/lib/tasking.lib
|
||||
|
||||
debug ${LIBDIR}/embree/lib/embree3_d.lib
|
||||
debug ${LIBDIR}/embree/lib/embree_avx2_d.lib
|
||||
debug ${LIBDIR}/embree/lib/embree_avx_d.lib
|
||||
debug ${LIBDIR}/embree/lib/embree_sse42_d.lib
|
||||
debug ${LIBDIR}/embree/lib/lexers_d.lib
|
||||
debug ${LIBDIR}/embree/lib/math_d.lib
|
||||
debug ${LIBDIR}/embree/lib/simd_d.lib
|
||||
debug ${LIBDIR}/embree/lib/sys_d.lib
|
||||
debug ${LIBDIR}/embree/lib/tasking_d.lib
|
||||
if(EXISTS ${LIBDIR}/embree/include/embree4/rtcore_config.h)
|
||||
set(EMBREE_MAJOR_VERSION 4)
|
||||
else()
|
||||
set(EMBREE_MAJOR_VERSION 3)
|
||||
endif()
|
||||
|
||||
file(READ ${LIBDIR}/embree/include/embree${EMBREE_MAJOR_VERSION}/rtcore_config.h _embree_config_header)
|
||||
if(_embree_config_header MATCHES "#define EMBREE_STATIC_LIB")
|
||||
set(EMBREE_STATIC_LIB TRUE)
|
||||
else()
|
||||
set(EMBREE_STATIC_LIB FALSE)
|
||||
endif()
|
||||
|
||||
if(_embree_config_header MATCHES "#define EMBREE_SYCL_SUPPORT")
|
||||
set(EMBREE_SYCL_SUPPORT TRUE)
|
||||
else()
|
||||
set(EMBREE_SYCL_SUPPORT FALSE)
|
||||
endif()
|
||||
|
||||
set(EMBREE_LIBRARIES
|
||||
optimized ${LIBDIR}/embree/lib/embree${EMBREE_MAJOR_VERSION}.lib
|
||||
debug ${LIBDIR}/embree/lib/embree${EMBREE_MAJOR_VERSION}_d.lib
|
||||
)
|
||||
|
||||
if(EMBREE_SYCL_SUPPORT)
|
||||
set(EMBREE_LIBRARIES
|
||||
${EMBREE_LIBRARIES}
|
||||
optimized ${LIBDIR}/embree/lib/embree4_sycl.lib
|
||||
debug ${LIBDIR}/embree/lib/embree4_sycl_d.lib
|
||||
)
|
||||
endif()
|
||||
|
||||
if(EMBREE_STATIC_LIB)
|
||||
set(EMBREE_LIBRARIES
|
||||
${EMBREE_LIBRARIES}
|
||||
optimized ${LIBDIR}/embree/lib/embree_avx2.lib
|
||||
optimized ${LIBDIR}/embree/lib/embree_avx.lib
|
||||
optimized ${LIBDIR}/embree/lib/embree_sse42.lib
|
||||
optimized ${LIBDIR}/embree/lib/lexers.lib
|
||||
optimized ${LIBDIR}/embree/lib/math.lib
|
||||
optimized ${LIBDIR}/embree/lib/simd.lib
|
||||
optimized ${LIBDIR}/embree/lib/sys.lib
|
||||
optimized ${LIBDIR}/embree/lib/tasking.lib
|
||||
debug ${LIBDIR}/embree/lib/embree_avx2_d.lib
|
||||
debug ${LIBDIR}/embree/lib/embree_avx_d.lib
|
||||
debug ${LIBDIR}/embree/lib/embree_sse42_d.lib
|
||||
debug ${LIBDIR}/embree/lib/lexers_d.lib
|
||||
debug ${LIBDIR}/embree/lib/math_d.lib
|
||||
debug ${LIBDIR}/embree/lib/simd_d.lib
|
||||
debug ${LIBDIR}/embree/lib/sys_d.lib
|
||||
debug ${LIBDIR}/embree/lib/tasking_d.lib
|
||||
)
|
||||
|
||||
if(EMBREE_SYCL_SUPPORT)
|
||||
set(EMBREE_LIBRARIES
|
||||
${EMBREE_LIBRARIES}
|
||||
optimized ${LIBDIR}/embree/lib/embree_rthwif.lib
|
||||
debug ${LIBDIR}/embree/lib/embree_rthwif_d.lib
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
if(NOT EMBREE_STATIC_LIB)
|
||||
list(APPEND PLATFORM_BUNDLED_LIBRARIES
|
||||
RELEASE ${EMBREE_ROOT_DIR}/bin/embree${EMBREE_MAJOR_VERSION}.dll
|
||||
DEBUG ${EMBREE_ROOT_DIR}/bin/embree${EMBREE_MAJOR_VERSION}_d.dll
|
||||
)
|
||||
endif()
|
||||
endif()
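The Embree block above derives everything from the installed headers: the major version from whichever `rtcore_config.h` exists, and the static/SYCL flavour from the defines inside it. A purely illustrative Python sketch of the same probe (the library path is an assumption, not part of the build):

```python
from pathlib import Path

def probe_embree(libdir: str) -> dict:
    """Restate the header probe above: Embree 3 vs. 4, static build, SYCL support."""
    embree = Path(libdir) / "embree"
    # Embree 4 installs its header under include/embree4/, Embree 3 under include/embree3/.
    major = 4 if (embree / "include" / "embree4" / "rtcore_config.h").exists() else 3
    config = (embree / "include" / f"embree{major}" / "rtcore_config.h").read_text()
    return {
        "major_version": major,
        "static_lib": "#define EMBREE_STATIC_LIB" in config,
        "sycl_support": "#define EMBREE_SYCL_SUPPORT" in config,
    }
```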
|
||||
@@ -901,11 +951,11 @@ endif()
|
||||
|
||||
if(WINDOWS_PYTHON_DEBUG)
|
||||
# Include the system scripts in the blender_python_system_scripts project.
|
||||
file(GLOB_RECURSE inFiles "${CMAKE_SOURCE_DIR}/release/scripts/*.*" )
|
||||
file(GLOB_RECURSE inFiles "${CMAKE_SOURCE_DIR}/scripts/*.*" )
|
||||
add_custom_target(blender_python_system_scripts SOURCES ${inFiles})
|
||||
foreach(_source IN ITEMS ${inFiles})
|
||||
get_filename_component(_source_path "${_source}" PATH)
|
||||
string(REPLACE "${CMAKE_SOURCE_DIR}/release/scripts/" "" _source_path "${_source_path}")
|
||||
string(REPLACE "${CMAKE_SOURCE_DIR}/scripts/" "" _source_path "${_source_path}")
|
||||
string(REPLACE "/" "\\" _group_path "${_source_path}")
|
||||
source_group("${_group_path}" FILES "${_source}")
|
||||
endforeach()
|
||||
@@ -940,7 +990,7 @@ if(WINDOWS_PYTHON_DEBUG)
|
||||
file(WRITE ${USER_PROPS_FILE} "<?xml version=\"1.0\" encoding=\"utf-8\"?>
|
||||
<Project DefaultTargets=\"Build\" xmlns=\"http://schemas.microsoft.com/developer/msbuild/2003\">
|
||||
<PropertyGroup>
|
||||
<LocalDebuggerCommandArguments>-con --env-system-scripts \"${CMAKE_SOURCE_DIR}/release/scripts\" </LocalDebuggerCommandArguments>
|
||||
<LocalDebuggerCommandArguments>-con --env-system-scripts \"${CMAKE_SOURCE_DIR}/scripts\" </LocalDebuggerCommandArguments>
|
||||
</PropertyGroup>
|
||||
</Project>")
|
||||
endif()
|
||||
@@ -994,6 +1044,23 @@ if(WITH_VULKAN_BACKEND)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WITH_VULKAN_BACKEND)
|
||||
if(EXISTS ${LIBDIR}/shaderc)
|
||||
set(SHADERC_FOUND On)
|
||||
set(SHADERC_ROOT_DIR ${LIBDIR}/shaderc)
|
||||
set(SHADERC_INCLUDE_DIR ${SHADERC_ROOT_DIR}/include)
|
||||
set(SHADERC_INCLUDE_DIRS ${SHADERC_INCLUDE_DIR})
|
||||
set(SHADERC_LIBRARY
|
||||
DEBUG ${SHADERC_ROOT_DIR}/lib/shaderc_shared_d.lib
|
||||
OPTIMIZED ${SHADERC_ROOT_DIR}/lib/shaderc_shared.lib
|
||||
)
|
||||
set(SHADERC_LIBRARIES ${SHADERC_LIBRARY})
|
||||
else()
|
||||
message(WARNING "Shaderc was not found, disabling WITH_VULKAN_BACKEND")
|
||||
set(WITH_VULKAN_BACKEND OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WITH_CYCLES AND WITH_CYCLES_PATH_GUIDING)
|
||||
find_package(openpgl QUIET)
|
||||
if(openpgl_FOUND)
|
||||
@@ -1010,7 +1077,7 @@ endif()
|
||||
set(ZSTD_INCLUDE_DIRS ${LIBDIR}/zstd/include)
|
||||
set(ZSTD_LIBRARIES ${LIBDIR}/zstd/lib/zstd_static.lib)
|
||||
|
||||
if(WITH_CYCLES AND WITH_CYCLES_DEVICE_ONEAPI)
|
||||
if(WITH_CYCLES AND (WITH_CYCLES_DEVICE_ONEAPI OR (WITH_CYCLES_EMBREE AND EMBREE_SYCL_SUPPORT)))
|
||||
set(LEVEL_ZERO_ROOT_DIR ${LIBDIR}/level_zero)
|
||||
set(CYCLES_SYCL ${LIBDIR}/dpcpp CACHE PATH "Path to oneAPI DPC++ compiler")
|
||||
if(EXISTS ${CYCLES_SYCL} AND NOT SYCL_ROOT_DIR)
|
||||
@@ -1021,8 +1088,9 @@ if(WITH_CYCLES AND WITH_CYCLES_DEVICE_ONEAPI)
|
||||
${SYCL_ROOT_DIR}/bin/sycl[0-9].dll
|
||||
)
|
||||
foreach(sycl_runtime_library IN LISTS _sycl_runtime_libraries_glob)
|
||||
string(REPLACE ".dll" "$<$<CONFIG:Debug>:d>.dll" sycl_runtime_library ${sycl_runtime_library})
|
||||
list(APPEND _sycl_runtime_libraries ${sycl_runtime_library})
|
||||
string(REPLACE ".dll" "_d.dll" sycl_runtime_library_debug ${sycl_runtime_library})
|
||||
list(APPEND _sycl_runtime_libraries RELEASE ${sycl_runtime_library})
|
||||
list(APPEND _sycl_runtime_libraries DEBUG ${sycl_runtime_library_debug})
|
||||
endforeach()
|
||||
unset(_sycl_runtime_libraries_glob)
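The loop above pairs every DLL matched by the `sycl[0-9].dll` glob with its `_d` debug counterpart before registering both as bundled libraries. A tiny illustration of the name mapping (the file names are hypothetical glob results):

```python
# Hypothetical result of the sycl[0-9].dll glob above.
release_dlls = ["sycl6.dll", "sycl7.dll"]
pairs = [(dll, dll.replace(".dll", "_d.dll")) for dll in release_dlls]
# -> [("sycl6.dll", "sycl6_d.dll"), ("sycl7.dll", "sycl7_d.dll")]
```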
|
||||
|
||||
@@ -1035,12 +1103,14 @@ if(WITH_CYCLES AND WITH_CYCLES_DEVICE_ONEAPI)
|
||||
|
||||
list(APPEND PLATFORM_BUNDLED_LIBRARIES ${_sycl_runtime_libraries})
|
||||
unset(_sycl_runtime_libraries)
|
||||
|
||||
set(SYCL_LIBRARIES optimized ${SYCL_LIBRARY} debug ${SYCL_LIBRARY_DEBUG})
|
||||
endif()
|
||||
|
||||
|
||||
# Environment variables needed to run precompiled executables that require these libraries.
|
||||
list(JOIN PLATFORM_BUNDLED_LIBRARY_DIRS ";" _library_paths)
|
||||
set(PLATFORM_ENV_BUILD_DIRS "${LIBDIR}/OpenImageIO/bin\;${LIBDIR}/boost/lib\;${LIBDIR}/openexr/bin\;${LIBDIR}/imath/bin\;${PATH}")
|
||||
set(PLATFORM_ENV_BUILD_DIRS "${LIBDIR}/tbb/bin\;${LIBDIR}/OpenImageIO/bin\;${LIBDIR}/boost/lib\;${LIBDIR}/openexr/bin\;${LIBDIR}/imath/bin\;${PATH}")
|
||||
set(PLATFORM_ENV_BUILD "PATH=${PLATFORM_ENV_BUILD_DIRS}")
|
||||
# Install needs the additional folders from PLATFORM_ENV_BUILD_DIRS as well, as tools like idiff and abcls use the release mode dlls
|
||||
set(PLATFORM_ENV_INSTALL "PATH=${CMAKE_INSTALL_PREFIX_WITH_CONFIG}/blender.shared/\;${PLATFORM_ENV_BUILD_DIRS}\;$ENV{PATH}")
|
||||
|
@@ -1,53 +1,3 @@
|
||||
#
|
||||
# Used by Buildbot build pipeline make_update.py script only for now
|
||||
# We intended to update the make_update.py in the branches to use this file eventually
|
||||
#
|
||||
update-code:
|
||||
git:
|
||||
submodules:
|
||||
- branch: main
|
||||
commit_id: HEAD
|
||||
path: release/scripts/addons
|
||||
- branch: main
|
||||
commit_id: HEAD
|
||||
path: release/scripts/addons_contrib
|
||||
- branch: main
|
||||
commit_id: HEAD
|
||||
path: release/datafiles/locale
|
||||
- branch: main
|
||||
commit_id: HEAD
|
||||
path: source/tools
|
||||
svn:
|
||||
libraries:
|
||||
darwin-arm64:
|
||||
branch: trunk
|
||||
commit_id: HEAD
|
||||
path: lib/darwin_arm64
|
||||
darwin-x86_64:
|
||||
branch: trunk
|
||||
commit_id: HEAD
|
||||
path: lib/darwin
|
||||
linux-x86_64:
|
||||
branch: trunk
|
||||
commit_id: HEAD
|
||||
path: lib/linux_x86_64_glibc_228
|
||||
windows-amd64:
|
||||
branch: trunk
|
||||
commit_id: HEAD
|
||||
path: lib/win64_vc15
|
||||
tests:
|
||||
branch: trunk
|
||||
commit_id: HEAD
|
||||
path: lib/tests
|
||||
benchmarks:
|
||||
branch: trunk
|
||||
commit_id: HEAD
|
||||
path: lib/benchmarks
|
||||
assets:
|
||||
branch: trunk
|
||||
commit_id: HEAD
|
||||
path: lib/assets
|
||||
|
||||
#
|
||||
# Buildbot only configs
|
||||
#
|
||||
|
@@ -112,6 +112,7 @@ def create_manifest(
|
||||
print(f'Building manifest of files: "{outpath}"...', end="", flush=True)
|
||||
with outpath.open("w", encoding="utf-8") as outfile:
|
||||
main_files_to_manifest(blender_srcdir, outfile)
|
||||
assets_to_manifest(blender_srcdir, outfile)
|
||||
submodules_to_manifest(blender_srcdir, version, outfile)
|
||||
|
||||
if packages_dir:
|
||||
@@ -131,17 +132,27 @@ def submodules_to_manifest(
|
||||
skip_addon_contrib = version.is_release()
|
||||
assert not blender_srcdir.is_absolute()
|
||||
|
||||
for line in git_command("-C", blender_srcdir, "submodule"):
|
||||
submodule = line.split()[1]
|
||||
|
||||
for submodule in ("scripts/addons", "scripts/addons_contrib"):
|
||||
# Don't use native slashes, as Git for MS-Windows outputs forward slashes.
|
||||
if skip_addon_contrib and submodule == "release/scripts/addons_contrib":
|
||||
if skip_addon_contrib and submodule == "scripts/addons_contrib":
|
||||
continue
|
||||
|
||||
for path in git_ls_files(blender_srcdir / submodule):
|
||||
print(path, file=outfile)
|
||||
|
||||
|
||||
def assets_to_manifest(blender_srcdir: Path, outfile: TextIO) -> None:
|
||||
assert not blender_srcdir.is_absolute()
|
||||
|
||||
assets_dir = blender_srcdir.parent / "lib" / "assets"
|
||||
for path in assets_dir.glob("*"):
|
||||
if path.name == "working":
|
||||
continue
|
||||
if path.name in SKIP_NAMES:
|
||||
continue
|
||||
print(path, file=outfile)
|
||||
|
||||
|
||||
def packages_to_manifest(outfile: TextIO, packages_dir: Path) -> None:
|
||||
for path in packages_dir.glob("*"):
|
||||
if not path.is_file():
|
||||
@@ -172,7 +183,9 @@ def create_tarball(
|
||||
command += [
|
||||
"--transform",
|
||||
f"s,^{blender_srcdir.name}/,blender-{version}/,g",
|
||||
"--use-compress-program=xz -9",
|
||||
"--transform",
|
||||
f"s,^lib/assets/,blender-{version}/release/datafiles/assets/,g",
|
||||
"--use-compress-program=xz -1",
|
||||
"--create",
|
||||
f"--file={tarball}",
|
||||
f"--files-from={manifest}",
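The two `--transform` arguments above are sed-style `s,from,to,g` rules that tar applies to every path read from the manifest. A small illustration of the intended mapping, using a hypothetical version string and source directory name:

```python
import re

VERSION = "4.0.0"        # hypothetical value, for illustration only
SRCDIR_NAME = "blender"  # stands in for blender_srcdir.name above

def transformed(path: str) -> str:
    # The same two renames tar performs via the --transform rules.
    path = re.sub(rf"^{SRCDIR_NAME}/", f"blender-{VERSION}/", path)
    return re.sub(r"^lib/assets/", f"blender-{VERSION}/release/datafiles/assets/", path)

print(transformed("blender/CMakeLists.txt"))
# -> blender-4.0.0/CMakeLists.txt
print(transformed("lib/assets/example.blend"))  # asset name is made up
# -> blender-4.0.0/release/datafiles/assets/example.blend
```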
|
||||
|
@@ -16,14 +16,28 @@ import shutil
|
||||
import sys
|
||||
|
||||
import make_utils
|
||||
from pathlib import Path
|
||||
from make_utils import call, check_output
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from typing import (
|
||||
List,
|
||||
Iterable,
|
||||
Optional,
|
||||
)
|
||||
|
||||
|
||||
class Submodule:
|
||||
path: str
|
||||
branch: str
|
||||
branch_fallback: str
|
||||
|
||||
def __init__(self, path: str, branch: str, branch_fallback: str) -> None:
|
||||
self.path = path
|
||||
self.branch = branch
|
||||
self.branch_fallback = branch_fallback
|
||||
|
||||
|
||||
def print_stage(text: str) -> None:
|
||||
print("")
|
||||
print(text)
|
||||
@@ -198,73 +212,271 @@ def git_update_skip(args: argparse.Namespace, check_remote_exists: bool = True)
|
||||
return ""
|
||||
|
||||
|
||||
def use_upstream_workflow(args: argparse.Namespace) -> bool:
|
||||
return make_utils.git_remote_exist(args.git_command, "upstream")
|
||||
|
||||
|
||||
def work_tree_update_upstream_workflow(args: argparse.Namespace, use_fetch: bool = True) -> str:
|
||||
"""
|
||||
Update the Blender repository using the GitHub-style fork organization.

Returns an empty string when the current local branch has been updated to the upstream
state, otherwise a short message describing why it was not.
|
||||
"""
|
||||
|
||||
branch_name = make_utils.git_branch(args.git_command)
|
||||
|
||||
if use_fetch:
|
||||
call((args.git_command, "fetch", "upstream"))
|
||||
|
||||
upstream_branch = f"upstream/{branch_name}"
|
||||
if not make_utils.git_branch_exists(args.git_command, upstream_branch):
|
||||
return "no_branch"
|
||||
|
||||
retcode = call((args.git_command, "merge", "--ff-only", upstream_branch), exit_on_error=False)
|
||||
if retcode != 0:
|
||||
return "Unable to fast forward\n"
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
def work_tree_update(args: argparse.Namespace, use_fetch: bool = True) -> str:
|
||||
"""
|
||||
Update the Git working tree using the best strategy
|
||||
|
||||
This function detects whether a GitHub-style fork remote organization is used, or whether
the repository's origin is the upstream itself.
|
||||
"""
|
||||
|
||||
if use_upstream_workflow(args):
|
||||
message = work_tree_update_upstream_workflow(args, use_fetch)
|
||||
if message != "no_branch":
|
||||
return message
|
||||
|
||||
# If there is upstream configured but the local branch is not in the upstream, try to
|
||||
# update the branch from the fork.
|
||||
|
||||
update_command = [args.git_command, "pull", "--rebase"]
|
||||
|
||||
call(update_command)
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
# Update blender repository.
|
||||
def blender_update(args: argparse.Namespace) -> None:
|
||||
def blender_update(args: argparse.Namespace) -> str:
|
||||
print_stage("Updating Blender Git Repository")
|
||||
call([args.git_command, "pull", "--rebase"])
|
||||
|
||||
return work_tree_update(args)
|
||||
|
||||
|
||||
# Update submodules.
|
||||
def submodules_update(
|
||||
args: argparse.Namespace,
|
||||
release_version: Optional[str],
|
||||
branch: Optional[str],
|
||||
) -> str:
|
||||
print_stage("Updating Submodules")
|
||||
if make_utils.command_missing(args.git_command):
|
||||
sys.stderr.write("git not found, can't update code\n")
|
||||
sys.exit(1)
|
||||
def resolve_external_url(blender_url: str, repo_name: str) -> str:
|
||||
return urljoin(blender_url + "/", "../" + repo_name)
|
||||
|
||||
# Update submodules to appropriate given branch,
|
||||
# falling back to main if none is given and/or found in a sub-repository.
|
||||
|
||||
def external_script_copy_old_submodule_over(args: argparse.Namespace, directory_name: str) -> None:
|
||||
blender_git_root = Path(get_blender_git_root())
|
||||
scripts_dir = blender_git_root / "scripts"
|
||||
external_dir = scripts_dir / directory_name
|
||||
|
||||
old_submodule_relative_dir = Path("release") / "scripts" / directory_name
|
||||
print(f"Moving {old_submodule_relative_dir} to scripts/{directory_name} ...")
|
||||
|
||||
old_submodule_dir = blender_git_root / old_submodule_relative_dir
|
||||
shutil.move(old_submodule_dir, external_dir)
|
||||
|
||||
# Remove the old ".git", which is a file containing the path to the submodule's bare
# repository inside the main repository's .git/modules directory.
|
||||
(external_dir / ".git").unlink()
|
||||
|
||||
bare_repo_relative_dir = Path(".git") / "modules" / "release" / "scripts" / directory_name
|
||||
print(f"Copying {bare_repo_relative_dir} to scripts/{directory_name}/.git ...")
|
||||
bare_repo_dir = blender_git_root / bare_repo_relative_dir
|
||||
shutil.copytree(bare_repo_dir, external_dir / ".git")
|
||||
|
||||
git_config = external_dir / ".git" / "config"
|
||||
call((args.git_command, "config", "--file", str(git_config), "--unset", "core.worktree"))
|
||||
|
||||
|
||||
def external_script_initialize_if_needed(args: argparse.Namespace,
|
||||
repo_name: str,
|
||||
directory_name: str) -> None:
|
||||
"""Initialize checkout of an external repository scripts directory"""
|
||||
|
||||
blender_git_root = Path(get_blender_git_root())
|
||||
blender_dot_git = blender_git_root / ".git"
|
||||
scripts_dir = blender_git_root / "scripts"
|
||||
external_dir = scripts_dir / directory_name
|
||||
|
||||
if external_dir.exists():
|
||||
return
|
||||
|
||||
print(f"Initializing scripts/{directory_name} ...")
|
||||
|
||||
old_submodule_dot_git = blender_git_root / "release" / "scripts" / directory_name / ".git"
|
||||
if old_submodule_dot_git.exists() and blender_dot_git.is_dir():
|
||||
external_script_copy_old_submodule_over(args, directory_name)
|
||||
return
|
||||
|
||||
origin_name = "upstream" if use_upstream_workflow(args) else "origin"
|
||||
blender_url = make_utils.git_get_remote_url(args.git_command, origin_name)
|
||||
external_url = resolve_external_url(blender_url, repo_name)
|
||||
|
||||
# When running `make update` from a freshly cloned fork, check whether a fork of the submodule
# is available. If not, fall back to the repository URL relative to the main Blender repository.
|
||||
if origin_name == "origin" and not make_utils.git_is_remote_repository(args.git_command, external_url):
|
||||
external_url = resolve_external_url("https://projects.blender.org/blender/blender", repo_name)
|
||||
|
||||
call((args.git_command, "clone", "--origin", origin_name, external_url, str(external_dir)))
|
||||
|
||||
|
||||
def external_script_add_origin_if_needed(args: argparse.Namespace,
|
||||
repo_name: str,
|
||||
directory_name: str) -> None:
|
||||
"""
|
||||
Add a remote called 'origin' if there is a fork of the external repository available.

This is only done when using the GitHub-style upstream workflow in the main repository.
|
||||
"""
|
||||
|
||||
if not use_upstream_workflow(args):
|
||||
return
|
||||
|
||||
cwd = os.getcwd()
|
||||
|
||||
blender_git_root = Path(get_blender_git_root())
|
||||
scripts_dir = blender_git_root / "scripts"
|
||||
external_dir = scripts_dir / directory_name
|
||||
|
||||
origin_blender_url = make_utils.git_get_remote_url(args.git_command, "origin")
|
||||
origin_external_url = resolve_external_url(origin_blender_url, repo_name)
|
||||
|
||||
try:
|
||||
os.chdir(external_dir)
|
||||
|
||||
if (make_utils.git_remote_exist(args.git_command, "origin") or
|
||||
not make_utils.git_remote_exist(args.git_command, "upstream")):
|
||||
return
|
||||
|
||||
if not make_utils.git_is_remote_repository(args.git_command, origin_external_url):
|
||||
return
|
||||
|
||||
print(f"Adding origin remote to {directory_name} pointing to fork ...")
|
||||
|
||||
# Non-obvious tricks to introduce the new remote called "origin" to the existing
|
||||
# submodule configuration.
|
||||
#
|
||||
# This is all within the context of creating a fork of a submodule after `make update`
|
||||
# has been run and possibly local branches tracking upstream were added.
|
||||
#
|
||||
# The idea here is as follows:
|
||||
#
|
||||
# - Rename remote "upstream" to "origin", which takes care of changing the names of
|
||||
# remotes the local branches are tracking.
|
||||
#
|
||||
# - Change the URL of "origin", which was still pointing to the upstream.
|
||||
#
|
||||
# - Re-introduce the "upstream" remote, with the same URL as it had prior to rename.
|
||||
|
||||
upstream_url = make_utils.git_get_remote_url(args.git_command, "upstream")
|
||||
|
||||
call((args.git_command, "remote", "rename", "upstream", "origin"))
|
||||
make_utils.git_set_config(args.git_command, "remote.origin.url", origin_external_url)
|
||||
|
||||
call((args.git_command, "remote", "add", "upstream", upstream_url))
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
|
||||
return
|
||||
|
||||
|
||||
def external_scripts_update(args: argparse.Namespace,
|
||||
repo_name: str,
|
||||
directory_name: str,
|
||||
branch: Optional[str]) -> str:
|
||||
"""Update a single external checkout with the given name in the scripts folder"""
|
||||
|
||||
external_script_initialize_if_needed(args, repo_name, directory_name)
|
||||
external_script_add_origin_if_needed(args, repo_name, directory_name)
|
||||
|
||||
print(f"Updating scripts/{directory_name} ...")
|
||||
|
||||
cwd = os.getcwd()
|
||||
|
||||
blender_git_root = Path(get_blender_git_root())
|
||||
scripts_dir = blender_git_root / "scripts"
|
||||
external_dir = scripts_dir / directory_name
|
||||
|
||||
# Update the external checkout to the given branch, falling back to main if no branch is
# given or the branch is not found in the sub-repository.
|
||||
branch_fallback = "main"
|
||||
if not branch:
|
||||
branch = branch_fallback
|
||||
|
||||
submodules = [
|
||||
("release/scripts/addons", branch, branch_fallback),
|
||||
("release/scripts/addons_contrib", branch, branch_fallback),
|
||||
("release/datafiles/locale", branch, branch_fallback),
|
||||
("source/tools", branch, branch_fallback),
|
||||
]
|
||||
|
||||
# Initialize submodules only if needed.
|
||||
for submodule_path, submodule_branch, submodule_branch_fallback in submodules:
|
||||
if not os.path.exists(os.path.join(submodule_path, ".git")):
|
||||
call([args.git_command, "submodule", "update", "--init", "--recursive"])
|
||||
break
|
||||
|
||||
# Checkout appropriate branch and pull changes.
|
||||
skip_msg = ""
|
||||
for submodule_path, submodule_branch, submodule_branch_fallback in submodules:
|
||||
cwd = os.getcwd()
|
||||
try:
|
||||
os.chdir(submodule_path)
|
||||
msg = git_update_skip(args, check_remote_exists=False)
|
||||
if msg:
|
||||
skip_msg += submodule_path + " skipped: " + msg + "\n"
|
||||
else:
|
||||
# Find a matching branch that exists.
|
||||
call([args.git_command, "fetch", "origin"])
|
||||
if make_utils.git_branch_exists(args.git_command, submodule_branch):
|
||||
pass
|
||||
elif make_utils.git_branch_exists(args.git_command, submodule_branch_fallback):
|
||||
submodule_branch = submodule_branch_fallback
|
||||
else:
|
||||
# Skip.
|
||||
submodule_branch = ""
|
||||
|
||||
# Switch to branch and pull.
|
||||
if submodule_branch:
|
||||
if make_utils.git_branch(args.git_command) != submodule_branch:
|
||||
try:
|
||||
os.chdir(external_dir)
|
||||
msg = git_update_skip(args, check_remote_exists=False)
|
||||
if msg:
|
||||
skip_msg += directory_name + " skipped: " + msg + "\n"
|
||||
else:
|
||||
# Find a matching branch that exists.
|
||||
for remote in ("origin", "upstream"):
|
||||
if make_utils.git_remote_exist(args.git_command, remote):
|
||||
call([args.git_command, "fetch", remote])
|
||||
|
||||
submodule_branch = branch
|
||||
|
||||
if make_utils.git_branch_exists(args.git_command, submodule_branch):
|
||||
pass
|
||||
elif make_utils.git_branch_exists(args.git_command, branch_fallback):
|
||||
submodule_branch = branch_fallback
|
||||
else:
|
||||
# Skip.
|
||||
submodule_branch = ""
|
||||
|
||||
# Switch to branch and pull.
|
||||
if submodule_branch:
|
||||
if make_utils.git_branch(args.git_command) != submodule_branch:
|
||||
# If the local branch exists just check out to it.
|
||||
# If there is no local branch but only remote specify an explicit remote.
|
||||
# Without this explicit specification Git attempts to set-up tracking
|
||||
# automatically and fails when the branch is available in multiple remotes.
|
||||
if make_utils.git_local_branch_exists(args.git_command, submodule_branch):
|
||||
call([args.git_command, "checkout", submodule_branch])
|
||||
call([args.git_command, "pull", "--rebase", "origin", submodule_branch])
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
elif make_utils.git_remote_exist(args.git_command, "origin"):
|
||||
call([args.git_command, "checkout", "-t", f"origin/{submodule_branch}"])
|
||||
elif make_utils.git_remote_exist(args.git_command, "upstream"):
|
||||
call([args.git_command, "checkout", "-t", f"upstream/{submodule_branch}"])
|
||||
# Don't use extra fetch since all remotes of interest have been already fetched
|
||||
# some lines above.
|
||||
skip_msg += work_tree_update(args, use_fetch=False)
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
|
||||
return skip_msg
|
||||
|
||||
|
||||
def scripts_submodules_update(args: argparse.Namespace, branch: Optional[str]) -> str:
|
||||
"""Update working trees of addons and addons_contrib within the scripts/ directory"""
|
||||
msg = ""
|
||||
|
||||
msg += external_scripts_update(args, "blender-addons", "addons", branch)
|
||||
msg += external_scripts_update(args, "blender-addons-contrib", "addons_contrib", branch)
|
||||
|
||||
return msg
|
||||
|
||||
|
||||
def submodules_update(args: argparse.Namespace, branch: Optional[str]) -> str:
|
||||
"""Update submodules or other externally tracked source trees"""
|
||||
msg = ""
|
||||
|
||||
msg += scripts_submodules_update(args, branch)
|
||||
|
||||
return msg
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
args = parse_arguments()
|
||||
blender_skip_msg = ""
|
||||
@@ -284,12 +496,12 @@ if __name__ == "__main__":
|
||||
svn_update(args, release_version)
|
||||
if not args.no_blender:
|
||||
blender_skip_msg = git_update_skip(args)
|
||||
if not blender_skip_msg:
|
||||
blender_skip_msg = blender_update(args)
|
||||
if blender_skip_msg:
|
||||
blender_skip_msg = "Blender repository skipped: " + blender_skip_msg + "\n"
|
||||
else:
|
||||
blender_update(args)
|
||||
if not args.no_submodules:
|
||||
submodules_skip_msg = submodules_update(args, release_version, branch)
|
||||
submodules_skip_msg = submodules_update(args, branch)
|
||||
|
||||
# Report any skipped repositories at the end, so it's not as easy to miss.
|
||||
skip_msg = blender_skip_msg + submodules_skip_msg
|
||||
|
@@ -9,7 +9,9 @@ import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from typing import (
|
||||
Sequence,
|
||||
@@ -19,7 +21,7 @@ from typing import (
|
||||
|
||||
def call(cmd: Sequence[str], exit_on_error: bool = True, silent: bool = False) -> int:
|
||||
if not silent:
|
||||
print(" ".join(cmd))
|
||||
print(" ".join([str(x) for x in cmd]))
|
||||
|
||||
# Flush to ensure correct order output on Windows.
|
||||
sys.stdout.flush()
|
||||
@@ -52,13 +54,57 @@ def check_output(cmd: Sequence[str], exit_on_error: bool = True) -> str:
|
||||
return output.strip()
|
||||
|
||||
|
||||
def git_local_branch_exists(git_command: str, branch: str) -> bool:
|
||||
return (
|
||||
call([git_command, "rev-parse", "--verify", branch], exit_on_error=False, silent=True) == 0
|
||||
)
|
||||
|
||||
|
||||
def git_branch_exists(git_command: str, branch: str) -> bool:
|
||||
return (
|
||||
call([git_command, "rev-parse", "--verify", branch], exit_on_error=False, silent=True) == 0 or
|
||||
git_local_branch_exists(git_command, branch) or
|
||||
call([git_command, "rev-parse", "--verify", "remotes/upstream/" + branch], exit_on_error=False, silent=True) == 0 or
|
||||
call([git_command, "rev-parse", "--verify", "remotes/origin/" + branch], exit_on_error=False, silent=True) == 0
|
||||
)
|
||||
|
||||
|
||||
def git_get_remote_url(git_command: str, remote_name: str) -> str:
|
||||
return check_output((git_command, "ls-remote", "--get-url", remote_name))
|
||||
|
||||
|
||||
def git_remote_exist(git_command: str, remote_name: str) -> bool:
|
||||
"""Check whether there is a remote with the given name"""
|
||||
# `git ls-remote --get-url upstream` will print a URL if such a remote is configured, and
|
||||
# otherwise will print "upstream".
|
||||
remote_url = check_output((git_command, "ls-remote", "--get-url", remote_name))
|
||||
return remote_url != remote_name
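Illustrative use of `git_remote_exist()`: because `git ls-remote --get-url <name>` simply echoes the name back when no such remote is configured, the string comparison above is all that is needed. The remote names below are the usual `origin`/`upstream` pair assumed by the fork workflow:

```python
# In a plain clone only "origin" exists; "upstream" is added for the fork workflow.
if git_remote_exist("git", "upstream"):
    print("fork workflow: an 'upstream' remote is configured")
else:
    print("direct clone: only 'origin' is configured")
```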
|
||||
|
||||
|
||||
def git_get_resolved_submodule_url(git_command: str, blender_url: str, submodule_path: str) -> str:
|
||||
git_root = check_output([git_command, "rev-parse", "--show-toplevel"])
|
||||
dot_gitmodules = os.path.join(git_root, ".gitmodules")
|
||||
|
||||
submodule_key_prefix = f"submodule.{submodule_path}"
|
||||
submodule_key_url = f"{submodule_key_prefix}.url"
|
||||
|
||||
gitmodule_url = git_get_config(
|
||||
git_command, submodule_key_url, file=dot_gitmodules)
|
||||
|
||||
# A bit of trickery to construct the final URL.
# Only works for relative submodule URLs.
#
# Note that unless the LHS URL ends with a slash, urljoin treats the last component as a
# file.
|
||||
assert gitmodule_url.startswith('..')
|
||||
return urljoin(blender_url + "/", gitmodule_url)
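The trailing-slash caveat above is easy to miss. A quick, self-contained illustration of how `urljoin` resolves a relative `.gitmodules` URL such as `../blender-addons.git` (standard `urllib` behaviour):

```python
from urllib.parse import urljoin

blender_url = "https://projects.blender.org/blender/blender"

# Without the added slash, the last path component is treated as a file and dropped:
print(urljoin(blender_url, "../blender-addons.git"))
# -> https://projects.blender.org/blender-addons.git   (not what we want)

# With the trailing slash, the relative URL resolves as intended:
print(urljoin(blender_url + "/", "../blender-addons.git"))
# -> https://projects.blender.org/blender/blender-addons.git
```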
|
||||
|
||||
|
||||
def git_is_remote_repository(git_command: str, repo: str) -> bool:
|
||||
"""Returns true if the given repository is a valid/clonable git repo"""
|
||||
exit_code = call((git_command, "ls-remote", repo, "HEAD"), exit_on_error=False, silent=True)
|
||||
return exit_code == 0
|
||||
|
||||
|
||||
def git_branch(git_command: str) -> str:
|
||||
# Get current branch name.
|
||||
try:
|
||||
@@ -70,6 +116,20 @@ def git_branch(git_command: str) -> str:
|
||||
return branch.strip().decode('utf8')
|
||||
|
||||
|
||||
def git_get_config(git_command: str, key: str, file: Optional[str] = None) -> str:
|
||||
if file:
|
||||
return check_output([git_command, "config", "--file", file, "--get", key])
|
||||
|
||||
return check_output([git_command, "config", "--get", key])
|
||||
|
||||
|
||||
def git_set_config(git_command: str, key: str, value: str, file: Optional[str] = None) -> str:
|
||||
if file:
|
||||
return check_output([git_command, "config", "--file", file, key, value])
|
||||
|
||||
return check_output([git_command, "config", key, value])
|
||||
|
||||
|
||||
def git_tag(git_command: str) -> Optional[str]:
|
||||
# Get current tag name.
|
||||
try:
|
||||
|
@@ -1,20 +0,0 @@
|
||||
if NOT exist "%BLENDER_DIR%\source\tools\.git" (
|
||||
echo Checking out sub-modules
|
||||
if not "%GIT%" == "" (
|
||||
"%GIT%" submodule update --init --recursive --progress
|
||||
if errorlevel 1 goto FAIL
|
||||
"%GIT%" submodule foreach git checkout main
|
||||
if errorlevel 1 goto FAIL
|
||||
"%GIT%" submodule foreach git pull --rebase origin main
|
||||
if errorlevel 1 goto FAIL
|
||||
goto EOF
|
||||
) else (
|
||||
echo Blender submodules not found, and git not found in path to retrieve them.
|
||||
goto FAIL
|
||||
)
|
||||
)
|
||||
goto EOF
|
||||
|
||||
:FAIL
|
||||
exit /b 1
|
||||
:EOF
|
@@ -14,7 +14,7 @@ if NOT EXIST %PYTHON% (
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
set FORMAT_PATHS=%BLENDER_DIR%\source\tools\utils_maintenance\clang_format_paths.py
|
||||
set FORMAT_PATHS=%BLENDER_DIR%\tools\utils_maintenance\clang_format_paths.py
|
||||
|
||||
REM The formatting script expects clang-format to be in the current PATH.
|
||||
set PATH=%CF_PATH%;%PATH%
|
||||
|
@@ -4,9 +4,9 @@ if "%GIT%" == "" (
|
||||
)
|
||||
cd "%BLENDER_DIR%"
|
||||
for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Branch_hash=%%i
|
||||
cd "%BLENDER_DIR%/release/datafiles/locale"
|
||||
cd "%BLENDER_DIR%/locale"
|
||||
for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Locale_hash=%%i
|
||||
cd "%BLENDER_DIR%/release/scripts/addons"
|
||||
cd "%BLENDER_DIR%/scripts/addons"
|
||||
for /f "delims=" %%i in ('"%GIT%" rev-parse HEAD') do echo Addons_Hash=%%i
|
||||
cd "%BLENDER_DIR%"
|
||||
:EOF
|
@@ -231,7 +231,7 @@ class FileBlockHeader:
|
||||
self.SDNAIndex = 0
|
||||
self.Count = 0
|
||||
self.FileOffset = handle.tell()
|
||||
#self.Code += ' ' * (4 - len(self.Code))
|
||||
# self.Code += ' ' * (4 - len(self.Code))
|
||||
log.debug("found blend-file-block-fileheader {0} {1}".format(self.Code, self.FileOffset))
|
||||
|
||||
def skip(self, handle):
|
||||
|
@@ -31,7 +31,7 @@ For an overview of BMesh data types and how they reference each other see:
|
||||
Example Script
|
||||
--------------
|
||||
|
||||
.. literalinclude:: __/__/__/release/scripts/templates_py/bmesh_simple.py
|
||||
.. literalinclude:: __/__/__/scripts/templates_py/bmesh_simple.py
|
||||
|
||||
|
||||
Standalone Module
|
||||
|
@@ -288,7 +288,7 @@ In Python, this is done by defining a class, which is a subclass of an existing
|
||||
Example Operator
|
||||
----------------
|
||||
|
||||
.. literalinclude:: __/__/__/release/scripts/templates_py/operator_simple.py
|
||||
.. literalinclude:: __/__/__/scripts/templates_py/operator_simple.py
|
||||
|
||||
Once this script runs, ``SimpleOperator`` is registered with Blender
|
||||
and can be called from Operator Search or added to the toolbar.
|
||||
@@ -320,7 +320,7 @@ Example Panel
|
||||
Panels are registered as a class, like an operator.
|
||||
Notice the extra ``bl_`` variables used to set the context they display in.
|
||||
|
||||
.. literalinclude:: __/__/__/release/scripts/templates_py/ui_panel_simple.py
|
||||
.. literalinclude:: __/__/__/scripts/templates_py/ui_panel_simple.py
|
||||
|
||||
To run the script:
|
||||
|
||||
|
@@ -367,13 +367,13 @@ except ImportError:
|
||||
# Note that ".." is replaced by "__" in the RST files,
|
||||
# to avoid having to match Blender's source tree.
|
||||
EXTRA_SOURCE_FILES = (
|
||||
"../../../release/scripts/templates_py/bmesh_simple.py",
|
||||
"../../../release/scripts/templates_py/gizmo_operator.py",
|
||||
"../../../release/scripts/templates_py/gizmo_operator_target.py",
|
||||
"../../../release/scripts/templates_py/gizmo_simple.py",
|
||||
"../../../release/scripts/templates_py/operator_simple.py",
|
||||
"../../../release/scripts/templates_py/ui_panel_simple.py",
|
||||
"../../../release/scripts/templates_py/ui_previews_custom_icon.py",
|
||||
"../../../scripts/templates_py/bmesh_simple.py",
|
||||
"../../../scripts/templates_py/gizmo_operator.py",
|
||||
"../../../scripts/templates_py/gizmo_operator_target.py",
|
||||
"../../../scripts/templates_py/gizmo_simple.py",
|
||||
"../../../scripts/templates_py/operator_simple.py",
|
||||
"../../../scripts/templates_py/ui_panel_simple.py",
|
||||
"../../../scripts/templates_py/ui_previews_custom_icon.py",
|
||||
"../examples/bmesh.ops.1.py",
|
||||
"../examples/bpy.app.translations.py",
|
||||
)
|
||||
@@ -1816,7 +1816,7 @@ def pyrna2sphinx(basepath):
|
||||
|
||||
# operators
|
||||
def write_ops():
|
||||
API_BASEURL = "https://projects.blender.org/blender/blender/src/branch/main/release/scripts"
|
||||
API_BASEURL = "https://projects.blender.org/blender/blender/src/branch/main/scripts"
|
||||
API_BASEURL_ADDON = "https://projects.blender.org/blender/blender-addons"
|
||||
API_BASEURL_ADDON_CONTRIB = "https://projects.blender.org/blender/blender-addons-contrib"
|
||||
|
||||
@@ -1865,7 +1865,7 @@ def pyrna2sphinx(basepath):
|
||||
else:
|
||||
url_base = API_BASEURL
|
||||
|
||||
fw(" :file: `%s\\:%d <%s/%s$%d>`_\n\n" %
|
||||
fw(" :file:`%s\\:%d <%s/%s#L%d>`_\n\n" %
|
||||
(location[0], location[1], url_base, location[0], location[1]))
|
||||
|
||||
file.close()
|
||||
|
2
extern/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2006 Blender Foundation. All rights reserved.
|
||||
# Copyright 2006 Blender Foundation
|
||||
|
||||
# Libs that adhere to strict flags
|
||||
add_subdirectory(curve_fit_nd)
|
||||
|
2
extern/binreloc/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2008 Blender Foundation. All rights reserved.
|
||||
# Copyright 2008 Blender Foundation
|
||||
|
||||
set(INC
|
||||
include
|
||||
|
2
extern/bullet2/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2006 Blender Foundation. All rights reserved.
|
||||
# Copyright 2006 Blender Foundation
|
||||
|
||||
# avoid noisy warnings
|
||||
if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
|
2
extern/ceres/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2012 Blender Foundation. All rights reserved.
|
||||
# Copyright 2012 Blender Foundation
|
||||
|
||||
set(INC
|
||||
.
|
||||
|
2
extern/clew/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2006 Blender Foundation. All rights reserved.
|
||||
# Copyright 2006 Blender Foundation
|
||||
|
||||
set(INC
|
||||
.
|
||||
|
2
extern/cuew/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2006 Blender Foundation. All rights reserved.
|
||||
# Copyright 2006 Blender Foundation
|
||||
|
||||
set(INC
|
||||
.
|
||||
|
2
extern/draco/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2019 Blender Foundation. All rights reserved.
|
||||
# Copyright 2019 Blender Foundation
|
||||
|
||||
# Build Draco library.
|
||||
add_subdirectory(draco)
|
||||
|
2
extern/fast_float/README.blender
vendored
@@ -1,7 +1,7 @@
|
||||
Project: fast_float
|
||||
URL: https://github.com/fastfloat/fast_float
|
||||
License: MIT
|
||||
Upstream version: 3.4.0 (b7f9d6c)
|
||||
Upstream version: 4.0.0 (fbd5bd7, 2023 Mar 31)
|
||||
Local modifications:
|
||||
|
||||
- Took only the fast_float.h header and the license/readme files
|
||||
|
131
extern/fast_float/README.md
vendored
@@ -1,12 +1,5 @@
|
||||
## fast_float number parsing library: 4x faster than strtod
|
||||
|
||||
|
||||
[](https://github.com/fastfloat/fast_float/actions/workflows/vs16-ci.yml)
|
||||
|
||||
The fast_float library provides fast header-only implementations for the C++ from_chars
|
||||
functions for `float` and `double` types. These functions convert ASCII strings representing
|
||||
decimal values (e.g., `1.3e10`) into binary types. We provide exact rounding (including
|
||||
@@ -28,8 +21,8 @@ struct from_chars_result {
|
||||
```
|
||||
|
||||
It parses the character sequence [first,last) for a number. It parses floating-point numbers expecting
|
||||
a locale-independent format equivalent to the C++17 from_chars function.
The resulting floating-point value is the closest floating-point value (using either float or double),
|
||||
using the "round to even" convention for values that would otherwise fall right in-between two values.
|
||||
That is, we provide exact parsing according to the IEEE standard.
|
||||
|
||||
@@ -47,7 +40,7 @@ Example:
|
||||
``` C++
|
||||
#include "fast_float/fast_float.h"
|
||||
#include <iostream>
|
||||
|
||||
|
||||
int main() {
|
||||
const std::string input = "3.1416 xyz ";
|
||||
double result;
|
||||
@@ -60,39 +53,60 @@ int main() {
|
||||
|
||||
|
||||
Like the C++17 standard, the `fast_float::from_chars` functions take an optional last argument of
|
||||
the type `fast_float::chars_format`. It is a bitset value: we check whether
|
||||
`fmt & fast_float::chars_format::fixed` and `fmt & fast_float::chars_format::scientific` are set
|
||||
to determine whether we allow the fixed point and scientific notation respectively.
|
||||
The default is `fast_float::chars_format::general` which allows both `fixed` and `scientific`.
|
||||
|
||||
The library seeks to follow the C++17 (see [20.19.3](http://eel.is/c++draft/charconv.from.chars).(7.1)) specification.
|
||||
* The `from_chars` function does not skip leading white-space characters.
|
||||
* [A leading `+` sign](https://en.cppreference.com/w/cpp/utility/from_chars) is forbidden.
|
||||
* It is generally impossible to represent a decimal value exactly as a binary floating-point number (`float` and `double` types). We seek the nearest value. We round to an even mantissa when we are in-between two binary floating-point numbers.
|
||||
|
||||
Furthermore, we have the following restrictions:
|
||||
* We only support `float` and `double` types at this time.
|
||||
* We only support the decimal format: we do not support hexadecimal strings.
|
||||
* For values that are either very large or very small (e.g., `1e9999`), we represent it using the infinity or negative infinity value.
|
||||
* For values that are either very large or very small (e.g., `1e9999`), we represent it using the infinity or negative infinity value and the returned `ec` is set to `std::errc::result_out_of_range`.
|
||||
|
||||
We support Visual Studio, macOS, Linux, freeBSD. We support big and little endian. We support 32-bit and 64-bit systems.
|
||||
|
||||
We assume that the rounding mode is set to nearest (`std::fegetround() == FE_TONEAREST`).
|
||||
|
||||
## C++20: compile-time evaluation (constexpr)
|
||||
|
||||
In C++20, you may use `fast_float::from_chars` to parse strings
|
||||
at compile-time, as in the following example:
|
||||
|
||||
```C++
|
||||
// consteval forces compile-time evaluation of the function in C++20.
|
||||
consteval double parse(std::string_view input) {
|
||||
double result;
|
||||
auto answer = fast_float::from_chars(input.data(), input.data()+input.size(), result);
|
||||
if(answer.ec != std::errc()) { return -1.0; }
|
||||
return result;
|
||||
}
|
||||
|
||||
// This function should compile to a function which
|
||||
// merely returns 3.1415.
|
||||
constexpr double constexptest() {
|
||||
return parse("3.1415 input");
|
||||
}
|
||||
```
|
||||
|
||||
## Using commas as decimal separator
|
||||
|
||||
|
||||
The C++ standard stipulates that `from_chars` has to be locale-independent. In
particular, the decimal separator has to be the period (`.`). However,
some users still want to use the `fast_float` library in a locale-dependent
|
||||
manner. Using a separate function called `from_chars_advanced`, we allow the users
|
||||
to pass a `parse_options` instance which contains a custom decimal separator (e.g.,
|
||||
the comma). You may use it as follows.
|
||||
|
||||
```C++
|
||||
#include "fast_float/fast_float.h"
|
||||
#include <iostream>
|
||||
|
||||
|
||||
int main() {
|
||||
const std::string input = "3,1416 xyz ";
|
||||
double result;
|
||||
@@ -104,25 +118,62 @@ int main() {
|
||||
}
|
||||
```
|
||||
|
||||
You can parse delimited numbers:
|
||||
```C++
|
||||
const std::string input = "234532.3426362,7869234.9823,324562.645";
|
||||
double result;
|
||||
auto answer = fast_float::from_chars(input.data(), input.data()+input.size(), result);
|
||||
if(answer.ec != std::errc()) {
|
||||
// check error
|
||||
}
|
||||
// we have result == 234532.3426362.
|
||||
if(answer.ptr[0] != ',') {
|
||||
// unexpected delimiter
|
||||
}
|
||||
answer = fast_float::from_chars(answer.ptr + 1, input.data()+input.size(), result);
|
||||
if(answer.ec != std::errc()) {
|
||||
// check error
|
||||
}
|
||||
// we have result == 7869234.9823.
|
||||
if(answer.ptr[0] != ',') {
|
||||
// unexpected delimiter
|
||||
}
|
||||
answer = fast_float::from_chars(answer.ptr + 1, input.data()+input.size(), result);
|
||||
if(answer.ec != std::errc()) {
|
||||
// check error
|
||||
}
|
||||
// we have result == 324562.645.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
- Daniel Lemire, [Number Parsing at a Gigabyte per Second](https://arxiv.org/abs/2101.11408), Software: Pratice and Experience 51 (8), 2021.
|
||||
## Relation With Other Work
|
||||
|
||||
The fast_float library is part of:
|
||||
|
||||
- GCC (as of version 12): the `from_chars` function in GCC relies on fast_float.
|
||||
- [WebKit](https://github.com/WebKit/WebKit), the engine behind Safari (Apple's web browser)
|
||||
|
||||
|
||||
The fastfloat algorithm is part of the [LLVM standard libraries](https://github.com/llvm/llvm-project/commit/87c016078ad72c46505461e4ff8bfa04819fe7ba).
|
||||
|
||||
There is a [derived implementation part of AdaCore](https://github.com/AdaCore/VSS).
|
||||
|
||||
|
||||
The fast_float library provides a performance similar to that of the [fast_double_parser](https://github.com/lemire/fast_double_parser) library but using an updated algorithm reworked from the ground up, and while offering an API more in line with the expectations of C++ programmers. The fast_double_parser library is part of the [Microsoft LightGBM machine-learning framework](https://github.com/microsoft/LightGBM).
|
||||
|
||||
## References
|
||||
|
||||
- Daniel Lemire, [Number Parsing at a Gigabyte per Second](https://arxiv.org/abs/2101.11408), Software: Practice and Experience 51 (8), 2021.
|
||||
- Noble Mushtak, Daniel Lemire, [Fast Number Parsing Without Fallback](https://arxiv.org/abs/2212.06644), Software: Practice and Experience (to appear)
|
||||
|
||||
## Other programming languages
|
||||
|
||||
- [There is an R binding](https://github.com/eddelbuettel/rcppfastfloat) called `rcppfastfloat`.
|
||||
- [There is a Rust port of the fast_float library](https://github.com/aldanor/fast-float-rust/) called `fast-float-rust`.
|
||||
- [There is a Java port of the fast_float library](https://github.com/wrandelshofer/FastDoubleParser) called `FastDoubleParser`.
|
||||
- [There is a Java port of the fast_float library](https://github.com/wrandelshofer/FastDoubleParser) called `FastDoubleParser`. It used for important systems such as [Jackson](https://github.com/FasterXML/jackson-core).
|
||||
- [There is a C# port of the fast_float library](https://github.com/CarlVerret/csFastFloat) called `csFastFloat`.
|
||||
|
||||
|
||||
## Relation With Other Work
|
||||
|
||||
The fastfloat algorithm is part of the [LLVM standard libraries](https://github.com/llvm/llvm-project/commit/87c016078ad72c46505461e4ff8bfa04819fe7ba).
|
||||
|
||||
The fast_float library provides a performance similar to that of the [fast_double_parser](https://github.com/lemire/fast_double_parser) library but using an updated algorithm reworked from the ground up, and while offering an API more in line with the expectations of C++ programmers. The fast_double_parser library is part of the [Microsoft LightGBM machine-learning framework](https://github.com/microsoft/LightGBM).
|
||||
|
||||
## Users
|
||||
|
||||
The fast_float library is used by [Apache Arrow](https://github.com/apache/arrow/pull/8494) where it multiplied the number parsing speed by two or three times. It is also used by [Yandex ClickHouse](https://github.com/ClickHouse/ClickHouse) and by [Google Jsonnet](https://github.com/google/jsonnet).
|
||||
@@ -135,14 +186,14 @@ It can parse random floating-point numbers at a speed of 1 GB/s on some systems.
|
||||
<img src="http://lemire.me/blog/wp-content/uploads/2020/11/fastfloat_speed.png" width="400">
|
||||
|
||||
```
|
||||
$ ./build/benchmarks/benchmark
# parsing random integers in the range [0,1)
volume = 2.09808 MB
netlib : 271.18 MB/s (+/- 1.2 %) 12.93 Mfloat/s
doubleconversion : 225.35 MB/s (+/- 1.2 %) 10.74 Mfloat/s
strtod : 190.94 MB/s (+/- 1.6 %) 9.10 Mfloat/s
abseil : 430.45 MB/s (+/- 2.2 %) 20.52 Mfloat/s
fastfloat : 1042.38 MB/s (+/- 9.9 %) 49.68 Mfloat/s
|
||||
```
|
||||
|
||||
See https://github.com/lemire/simple_fastfloat_benchmark for our benchmarking code.
|
||||
@@ -183,23 +234,23 @@ You should change the `GIT_TAG` line so that you recover the version you wish to
|
||||
|
||||
## Using as single header
|
||||
|
||||
The script `script/amalgamate.py` may be used to generate a single header
version of the library if so desired.
Just run the script from the root directory of this repository.
|
||||
You can customize the license type and output file if desired as described in
|
||||
the command line help.
|
||||
|
||||
You may directly download automatically generated single-header files:
|
||||
|
||||
https://github.com/fastfloat/fast_float/releases/download/v1.1.2/fast_float.h
|
||||
https://github.com/fastfloat/fast_float/releases/download/v3.4.0/fast_float.h
|
||||
|
||||
## Credit
|
||||
|
||||
Though this work is inspired by many different people, this work benefited especially from exchanges with
Michael Eisel, who motivated the original research with his key insights, and with Nigel Tao who provided
invaluable feedback. Rémy Oudompheng first implemented a fast path we use in the case of long digits.

The library includes code adapted from Google Wuffs (written by Nigel Tao) which was originally published
|
||||
under the Apache 2.0 license.
|
||||
|
||||
## License
|
||||
|
2111
extern/fast_float/fast_float.h
vendored
File diff suppressed because it is too large
2
extern/gflags/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2016 Blender Foundation. All rights reserved.
|
||||
# Copyright 2016 Blender Foundation
|
||||
|
||||
# Too noisy for code we don't maintain.
|
||||
if(CMAKE_COMPILER_IS_GNUCC)
|
||||
|
2
extern/glog/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2016 Blender Foundation. All rights reserved.
|
||||
# Copyright 2016 Blender Foundation
|
||||
|
||||
set(INC
|
||||
src
|
||||
|
2
extern/gmock/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2014 Blender Foundation. All rights reserved.
|
||||
# Copyright 2014 Blender Foundation
|
||||
|
||||
set(INC
|
||||
.
|
||||
|
2
extern/gtest/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2014 Blender Foundation. All rights reserved.
|
||||
# Copyright 2014 Blender Foundation
|
||||
|
||||
# avoid noisy warnings
|
||||
if(CMAKE_COMPILER_IS_GNUCC)
|
||||
|
2
extern/hipew/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2021 Blender Foundation. All rights reserved.
|
||||
# Copyright 2021 Blender Foundation
|
||||
|
||||
set(INC
|
||||
.
|
||||
|
2
extern/lzma/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2006 Blender Foundation. All rights reserved.
|
||||
# Copyright 2006 Blender Foundation
|
||||
|
||||
# avoid noisy warnings
|
||||
if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
|
2
extern/lzo/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2006 Blender Foundation. All rights reserved.
|
||||
# Copyright 2006 Blender Foundation
|
||||
|
||||
remove_strict_flags()
|
||||
|
||||
|
2
extern/mantaflow/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2016 Blender Foundation. All rights reserved.
|
||||
# Copyright 2016 Blender Foundation
|
||||
# The Original Code is: all of this file.
|
||||
# Contributor(s): Sebastian Barschkis (sebbas).
|
||||
|
||||
|
2
extern/quadriflow/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright 2019 Blender Foundation. All rights reserved.
|
||||
# Copyright 2019 Blender Foundation
|
||||
|
||||
# avoid noisy warnings
|
||||
if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
|
19
extern/quadriflow/patches/blender.patch
vendored
@@ -231,3 +231,22 @@ index 355ee008246..a770bbee60c 100644
|
||||
}
|
||||
allocator.deallocate(values, capacity);
|
||||
capacity = 0;
|
||||
diff --git a/extern/quadriflow/src/hierarchy.cpp b/extern/quadriflow/src/hierarchy.cpp
|
||||
index 8cc41da23d0..70a9628320f 100644
|
||||
--- a/extern/quadriflow/src/hierarchy.cpp
|
||||
+++ b/extern/quadriflow/src/hierarchy.cpp
|
||||
@@ -269,7 +269,13 @@ void Hierarchy::DownsampleGraph(const AdjacentMatrix adj, const MatrixXd& V, con
|
||||
for (auto it = ad.begin(); it != ad.end(); ++it, ++entry_it) {
|
||||
int k = it->id;
|
||||
double dp = N.col(i).dot(N.col(k));
|
||||
- double ratio = A[i] > A[k] ? (A[i] / A[k]) : (A[k] / A[i]);
|
||||
+ double ratio;
|
||||
+ if (A[i] > A[k]) {
|
||||
+ ratio = (A[k] == 0.0f) ? 1.0f : A[i] / A[k];
|
||||
+ }
|
||||
+ else {
|
||||
+ ratio = (A[i] == 0.0f) ? 1.0f : A[k] / A[i];
|
||||
+ }
|
||||
*entry_it = Entry(i, k, dp * ratio);
|
||||
}
|
||||
}
|
8
extern/quadriflow/src/hierarchy.cpp
vendored
@@ -269,7 +269,13 @@ void Hierarchy::DownsampleGraph(const AdjacentMatrix adj, const MatrixXd& V, con
|
||||
for (auto it = ad.begin(); it != ad.end(); ++it, ++entry_it) {
|
||||
int k = it->id;
|
||||
double dp = N.col(i).dot(N.col(k));
|
||||
double ratio = A[i] > A[k] ? (A[i] / A[k]) : (A[k] / A[i]);
|
||||
double ratio;
|
||||
if (A[i] > A[k]) {
|
||||
ratio = (A[k] == 0.0f) ? 1.0f : A[i] / A[k];
|
||||
}
|
||||
else {
|
||||
ratio = (A[i] == 0.0f) ? 1.0f : A[k] / A[i];
|
||||
}
|
||||
*entry_it = Entry(i, k, dp * ratio);
|
||||
}
|
||||
}
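The change above guards the area ratio against division by zero by treating a zero-area neighbour as a ratio of 1. The same guard, restated as a small Python sketch for clarity:

```python
def safe_area_ratio(a_i: float, a_k: float) -> float:
    """Larger-over-smaller area ratio, with a zero area treated as a ratio of 1."""
    if a_i > a_k:
        return 1.0 if a_k == 0.0 else a_i / a_k
    return 1.0 if a_i == 0.0 else a_k / a_i
```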
|
||||
|
1
extern/quadriflow/src/loader.cpp
vendored
@@ -8,6 +8,7 @@
|
||||
|
||||
#include "loader.hpp"
|
||||
|
||||
#include <cstdint>
|
||||
#include <fstream>
|
||||
#include <unordered_map>
|
||||
|
||||
|
5
extern/renderdoc/README.blender
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
Project: Renderdoc APP
|
||||
URL: https://github.com/baldurk/renderdoc/
|
||||
License: MIT
|
||||
Upstream version: d47e79ae079783935b8857d6a1730440eafb0b38
|
||||
Local modifications: None
|
723
extern/renderdoc/include/renderdoc_app.h
vendored
Normal file
@@ -0,0 +1,723 @@
|
||||
/******************************************************************************
|
||||
* The MIT License (MIT)
|
||||
*
|
||||
* Copyright (c) 2019-2023 Baldur Karlsson
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
******************************************************************************/
|
||||
|
||||
#pragma once
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//
|
||||
// Documentation for the API is available at https://renderdoc.org/docs/in_application_api.html
|
||||
//
|
||||
|
||||
#if !defined(RENDERDOC_NO_STDINT)
|
||||
#include <stdint.h>
|
||||
#endif
|
||||
|
||||
#if defined(WIN32) || defined(__WIN32__) || defined(_WIN32) || defined(_MSC_VER)
|
||||
#define RENDERDOC_CC __cdecl
|
||||
#elif defined(__linux__)
|
||||
#define RENDERDOC_CC
|
||||
#elif defined(__APPLE__)
|
||||
#define RENDERDOC_CC
|
||||
#else
|
||||
#error "Unknown platform"
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Constants not used directly in below API
|
||||
|
||||
// This is a GUID/magic value used for when applications pass a path where shader debug
|
||||
// information can be found to match up with a stripped shader.
|
||||
// the define can be used like so: const GUID RENDERDOC_ShaderDebugMagicValue =
|
||||
// RENDERDOC_ShaderDebugMagicValue_value
|
||||
#define RENDERDOC_ShaderDebugMagicValue_struct \
|
||||
{ \
|
||||
0xeab25520, 0x6670, 0x4865, 0x84, 0x29, 0x6c, 0x8, 0x51, 0x54, 0x00, 0xff \
|
||||
}
|
||||
|
||||
// as an alternative when you want a byte array (assuming x86 endianness):
|
||||
#define RENDERDOC_ShaderDebugMagicValue_bytearray \
|
||||
{ \
|
||||
0x20, 0x55, 0xb2, 0xea, 0x70, 0x66, 0x65, 0x48, 0x84, 0x29, 0x6c, 0x8, 0x51, 0x54, 0x00, 0xff \
|
||||
}
|
||||
|
||||
// truncated version when only a uint64_t is available (e.g. Vulkan tags):
|
||||
#define RENDERDOC_ShaderDebugMagicValue_truncated 0x48656670eab25520ULL
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// RenderDoc capture options
|
||||
//
|
||||
|
||||
typedef enum RENDERDOC_CaptureOption {
|
||||
// Allow the application to enable vsync
|
||||
//
|
||||
// Default - enabled
|
||||
//
|
||||
// 1 - The application can enable or disable vsync at will
|
||||
// 0 - vsync is force disabled
|
||||
eRENDERDOC_Option_AllowVSync = 0,
|
||||
|
||||
// Allow the application to enable fullscreen
|
||||
//
|
||||
// Default - enabled
|
||||
//
|
||||
// 1 - The application can enable or disable fullscreen at will
|
||||
// 0 - fullscreen is force disabled
|
||||
eRENDERDOC_Option_AllowFullscreen = 1,
|
||||
|
||||
// Record API debugging events and messages
|
||||
//
|
||||
// Default - disabled
|
||||
//
|
||||
// 1 - Enable built-in API debugging features and records the results into
|
||||
// the capture, which is matched up with events on replay
|
||||
// 0 - no API debugging is forcibly enabled
|
||||
eRENDERDOC_Option_APIValidation = 2,
|
||||
eRENDERDOC_Option_DebugDeviceMode = 2, // deprecated name of this enum
|
||||
|
||||
// Capture CPU callstacks for API events
|
||||
//
|
||||
// Default - disabled
|
||||
//
|
||||
// 1 - Enables capturing of callstacks
|
||||
// 0 - no callstacks are captured
|
||||
eRENDERDOC_Option_CaptureCallstacks = 3,
|
||||
|
||||
// When capturing CPU callstacks, only capture them from actions.
|
||||
// This option does nothing without the above option being enabled
|
||||
//
|
||||
// Default - disabled
|
||||
//
|
||||
// 1 - Only captures callstacks for actions.
|
||||
// Ignored if CaptureCallstacks is disabled
|
||||
// 0 - Callstacks, if enabled, are captured for every event.
|
||||
eRENDERDOC_Option_CaptureCallstacksOnlyDraws = 4,
|
||||
eRENDERDOC_Option_CaptureCallstacksOnlyActions = 4,
|
||||
|
||||
// Specify a delay in seconds to wait for a debugger to attach, after
|
||||
// creating or injecting into a process, before continuing to allow it to run.
|
||||
//
|
||||
// 0 indicates no delay, and the process will run immediately after injection
|
||||
//
|
||||
// Default - 0 seconds
|
||||
//
|
||||
eRENDERDOC_Option_DelayForDebugger = 5,
|
||||
|
||||
// Verify buffer access. This includes checking the memory returned by a Map() call to
|
||||
// detect any out-of-bounds modification, as well as initialising buffers with undefined contents
|
||||
// to a marker value to catch use of uninitialised memory.
|
||||
//
|
||||
// NOTE: This option is only valid for OpenGL and D3D11. Explicit APIs such as D3D12 and Vulkan do
|
||||
// not do the same kind of interception & checking and undefined contents are really undefined.
|
||||
//
|
||||
// Default - disabled
|
||||
//
|
||||
// 1 - Verify buffer access
|
||||
// 0 - No verification is performed, and overwriting bounds may cause crashes or corruption in
|
||||
// RenderDoc.
|
||||
eRENDERDOC_Option_VerifyBufferAccess = 6,
|
||||
|
||||
// The old name for eRENDERDOC_Option_VerifyBufferAccess was eRENDERDOC_Option_VerifyMapWrites.
|
||||
// This option now controls the filling of uninitialised buffers with 0xdddddddd which was
|
||||
// previously always enabled
|
||||
eRENDERDOC_Option_VerifyMapWrites = eRENDERDOC_Option_VerifyBufferAccess,
|
||||
|
||||
// Hooks any system API calls that create child processes, and injects
|
||||
// RenderDoc into them recursively with the same options.
|
||||
//
|
||||
// Default - disabled
|
||||
//
|
||||
// 1 - Hooks into spawned child processes
|
||||
// 0 - Child processes are not hooked by RenderDoc
|
||||
eRENDERDOC_Option_HookIntoChildren = 7,
|
||||
|
||||
// By default RenderDoc only includes resources in the final capture necessary
|
||||
// for that frame, this allows you to override that behaviour.
|
||||
//
|
||||
// Default - disabled
|
||||
//
|
||||
// 1 - all live resources at the time of capture are included in the capture
|
||||
// and available for inspection
|
||||
// 0 - only the resources referenced by the captured frame are included
|
||||
eRENDERDOC_Option_RefAllResources = 8,
|
||||
|
||||
// **NOTE**: As of RenderDoc v1.1 this option has been deprecated. Setting or
|
||||
// getting it will be ignored, to allow compatibility with older versions.
|
||||
// In v1.1 the option acts as if it's always enabled.
|
||||
//
|
||||
// By default RenderDoc skips saving initial states for resources where the
|
||||
// previous contents don't appear to be used, assuming that writes before
|
||||
// reads indicate previous contents aren't used.
|
||||
//
|
||||
// Default - disabled
|
||||
//
|
||||
// 1 - initial contents at the start of each captured frame are saved, even if
|
||||
// they are later overwritten or cleared before being used.
|
||||
// 0 - unless a read is detected, initial contents will not be saved and will
|
||||
// appear as black or empty data.
|
||||
eRENDERDOC_Option_SaveAllInitials = 9,
|
||||
|
||||
// In APIs that allow for the recording of command lists to be replayed later,
|
||||
// RenderDoc may choose to not capture command lists before a frame capture is
|
||||
// triggered, to reduce overheads. This means any command lists recorded once
|
||||
// and replayed many times will not be available and may cause a failure to
|
||||
// capture.
|
||||
//
|
||||
// NOTE: This is only true for APIs where multithreading is difficult or
|
||||
// discouraged. Newer APIs like Vulkan and D3D12 will ignore this option
|
||||
// and always capture all command lists since the API is heavily oriented
|
||||
// around it and the overheads have been reduced by API design.
|
||||
//
|
||||
// 1 - All command lists are captured from the start of the application
|
||||
// 0 - Command lists are only captured if their recording begins during
|
||||
// the period when a frame capture is in progress.
|
||||
eRENDERDOC_Option_CaptureAllCmdLists = 10,
|
||||
|
||||
// Mute API debugging output when the API validation mode option is enabled
|
||||
//
|
||||
// Default - enabled
|
||||
//
|
||||
// 1 - Mute any API debug messages from being displayed or passed through
|
||||
// 0 - API debugging is displayed as normal
|
||||
eRENDERDOC_Option_DebugOutputMute = 11,
|
||||
|
||||
// Option to allow vendor extensions to be used even when they may be
|
||||
// incompatible with RenderDoc and cause corrupted replays or crashes.
|
||||
//
|
||||
// Default - inactive
|
||||
//
|
||||
// No values are documented, this option should only be used when absolutely
|
||||
// necessary as directed by a RenderDoc developer.
|
||||
eRENDERDOC_Option_AllowUnsupportedVendorExtensions = 12,
|
||||
|
||||
} RENDERDOC_CaptureOption;
|
||||
|
||||
// Sets an option that controls how RenderDoc behaves on capture.
|
||||
//
|
||||
// Returns 1 if the option and value are valid
|
||||
// Returns 0 if either is invalid and the option is unchanged
|
||||
typedef int(RENDERDOC_CC *pRENDERDOC_SetCaptureOptionU32)(RENDERDOC_CaptureOption opt, uint32_t val);
|
||||
typedef int(RENDERDOC_CC *pRENDERDOC_SetCaptureOptionF32)(RENDERDOC_CaptureOption opt, float val);
|
||||
|
||||
// Gets the current value of an option as a uint32_t
|
||||
//
|
||||
// If the option is invalid, 0xffffffff is returned
|
||||
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_GetCaptureOptionU32)(RENDERDOC_CaptureOption opt);
|
||||
|
||||
// Gets the current value of an option as a float
|
||||
//
|
||||
// If the option is invalid, -FLT_MAX is returned
|
||||
typedef float(RENDERDOC_CC *pRENDERDOC_GetCaptureOptionF32)(RENDERDOC_CaptureOption opt);
|
||||
|
||||
typedef enum RENDERDOC_InputButton {
|
||||
// '0' - '9' matches ASCII values
|
||||
eRENDERDOC_Key_0 = 0x30,
|
||||
eRENDERDOC_Key_1 = 0x31,
|
||||
eRENDERDOC_Key_2 = 0x32,
|
||||
eRENDERDOC_Key_3 = 0x33,
|
||||
eRENDERDOC_Key_4 = 0x34,
|
||||
eRENDERDOC_Key_5 = 0x35,
|
||||
eRENDERDOC_Key_6 = 0x36,
|
||||
eRENDERDOC_Key_7 = 0x37,
|
||||
eRENDERDOC_Key_8 = 0x38,
|
||||
eRENDERDOC_Key_9 = 0x39,
|
||||
|
||||
// 'A' - 'Z' matches ASCII values
|
||||
eRENDERDOC_Key_A = 0x41,
|
||||
eRENDERDOC_Key_B = 0x42,
|
||||
eRENDERDOC_Key_C = 0x43,
|
||||
eRENDERDOC_Key_D = 0x44,
|
||||
eRENDERDOC_Key_E = 0x45,
|
||||
eRENDERDOC_Key_F = 0x46,
|
||||
eRENDERDOC_Key_G = 0x47,
|
||||
eRENDERDOC_Key_H = 0x48,
|
||||
eRENDERDOC_Key_I = 0x49,
|
||||
eRENDERDOC_Key_J = 0x4A,
|
||||
eRENDERDOC_Key_K = 0x4B,
|
||||
eRENDERDOC_Key_L = 0x4C,
|
||||
eRENDERDOC_Key_M = 0x4D,
|
||||
eRENDERDOC_Key_N = 0x4E,
|
||||
eRENDERDOC_Key_O = 0x4F,
|
||||
eRENDERDOC_Key_P = 0x50,
|
||||
eRENDERDOC_Key_Q = 0x51,
|
||||
eRENDERDOC_Key_R = 0x52,
|
||||
eRENDERDOC_Key_S = 0x53,
|
||||
eRENDERDOC_Key_T = 0x54,
|
||||
eRENDERDOC_Key_U = 0x55,
|
||||
eRENDERDOC_Key_V = 0x56,
|
||||
eRENDERDOC_Key_W = 0x57,
|
||||
eRENDERDOC_Key_X = 0x58,
|
||||
eRENDERDOC_Key_Y = 0x59,
|
||||
eRENDERDOC_Key_Z = 0x5A,
|
||||
|
||||
// leave the rest of the ASCII range free
|
||||
// in case we want to use it later
|
||||
eRENDERDOC_Key_NonPrintable = 0x100,
|
||||
|
||||
eRENDERDOC_Key_Divide,
|
||||
eRENDERDOC_Key_Multiply,
|
||||
eRENDERDOC_Key_Subtract,
|
||||
eRENDERDOC_Key_Plus,
|
||||
|
||||
eRENDERDOC_Key_F1,
|
||||
eRENDERDOC_Key_F2,
|
||||
eRENDERDOC_Key_F3,
|
||||
eRENDERDOC_Key_F4,
|
||||
eRENDERDOC_Key_F5,
|
||||
eRENDERDOC_Key_F6,
|
||||
eRENDERDOC_Key_F7,
|
||||
eRENDERDOC_Key_F8,
|
||||
eRENDERDOC_Key_F9,
|
||||
eRENDERDOC_Key_F10,
|
||||
eRENDERDOC_Key_F11,
|
||||
eRENDERDOC_Key_F12,
|
||||
|
||||
eRENDERDOC_Key_Home,
|
||||
eRENDERDOC_Key_End,
|
||||
eRENDERDOC_Key_Insert,
|
||||
eRENDERDOC_Key_Delete,
|
||||
eRENDERDOC_Key_PageUp,
|
||||
eRENDERDOC_Key_PageDn,
|
||||
|
||||
eRENDERDOC_Key_Backspace,
|
||||
eRENDERDOC_Key_Tab,
|
||||
eRENDERDOC_Key_PrtScrn,
|
||||
eRENDERDOC_Key_Pause,
|
||||
|
||||
eRENDERDOC_Key_Max,
|
||||
} RENDERDOC_InputButton;
|
||||
|
||||
// Sets which key or keys can be used to toggle focus between multiple windows
|
||||
//
|
||||
// If keys is NULL or num is 0, toggle keys will be disabled
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_SetFocusToggleKeys)(RENDERDOC_InputButton *keys, int num);
|
||||
|
||||
// Sets which key or keys can be used to capture the next frame
|
||||
//
|
||||
// If keys is NULL or num is 0, captures keys will be disabled
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_SetCaptureKeys)(RENDERDOC_InputButton *keys, int num);
|
||||
|
||||
typedef enum RENDERDOC_OverlayBits {
|
||||
// This single bit controls whether the overlay is enabled or disabled globally
|
||||
eRENDERDOC_Overlay_Enabled = 0x1,
|
||||
|
||||
// Show the average framerate over several seconds as well as min/max
|
||||
eRENDERDOC_Overlay_FrameRate = 0x2,
|
||||
|
||||
// Show the current frame number
|
||||
eRENDERDOC_Overlay_FrameNumber = 0x4,
|
||||
|
||||
// Show a list of recent captures, and how many captures have been made
|
||||
eRENDERDOC_Overlay_CaptureList = 0x8,
|
||||
|
||||
// Default values for the overlay mask
|
||||
eRENDERDOC_Overlay_Default = (eRENDERDOC_Overlay_Enabled | eRENDERDOC_Overlay_FrameRate |
|
||||
eRENDERDOC_Overlay_FrameNumber | eRENDERDOC_Overlay_CaptureList),
|
||||
|
||||
// Enable all bits
|
||||
eRENDERDOC_Overlay_All = ~0U,
|
||||
|
||||
// Disable all bits
|
||||
eRENDERDOC_Overlay_None = 0,
|
||||
} RENDERDOC_OverlayBits;
|
||||
|
||||
// returns the overlay bits that have been set
|
||||
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_GetOverlayBits)();
|
||||
// sets the overlay bits with an and & or mask
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_MaskOverlayBits)(uint32_t And, uint32_t Or);
|
||||
|
||||
// this function will attempt to remove RenderDoc's hooks in the application.
|
||||
//
|
||||
// Note: that this can only work correctly if done immediately after
|
||||
// the module is loaded, before any API work happens. RenderDoc will remove its
|
||||
// injected hooks and shut down. Behaviour is undefined if this is called
|
||||
// after any API functions have been called, and there is still no guarantee of
|
||||
// success.
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_RemoveHooks)();
|
||||
|
||||
// DEPRECATED: compatibility for code compiled against pre-1.4.1 headers.
|
||||
typedef pRENDERDOC_RemoveHooks pRENDERDOC_Shutdown;
|
||||
|
||||
// This function will unload RenderDoc's crash handler.
|
||||
//
|
||||
// If you use your own crash handler and don't want RenderDoc's handler to
|
||||
// intercede, you can call this function to unload it and any unhandled
|
||||
// exceptions will pass to the next handler.
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_UnloadCrashHandler)();
|
||||
|
||||
// Sets the capture file path template
|
||||
//
|
||||
// pathtemplate is a UTF-8 string that gives a template for how captures will be named
|
||||
// and where they will be saved.
|
||||
//
|
||||
// Any extension is stripped off the path, and captures are saved in the directory
|
||||
// specified, and named with the filename and the frame number appended. If the
|
||||
// directory does not exist it will be created, including any parent directories.
|
||||
//
|
||||
// If pathtemplate is NULL, the template will remain unchanged
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// SetCaptureFilePathTemplate("my_captures/example");
|
||||
//
|
||||
// Capture #1 -> my_captures/example_frame123.rdc
|
||||
// Capture #2 -> my_captures/example_frame456.rdc
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_SetCaptureFilePathTemplate)(const char *pathtemplate);
|
||||
|
||||
// returns the current capture path template, see SetCaptureFileTemplate above, as a UTF-8 string
|
||||
typedef const char *(RENDERDOC_CC *pRENDERDOC_GetCaptureFilePathTemplate)();
|
||||
|
||||
// DEPRECATED: compatibility for code compiled against pre-1.1.2 headers.
|
||||
typedef pRENDERDOC_SetCaptureFilePathTemplate pRENDERDOC_SetLogFilePathTemplate;
|
||||
typedef pRENDERDOC_GetCaptureFilePathTemplate pRENDERDOC_GetLogFilePathTemplate;
|
||||
|
||||
// returns the number of captures that have been made
|
||||
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_GetNumCaptures)();
|
||||
|
||||
// This function returns the details of a capture, by index. New captures are added
|
||||
// to the end of the list.
|
||||
//
|
||||
// filename will be filled with the absolute path to the capture file, as a UTF-8 string
|
||||
// pathlength will be written with the length in bytes of the filename string
|
||||
// timestamp will be written with the time of the capture, in seconds since the Unix epoch
|
||||
//
|
||||
// Any of the parameters can be NULL and they'll be skipped.
|
||||
//
|
||||
// The function will return 1 if the capture index is valid, or 0 if the index is invalid
|
||||
// If the index is invalid, the values will be unchanged
|
||||
//
|
||||
// Note: when captures are deleted in the UI they will remain in this list, so the
|
||||
// capture path may not exist anymore.
|
||||
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_GetCapture)(uint32_t idx, char *filename,
|
||||
uint32_t *pathlength, uint64_t *timestamp);
|
||||
|
||||
// Sets the comments associated with a capture file. These comments are displayed in the
|
||||
// UI program when opening.
|
||||
//
|
||||
// filePath should be a path to the capture file to add comments to. If set to NULL or ""
|
||||
// the most recent capture file created made will be used instead.
|
||||
// comments should be a NULL-terminated UTF-8 string to add as comments.
|
||||
//
|
||||
// Any existing comments will be overwritten.
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_SetCaptureFileComments)(const char *filePath,
|
||||
const char *comments);
|
||||
|
||||
// returns 1 if the RenderDoc UI is connected to this application, 0 otherwise
|
||||
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_IsTargetControlConnected)();
|
||||
|
||||
// DEPRECATED: compatibility for code compiled against pre-1.1.1 headers.
|
||||
// This was renamed to IsTargetControlConnected in API 1.1.1, the old typedef is kept here for
|
||||
// backwards compatibility with old code, it is castable either way since it's ABI compatible
|
||||
// as the same function pointer type.
|
||||
typedef pRENDERDOC_IsTargetControlConnected pRENDERDOC_IsRemoteAccessConnected;
|
||||
|
||||
// This function will launch the Replay UI associated with the RenderDoc library injected
|
||||
// into the running application.
|
||||
//
|
||||
// if connectTargetControl is 1, the Replay UI will be launched with a command line parameter
|
||||
// to connect to this application
|
||||
// cmdline is the rest of the command line, as a UTF-8 string. E.g. a captures to open
|
||||
// if cmdline is NULL, the command line will be empty.
|
||||
//
|
||||
// returns the PID of the replay UI if successful, 0 if not successful.
|
||||
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_LaunchReplayUI)(uint32_t connectTargetControl,
|
||||
const char *cmdline);
|
||||
|
||||
// RenderDoc can return a higher version than requested if it's backwards compatible,
|
||||
// this function returns the actual version returned. If a parameter is NULL, it will be
|
||||
// ignored and the others will be filled out.
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_GetAPIVersion)(int *major, int *minor, int *patch);
|
||||
|
||||
// Requests that the replay UI show itself (if hidden or not the current top window). This can be
|
||||
// used in conjunction with IsTargetControlConnected and LaunchReplayUI to intelligently handle
|
||||
// showing the UI after making a capture.
|
||||
//
|
||||
// This will return 1 if the request was successfully passed on, though it's not guaranteed that
|
||||
// the UI will be on top in all cases depending on OS rules. It will return 0 if there is no current
|
||||
// target control connection to make such a request, or if there was another error
|
||||
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_ShowReplayUI)();
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// Capturing functions
|
||||
//
|
||||
|
||||
// A device pointer is a pointer to the API's root handle.
|
||||
//
|
||||
// This would be an ID3D11Device, HGLRC/GLXContext, ID3D12Device, etc
|
||||
typedef void *RENDERDOC_DevicePointer;
|
||||
|
||||
// A window handle is the OS's native window handle
|
||||
//
|
||||
// This would be an HWND, GLXDrawable, etc
|
||||
typedef void *RENDERDOC_WindowHandle;
|
||||
|
||||
// A helper macro for Vulkan, where the device handle cannot be used directly.
|
||||
//
|
||||
// Passing the VkInstance to this macro will return the RENDERDOC_DevicePointer to use.
|
||||
//
|
||||
// Specifically, the value needed is the dispatch table pointer, which sits as the first
|
||||
// pointer-sized object in the memory pointed to by the VkInstance. Thus we cast to a void** and
|
||||
// indirect once.
|
||||
#define RENDERDOC_DEVICEPOINTER_FROM_VKINSTANCE(inst) (*((void **)(inst)))
|
||||
|
||||
// This sets the RenderDoc in-app overlay in the API/window pair as 'active' and it will
|
||||
// respond to keypresses. Neither parameter can be NULL
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_SetActiveWindow)(RENDERDOC_DevicePointer device,
|
||||
RENDERDOC_WindowHandle wndHandle);
|
||||
|
||||
// capture the next frame on whichever window and API is currently considered active
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_TriggerCapture)();
|
||||
|
||||
// capture the next N frames on whichever window and API is currently considered active
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_TriggerMultiFrameCapture)(uint32_t numFrames);
|
||||
|
||||
// When choosing either a device pointer or a window handle to capture, you can pass NULL.
|
||||
// Passing NULL specifies a 'wildcard' match against anything. This allows you to specify
|
||||
// any API rendering to a specific window, or a specific API instance rendering to any window,
|
||||
// or in the simplest case of one window and one API, you can just pass NULL for both.
|
||||
//
|
||||
// In either case, if there are two or more possible matching (device,window) pairs it
|
||||
// is undefined which one will be captured.
|
||||
//
|
||||
// Note: for headless rendering you can pass NULL for the window handle and either specify
|
||||
// a device pointer or leave it NULL as above.
|
||||
|
||||
// Immediately starts capturing API calls on the specified device pointer and window handle.
|
||||
//
|
||||
// If there is no matching thing to capture (e.g. no supported API has been initialised),
|
||||
// this will do nothing.
|
||||
//
|
||||
// The results are undefined (including crashes) if two captures are started overlapping,
|
||||
// even on separate devices and/or windows.
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_StartFrameCapture)(RENDERDOC_DevicePointer device,
|
||||
RENDERDOC_WindowHandle wndHandle);
|
||||
|
||||
// Returns whether or not a frame capture is currently ongoing anywhere.
|
||||
//
|
||||
// This will return 1 if a capture is ongoing, and 0 if there is no capture running
|
||||
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_IsFrameCapturing)();
|
||||
|
||||
// Ends capturing immediately.
|
||||
//
|
||||
// This will return 1 if the capture succeeded, and 0 if there was an error capturing.
|
||||
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_EndFrameCapture)(RENDERDOC_DevicePointer device,
|
||||
RENDERDOC_WindowHandle wndHandle);
|
||||
|
||||
// Ends capturing immediately and discard any data stored without saving to disk.
|
||||
//
|
||||
// This will return 1 if the capture was discarded, and 0 if there was an error or no capture
|
||||
// was in progress
|
||||
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_DiscardFrameCapture)(RENDERDOC_DevicePointer device,
|
||||
RENDERDOC_WindowHandle wndHandle);
|
||||
|
||||
// Only valid to be called between a call to StartFrameCapture and EndFrameCapture. Gives a custom
|
||||
// title to the capture produced which will be displayed in the UI.
|
||||
//
|
||||
// If multiple captures are ongoing, this title will be applied to the first capture to end after
|
||||
// this call. The second capture to end will have no title, unless this function is called again.
|
||||
//
|
||||
// Calling this function has no effect if no capture is currently running
|
||||
typedef void(RENDERDOC_CC *pRENDERDOC_SetCaptureTitle)(const char *title);
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// RenderDoc API versions
|
||||
//
|
||||
|
||||
// RenderDoc uses semantic versioning (http://semver.org/).
|
||||
//
|
||||
// MAJOR version is incremented when incompatible API changes happen.
|
||||
// MINOR version is incremented when functionality is added in a backwards-compatible manner.
|
||||
// PATCH version is incremented when backwards-compatible bug fixes happen.
|
||||
//
|
||||
// Note that this means the API returned can be higher than the one you might have requested.
|
||||
// e.g. if you are running against a newer RenderDoc that supports 1.0.1, it will be returned
|
||||
// instead of 1.0.0. You can check this with the GetAPIVersion entry point
|
||||
typedef enum RENDERDOC_Version {
|
||||
eRENDERDOC_API_Version_1_0_0 = 10000, // RENDERDOC_API_1_0_0 = 1 00 00
|
||||
eRENDERDOC_API_Version_1_0_1 = 10001, // RENDERDOC_API_1_0_1 = 1 00 01
|
||||
eRENDERDOC_API_Version_1_0_2 = 10002, // RENDERDOC_API_1_0_2 = 1 00 02
|
||||
eRENDERDOC_API_Version_1_1_0 = 10100, // RENDERDOC_API_1_1_0 = 1 01 00
|
||||
eRENDERDOC_API_Version_1_1_1 = 10101, // RENDERDOC_API_1_1_1 = 1 01 01
|
||||
eRENDERDOC_API_Version_1_1_2 = 10102, // RENDERDOC_API_1_1_2 = 1 01 02
|
||||
eRENDERDOC_API_Version_1_2_0 = 10200, // RENDERDOC_API_1_2_0 = 1 02 00
|
||||
eRENDERDOC_API_Version_1_3_0 = 10300, // RENDERDOC_API_1_3_0 = 1 03 00
|
||||
eRENDERDOC_API_Version_1_4_0 = 10400, // RENDERDOC_API_1_4_0 = 1 04 00
|
||||
eRENDERDOC_API_Version_1_4_1 = 10401, // RENDERDOC_API_1_4_1 = 1 04 01
|
||||
eRENDERDOC_API_Version_1_4_2 = 10402, // RENDERDOC_API_1_4_2 = 1 04 02
|
||||
eRENDERDOC_API_Version_1_5_0 = 10500, // RENDERDOC_API_1_5_0 = 1 05 00
|
||||
eRENDERDOC_API_Version_1_6_0 = 10600, // RENDERDOC_API_1_6_0 = 1 06 00
|
||||
} RENDERDOC_Version;
|
||||
|
||||
// API version changelog:
|
||||
//
|
||||
// 1.0.0 - initial release
|
||||
// 1.0.1 - Bugfix: IsFrameCapturing() was returning false for captures that were triggered
|
||||
// by keypress or TriggerCapture, instead of Start/EndFrameCapture.
|
||||
// 1.0.2 - Refactor: Renamed eRENDERDOC_Option_DebugDeviceMode to eRENDERDOC_Option_APIValidation
|
||||
// 1.1.0 - Add feature: TriggerMultiFrameCapture(). Backwards compatible with 1.0.x since the new
|
||||
// function pointer is added to the end of the struct, the original layout is identical
|
||||
// 1.1.1 - Refactor: Renamed remote access to target control (to better disambiguate from remote
|
||||
// replay/remote server concept in replay UI)
|
||||
// 1.1.2 - Refactor: Renamed "log file" in function names to just capture, to clarify that these
|
||||
// are captures and not debug logging files. This is the first API version in the v1.0
|
||||
// branch.
|
||||
// 1.2.0 - Added feature: SetCaptureFileComments() to add comments to a capture file that will be
|
||||
// displayed in the UI program on load.
|
||||
// 1.3.0 - Added feature: New capture option eRENDERDOC_Option_AllowUnsupportedVendorExtensions
|
||||
// which allows users to opt-in to allowing unsupported vendor extensions to function.
|
||||
// Should be used at the user's own risk.
|
||||
// Refactor: Renamed eRENDERDOC_Option_VerifyMapWrites to
|
||||
// eRENDERDOC_Option_VerifyBufferAccess, which now also controls initialisation to
|
||||
// 0xdddddddd of uninitialised buffer contents.
|
||||
// 1.4.0 - Added feature: DiscardFrameCapture() to discard a frame capture in progress and stop
|
||||
// capturing without saving anything to disk.
|
||||
// 1.4.1 - Refactor: Renamed Shutdown to RemoveHooks to better clarify what is happening
|
||||
// 1.4.2 - Refactor: Renamed 'draws' to 'actions' in callstack capture option.
|
||||
// 1.5.0 - Added feature: ShowReplayUI() to request that the replay UI show itself if connected
|
||||
// 1.6.0 - Added feature: SetCaptureTitle() which can be used to set a title for a
|
||||
// capture made with StartFrameCapture() or EndFrameCapture()
|
||||
|
||||
typedef struct RENDERDOC_API_1_6_0
|
||||
{
|
||||
pRENDERDOC_GetAPIVersion GetAPIVersion;
|
||||
|
||||
pRENDERDOC_SetCaptureOptionU32 SetCaptureOptionU32;
|
||||
pRENDERDOC_SetCaptureOptionF32 SetCaptureOptionF32;
|
||||
|
||||
pRENDERDOC_GetCaptureOptionU32 GetCaptureOptionU32;
|
||||
pRENDERDOC_GetCaptureOptionF32 GetCaptureOptionF32;
|
||||
|
||||
pRENDERDOC_SetFocusToggleKeys SetFocusToggleKeys;
|
||||
pRENDERDOC_SetCaptureKeys SetCaptureKeys;
|
||||
|
||||
pRENDERDOC_GetOverlayBits GetOverlayBits;
|
||||
pRENDERDOC_MaskOverlayBits MaskOverlayBits;
|
||||
|
||||
// Shutdown was renamed to RemoveHooks in 1.4.1.
|
||||
// These unions allow old code to continue compiling without changes
|
||||
union
|
||||
{
|
||||
pRENDERDOC_Shutdown Shutdown;
|
||||
pRENDERDOC_RemoveHooks RemoveHooks;
|
||||
};
|
||||
pRENDERDOC_UnloadCrashHandler UnloadCrashHandler;
|
||||
|
||||
// Get/SetLogFilePathTemplate was renamed to Get/SetCaptureFilePathTemplate in 1.1.2.
|
||||
// These unions allow old code to continue compiling without changes
|
||||
union
|
||||
{
|
||||
// deprecated name
|
||||
pRENDERDOC_SetLogFilePathTemplate SetLogFilePathTemplate;
|
||||
// current name
|
||||
pRENDERDOC_SetCaptureFilePathTemplate SetCaptureFilePathTemplate;
|
||||
};
|
||||
union
|
||||
{
|
||||
// deprecated name
|
||||
pRENDERDOC_GetLogFilePathTemplate GetLogFilePathTemplate;
|
||||
// current name
|
||||
pRENDERDOC_GetCaptureFilePathTemplate GetCaptureFilePathTemplate;
|
||||
};
|
||||
|
||||
pRENDERDOC_GetNumCaptures GetNumCaptures;
|
||||
pRENDERDOC_GetCapture GetCapture;
|
||||
|
||||
pRENDERDOC_TriggerCapture TriggerCapture;
|
||||
|
||||
// IsRemoteAccessConnected was renamed to IsTargetControlConnected in 1.1.1.
|
||||
// This union allows old code to continue compiling without changes
|
||||
union
|
||||
{
|
||||
// deprecated name
|
||||
pRENDERDOC_IsRemoteAccessConnected IsRemoteAccessConnected;
|
||||
// current name
|
||||
pRENDERDOC_IsTargetControlConnected IsTargetControlConnected;
|
||||
};
|
||||
pRENDERDOC_LaunchReplayUI LaunchReplayUI;
|
||||
|
||||
pRENDERDOC_SetActiveWindow SetActiveWindow;
|
||||
|
||||
pRENDERDOC_StartFrameCapture StartFrameCapture;
|
||||
pRENDERDOC_IsFrameCapturing IsFrameCapturing;
|
||||
pRENDERDOC_EndFrameCapture EndFrameCapture;
|
||||
|
||||
// new function in 1.1.0
|
||||
pRENDERDOC_TriggerMultiFrameCapture TriggerMultiFrameCapture;
|
||||
|
||||
// new function in 1.2.0
|
||||
pRENDERDOC_SetCaptureFileComments SetCaptureFileComments;
|
||||
|
||||
// new function in 1.4.0
|
||||
pRENDERDOC_DiscardFrameCapture DiscardFrameCapture;
|
||||
|
||||
// new function in 1.5.0
|
||||
pRENDERDOC_ShowReplayUI ShowReplayUI;
|
||||
|
||||
// new function in 1.6.0
|
||||
pRENDERDOC_SetCaptureTitle SetCaptureTitle;
|
||||
} RENDERDOC_API_1_6_0;
|
||||
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_0_0;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_0_1;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_0_2;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_1_0;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_1_1;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_1_2;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_2_0;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_3_0;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_4_0;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_4_1;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_4_2;
|
||||
typedef RENDERDOC_API_1_6_0 RENDERDOC_API_1_5_0;
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// RenderDoc API entry point
|
||||
//
|
||||
// This entry point can be obtained via GetProcAddress/dlsym if RenderDoc is available.
|
||||
//
|
||||
// The name is the same as the typedef - "RENDERDOC_GetAPI"
|
||||
//
|
||||
// This function is not thread safe, and should not be called on multiple threads at once.
|
||||
// Ideally, call this once as early as possible in your application's startup, before doing
|
||||
// any API work, since some configuration functionality etc has to be done also before
|
||||
// initialising any APIs.
|
||||
//
|
||||
// Parameters:
|
||||
// version is a single value from the RENDERDOC_Version above.
|
||||
//
|
||||
// outAPIPointers will be filled out with a pointer to the corresponding struct of function
|
||||
// pointers.
|
||||
//
|
||||
// Returns:
|
||||
// 1 - if the outAPIPointers has been filled with a pointer to the API struct requested
|
||||
// 0 - if the requested version is not supported or the arguments are invalid.
|
||||
//
|
||||
typedef int(RENDERDOC_CC *pRENDERDOC_GetAPI)(RENDERDOC_Version version, void **outAPIPointers);
|
||||
|
||||
#ifdef __cplusplus
|
||||
} // extern "C"
|
||||
#endif
|
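For context, a minimal sketch of how an application can query the API declared in the vendored header above and trigger a capture. Only names documented in the header are used; the shared-library name ("librenderdoc.so") and the Linux-only loading path are assumptions, not part of this commit:

// Illustrative only: query an already-injected RenderDoc instance on Linux.
#include <dlfcn.h>
#include <cstdio>
#include "renderdoc_app.h"  // the vendored header added above

int main()
{
  RENDERDOC_API_1_6_0 *rdoc_api = nullptr;
  // RTLD_NOLOAD: only succeed if RenderDoc is already loaded into this process.
  if (void *mod = dlopen("librenderdoc.so", RTLD_NOW | RTLD_NOLOAD)) {
    pRENDERDOC_GetAPI RENDERDOC_GetAPI = (pRENDERDOC_GetAPI)dlsym(mod, "RENDERDOC_GetAPI");
    if (RENDERDOC_GetAPI &&
        RENDERDOC_GetAPI(eRENDERDOC_API_Version_1_6_0, (void **)&rdoc_api) == 1) {
      int major = 0, minor = 0, patch = 0;
      rdoc_api->GetAPIVersion(&major, &minor, &patch);
      std::printf("RenderDoc API %d.%d.%d\n", major, minor, patch);
      rdoc_api->TriggerCapture();  // capture the next frame of the active window/API
    }
  }
  return 0;
}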
4
extern/sdlew/CMakeLists.txt
vendored
4
extern/sdlew/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2006 Blender Foundation. All rights reserved.
# Copyright 2006 Blender Foundation

set(INC
.
@@ -7,7 +7,7 @@ set(INC
)

set(INC_SYS

${X11_X11_INCLUDE_PATH}
)

set(SRC
2
extern/smaa_areatex/CMakeLists.txt
vendored
2
extern/smaa_areatex/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2017 Blender Foundation. All rights reserved.
# Copyright 2017 Blender Foundation
# The Original Code is: all of this file.
# Contributor(s): IRIE Shinsuke.

5
extern/tinygltf/README.blender
vendored
5
extern/tinygltf/README.blender
vendored
@@ -1,6 +1,5 @@
Project: TinyGLTF
URL: https://github.com/syoyo/tinygltf
License: MIT
Upstream version: 2.5.0, 19a41d20ec0
Local modifications:
* Silence "enum value not handled in switch" warnings due to JSON dependency.
Upstream version: 2.8.3, 84a83d39f55d
Local modifications: None
BIN
extern/tinygltf/patches/TinyGLTF.diff
vendored
BIN
extern/tinygltf/patches/TinyGLTF.diff
vendored
Binary file not shown.
2085
extern/tinygltf/tiny_gltf.h
vendored
2085
extern/tinygltf/tiny_gltf.h
vendored
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2022 Blender Foundation. All rights reserved.
# Copyright 2022 Blender Foundation

set(INC
.
@@ -1,5 +1,5 @@
/* SPDX-License-Identifier: GPL-2.0-or-later
* Copyright 2022 Blender Foundation. All rights reserved. */
* Copyright 2022 Blender Foundation */

#ifdef __APPLE__
# include <MoltenVK/vk_mvk_moltenvk.h>
2
extern/wcwidth/CMakeLists.txt
vendored
2
extern/wcwidth/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2013 Blender Foundation. All rights reserved.
# Copyright 2013 Blender Foundation

set(INC
.
2
extern/xdnd/CMakeLists.txt
vendored
2
extern/xdnd/CMakeLists.txt
vendored
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2012 Blender Foundation. All rights reserved.
# Copyright 2012 Blender Foundation

set(INC
.
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2006 Blender Foundation. All rights reserved.
# Copyright 2006 Blender Foundation

# add_subdirectory(atomic) # header only
add_subdirectory(atomic)
@@ -67,6 +67,10 @@ if(UNIX AND NOT APPLE)
add_subdirectory(libc_compat)
endif()

if (WITH_RENDERDOC)
add_subdirectory(renderdoc_dynload)
endif()

if(UNIX AND NOT APPLE)
# Important this comes after "ghost" as it uses includes defined by GHOST's CMake.
if(WITH_GHOST_WAYLAND AND WITH_GHOST_WAYLAND_DYNLOAD)
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2020 Blender Foundation. All rights reserved.
# Copyright 2020 Blender Foundation

set(INC
.
@@ -38,10 +38,8 @@
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The Original Code is Copyright (C) 2016 Blender Foundation.
* All rights reserved.
*
* The Original Code is: adapted from jemalloc.
* The Original Code is adapted from jemalloc.
* Modifications Copyright (C) 2016 Blender Foundation.
*/

/** \file
@@ -38,10 +38,8 @@
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The Original Code is Copyright (C) 2016 Blender Foundation.
* All rights reserved.
*
* The Original Code is: adapted from jemalloc.
* The Original Code is adapted from jemalloc.
* Modifications Copyright (C) 2016 Blender Foundation.
*/

/** \file
@@ -38,10 +38,8 @@
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The Original Code is Copyright (C) 2016 Blender Foundation.
* All rights reserved.
*
* The Original Code is: adapted from jemalloc.
* The Original Code is adapted from jemalloc.
* Modifications Copyright (C) 2016 Blender Foundation.
*/

/** \file
@@ -38,10 +38,8 @@
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The Original Code is Copyright (C) 2016 Blender Foundation.
* All rights reserved.
*
* The Original Code is: adapted from jemalloc.
* The Original Code is adapted from jemalloc.
* Modifications Copyright (C) 2016 Blender Foundation.
*/

/** \file
@@ -281,6 +281,7 @@ endif()

if(WITH_CYCLES_EMBREE)
add_definitions(-DWITH_EMBREE)
add_definitions(-DEMBREE_MAJOR_VERSION=${EMBREE_MAJOR_VERSION})
include_directories(
SYSTEM
${EMBREE_INCLUDE_DIRS}
@@ -393,6 +394,10 @@ if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_C_COMPILER_ID MATCHES "Clang")
unset(_has_no_error_unused_macros)
endif()

if(WITH_USD)
add_definitions(-DWITH_USD)
endif()

if(WITH_CYCLES_HYDRA_RENDER_DELEGATE AND (NOT WITH_USD))
set_and_warn_library_found("USD" WITH_USD WITH_CYCLES_HYDRA_RENDER_DELEGATE)
endif()
@@ -50,6 +50,16 @@ if(WITH_CYCLES_STANDALONE AND WITH_CYCLES_STANDALONE_GUI)
list(APPEND LIB ${Epoxy_LIBRARIES} ${SDL2_LIBRARIES})
endif()

if(WITH_USD)
list(APPEND INC_SYS
${USD_INCLUDE_DIRS}
)
list(APPEND LIB
cycles_hydra
${USD_LIBRARIES}
)
endif()

cycles_external_libraries_append(LIB)

# Common configuration.
@@ -17,9 +17,7 @@ OIIOOutputDriver::OIIOOutputDriver(const string_view filepath,
{
}

OIIOOutputDriver::~OIIOOutputDriver()
{
}
OIIOOutputDriver::~OIIOOutputDriver() {}

void OIIOOutputDriver::write_render_tile(const Tile &tile)
{
@@ -22,9 +22,7 @@ OpenGLDisplayDriver::OpenGLDisplayDriver(const function<bool()> &gl_context_enab
{
}

OpenGLDisplayDriver::~OpenGLDisplayDriver()
{
}
OpenGLDisplayDriver::~OpenGLDisplayDriver() {}

/* --------------------------------------------------------------------
* Update procedure.
@@ -41,7 +41,7 @@ static const char *FRAGMENT_SHADER =
"void main()\n"
"{\n"
" vec4 rgba = texture(image_texture, texCoord_interp);\n"
/* Harcoded Rec.709 gamma, should use OpenColorIO eventually. */
/* Hard-coded Rec.709 gamma, should use OpenColorIO eventually. */
" fragColor = pow(rgba, vec4(0.45, 0.45, 0.45, 1.0));\n"
"}\n\0";

@@ -149,9 +149,7 @@ void OpenGLShader::bind(int width, int height)
glUniform2f(fullscreen_location_, width, height);
}

void OpenGLShader::unbind()
{
}
void OpenGLShader::unbind() {}

uint OpenGLShader::get_shader_program()
{
@@ -403,7 +403,7 @@ class CyclesRenderSettings(bpy.types.PropertyGroup):

time_limit: FloatProperty(
name="Time Limit",
description="Limit the render time (excluding synchronization time)."
description="Limit the render time (excluding synchronization time). "
"Zero disables the limit",
min=0.0,
default=0.0,
@@ -1673,7 +1673,7 @@ class CyclesPreferences(bpy.types.AddonPreferences):
driver_version = "470"
col.label(text=iface_("Requires NVIDIA GPU with compute capability %s") % compute_capability,
icon='BLANK1', translate=False)
col.label(text="and NVIDIA driver version %s or newer" % driver_version,
col.label(text=iface_("and NVIDIA driver version %s or newer") % driver_version,
icon='BLANK1', translate=False)
elif device_type == 'HIP':
if True:
@@ -1719,7 +1719,8 @@ class CyclesPreferences(bpy.types.AddonPreferences):
.replace('(TM)', unicodedata.lookup('TRADE MARK SIGN'))
.replace('(tm)', unicodedata.lookup('TRADE MARK SIGN'))
.replace('(R)', unicodedata.lookup('REGISTERED SIGN'))
.replace('(C)', unicodedata.lookup('COPYRIGHT SIGN'))
.replace('(C)', unicodedata.lookup('COPYRIGHT SIGN')),
translate=False
)

def draw_impl(self, layout, context):
@@ -20,13 +20,9 @@

CCL_NAMESPACE_BEGIN

ParticleCurveData::ParticleCurveData()
{
}
ParticleCurveData::ParticleCurveData() {}

ParticleCurveData::~ParticleCurveData()
{
}
ParticleCurveData::~ParticleCurveData() {}

static float shaperadius(float shape, float root, float tip, float time)
{
@@ -54,44 +54,10 @@ int BlenderDisplayShader::get_tex_coord_attrib_location()
/* --------------------------------------------------------------------
* BlenderFallbackDisplayShader.
*/

/* TODO move shaders to standalone .glsl file. */
static const char *FALLBACK_VERTEX_SHADER =
"uniform vec2 fullscreen;\n"
"in vec2 texCoord;\n"
"in vec2 pos;\n"
"out vec2 texCoord_interp;\n"
"\n"
"vec2 normalize_coordinates()\n"
"{\n"
" return (vec2(2.0) * (pos / fullscreen)) - vec2(1.0);\n"
"}\n"
"\n"
"void main()\n"
"{\n"
" gl_Position = vec4(normalize_coordinates(), 0.0, 1.0);\n"
" texCoord_interp = texCoord;\n"
"}\n\0";

static const char *FALLBACK_FRAGMENT_SHADER =
"uniform sampler2D image_texture;\n"
"in vec2 texCoord_interp;\n"
"out vec4 fragColor;\n"
"\n"
"void main()\n"
"{\n"
" fragColor = texture(image_texture, texCoord_interp);\n"
"}\n\0";

static GPUShader *compile_fallback_shader(void)
{
/* NOTE: Compilation errors are logged to console. */
GPUShader *shader = GPU_shader_create(FALLBACK_VERTEX_SHADER,
FALLBACK_FRAGMENT_SHADER,
nullptr,
nullptr,
nullptr,
"FallbackCyclesBlitShader");
GPUShader *shader = GPU_shader_create_from_info_name("gpu_shader_cycles_display_fallback");
return shader;
}

@@ -250,8 +216,13 @@ class DisplayGPUTexture {
height = texture_height;

/* Texture must have a minimum size of 1x1. */
gpu_texture = GPU_texture_create_2d(
"CyclesBlitTexture", max(width, 1), max(height, 1), 1, GPU_RGBA16F, nullptr);
gpu_texture = GPU_texture_create_2d("CyclesBlitTexture",
max(width, 1),
max(height, 1),
1,
GPU_RGBA16F,
GPU_TEXTURE_USAGE_GENERAL,
nullptr);

if (!gpu_texture) {
LOG(ERROR) << "Error creating texture.";
@@ -259,7 +230,7 @@ class DisplayGPUTexture {
}

GPU_texture_filter_mode(gpu_texture, false);
GPU_texture_wrap_mode(gpu_texture, false, true);
GPU_texture_extend_mode(gpu_texture, GPU_SAMPLER_EXTEND_MODE_EXTEND);

++num_used;

@@ -734,14 +705,14 @@ static void draw_tile(const float2 &zoom,
const float zoomed_height = draw_tile.params.size.y * zoom.y;
if (texture.width != draw_tile.params.size.x || texture.height != draw_tile.params.size.y) {
/* Resolution divider is different from 1, force nearest interpolation. */
GPU_texture_bind_ex(texture.gpu_texture, GPU_SAMPLER_DEFAULT, 0, false);
GPU_texture_bind_ex(texture.gpu_texture, GPUSamplerState::default_sampler(), 0);
}
else if (zoomed_width - draw_tile.params.size.x > 0.5f ||
zoomed_height - draw_tile.params.size.y > 0.5f) {
GPU_texture_bind_ex(texture.gpu_texture, GPU_SAMPLER_DEFAULT, 0, false);
GPU_texture_bind_ex(texture.gpu_texture, GPUSamplerState::default_sampler(), 0);
}
else {
GPU_texture_bind_ex(texture.gpu_texture, GPU_SAMPLER_FILTER, 0, false);
GPU_texture_bind_ex(texture.gpu_texture, {GPU_SAMPLER_FILTERING_LINEAR}, 0);
}

/* Draw at the parameters for which the texture has been updated for. This allows to always draw
@@ -22,9 +22,7 @@ CCL_NAMESPACE_BEGIN

template<typename K, typename T, typename Flags = uint> class id_map {
public:
id_map(Scene *scene_) : scene(scene_)
{
}
id_map(Scene *scene_) : scene(scene_) {}

~id_map()
{
@@ -262,9 +260,7 @@ struct GeometryKey {
void *id;
Geometry::Type geometry_type;

GeometryKey(void *id, Geometry::Type geometry_type) : id(id), geometry_type(geometry_type)
{
}
GeometryKey(void *id, Geometry::Type geometry_type) : id(id), geometry_type(geometry_type) {}

bool operator<(const GeometryKey &k) const
{
@@ -246,22 +246,25 @@ static void fill_generic_attribute(BL::Mesh &b_mesh,
if (polys_num == 0) {
return;
}
const MPoly *polys = static_cast<const MPoly *>(b_mesh.polygons[0].ptr.data);
const int *poly_offsets = static_cast<const int *>(b_mesh.polygons[0].ptr.data);
for (int i = 0; i < polys_num; i++) {
const MPoly &b_poly = polys[i];
for (int j = 0; j < b_poly.totloop; j++) {
*data = get_value_at_index(b_poly.loopstart + j);
const int poly_start = poly_offsets[i];
const int poly_size = poly_offsets[i + 1] - poly_start;
for (int j = 0; j < poly_size; j++) {
*data = get_value_at_index(poly_start + j);
data++;
}
}
}
else {
for (BL::MeshLoopTriangle &t : b_mesh.loop_triangles) {
const int index = t.index() * 3;
BL::Array<int, 3> loops = t.loops();
data[index] = get_value_at_index(loops[0]);
data[index + 1] = get_value_at_index(loops[1]);
data[index + 2] = get_value_at_index(loops[2]);
const int tris_num = b_mesh.loop_triangles.length();
const MLoopTri *looptris = static_cast<const MLoopTri *>(
b_mesh.loop_triangles[0].ptr.data);
for (int i = 0; i < tris_num; i++) {
const MLoopTri &tri = looptris[i];
data[i * 3 + 0] = get_value_at_index(tri.tri[0]);
data[i * 3 + 1] = get_value_at_index(tri.tri[1]);
data[i * 3 + 2] = get_value_at_index(tri.tri[2]);
}
}
break;
@@ -315,8 +318,11 @@ static void fill_generic_attribute(BL::Mesh &b_mesh,
}
}
else {
for (BL::MeshLoopTriangle &t : b_mesh.loop_triangles) {
data[t.index()] = get_value_at_index(t.polygon_index());
const int tris_num = b_mesh.loop_triangles.length();
const MLoopTri *looptris = static_cast<const MLoopTri *>(
b_mesh.loop_triangles[0].ptr.data);
for (int i = 0; i < tris_num; i++) {
data[i] = get_value_at_index(looptris[i].poly);
}
}
break;
@@ -412,43 +418,58 @@ static void attr_create_generic(Scene *scene,
switch (b_data_type) {
case BL::Attribute::data_type_FLOAT: {
BL::FloatAttribute b_float_attribute{b_attribute};
if (b_float_attribute.data.length() == 0) {
continue;
}
const float *src = static_cast<const float *>(b_float_attribute.data[0].ptr.data);
Attribute *attr = attributes.add(name, TypeFloat, element);
float *data = attr->data_float();
fill_generic_attribute(b_mesh, data, b_domain, subdivision, [&](int i) {
return b_float_attribute.data[i].value();
});
fill_generic_attribute(b_mesh, data, b_domain, subdivision, [&](int i) { return src[i]; });
break;
}
case BL::Attribute::data_type_BOOLEAN: {
BL::BoolAttribute b_bool_attribute{b_attribute};
if (b_bool_attribute.data.length() == 0) {
continue;
}
const bool *src = static_cast<const bool *>(b_bool_attribute.data[0].ptr.data);
Attribute *attr = attributes.add(name, TypeFloat, element);
float *data = attr->data_float();
fill_generic_attribute(b_mesh, data, b_domain, subdivision, [&](int i) {
return (float)b_bool_attribute.data[i].value();
});
fill_generic_attribute(
b_mesh, data, b_domain, subdivision, [&](int i) { return (float)src[i]; });
break;
}
case BL::Attribute::data_type_INT: {
BL::IntAttribute b_int_attribute{b_attribute};
if (b_int_attribute.data.length() == 0) {
continue;
}
const int *src = static_cast<const int *>(b_int_attribute.data[0].ptr.data);
Attribute *attr = attributes.add(name, TypeFloat, element);
float *data = attr->data_float();
fill_generic_attribute(b_mesh, data, b_domain, subdivision, [&](int i) {
return (float)b_int_attribute.data[i].value();
});
fill_generic_attribute(
b_mesh, data, b_domain, subdivision, [&](int i) { return (float)src[i]; });
break;
}
case BL::Attribute::data_type_FLOAT_VECTOR: {
BL::FloatVectorAttribute b_vector_attribute{b_attribute};
if (b_vector_attribute.data.length() == 0) {
continue;
}
const float(*src)[3] = static_cast<const float(*)[3]>(b_vector_attribute.data[0].ptr.data);
Attribute *attr = attributes.add(name, TypeVector, element);
float3 *data = attr->data_float3();
fill_generic_attribute(b_mesh, data, b_domain, subdivision, [&](int i) {
BL::Array<float, 3> v = b_vector_attribute.data[i].vector();
return make_float3(v[0], v[1], v[2]);
return make_float3(src[i][0], src[i][1], src[i][2]);
});
break;
}
case BL::Attribute::data_type_BYTE_COLOR: {
BL::ByteColorAttribute b_color_attribute{b_attribute};
if (b_color_attribute.data.length() == 0) {
continue;
}
const uchar(*src)[4] = static_cast<const uchar(*)[4]>(b_color_attribute.data[0].ptr.data);

if (element == ATTR_ELEMENT_CORNER) {
element = ATTR_ELEMENT_CORNER_BYTE;
@@ -462,21 +483,26 @@ static void attr_create_generic(Scene *scene,
uchar4 *data = attr->data_uchar4();
fill_generic_attribute(b_mesh, data, b_domain, subdivision, [&](int i) {
/* Compress/encode vertex color using the sRGB curve. */
const float4 c = get_float4(b_color_attribute.data[i].color());
return color_float4_to_uchar4(color_linear_to_srgb_v4(c));
return make_uchar4(src[i][0], src[i][1], src[i][2], src[i][3]);
});
}
else {
float4 *data = attr->data_float4();
fill_generic_attribute(b_mesh, data, b_domain, subdivision, [&](int i) {
BL::Array<float, 4> v = b_color_attribute.data[i].color();
return make_float4(v[0], v[1], v[2], v[3]);
return make_float4(color_srgb_to_linear(byte_to_float(src[i][0])),
color_srgb_to_linear(byte_to_float(src[i][1])),
color_srgb_to_linear(byte_to_float(src[i][2])),
color_srgb_to_linear(byte_to_float(src[i][3])));
});
}
break;
}
case BL::Attribute::data_type_FLOAT_COLOR: {
BL::FloatColorAttribute b_color_attribute{b_attribute};
if (b_color_attribute.data.length() == 0) {
continue;
}
const float(*src)[4] = static_cast<const float(*)[4]>(b_color_attribute.data[0].ptr.data);

Attribute *attr = attributes.add(name, TypeRGBA, element);
if (is_render_color) {
@@ -485,18 +511,20 @@ static void attr_create_generic(Scene *scene,

float4 *data = attr->data_float4();
fill_generic_attribute(b_mesh, data, b_domain, subdivision, [&](int i) {
BL::Array<float, 4> v = b_color_attribute.data[i].color();
return make_float4(v[0], v[1], v[2], v[3]);
return make_float4(src[i][0], src[i][1], src[i][2], src[i][3]);
});
break;
}
case BL::Attribute::data_type_FLOAT2: {
BL::Float2Attribute b_float2_attribute{b_attribute};
if (b_float2_attribute.data.length() == 0) {
continue;
}
const float(*src)[2] = static_cast<const float(*)[2]>(b_float2_attribute.data[0].ptr.data);
Attribute *attr = attributes.add(name, TypeFloat2, element);
float2 *data = attr->data_float2();
fill_generic_attribute(b_mesh, data, b_domain, subdivision, [&](int i) {
BL::Array<float, 2> v = b_float2_attribute.data[i].vector();
return make_float2(v[0], v[1]);
return make_float2(src[i][0], src[i][1]);
});
break;
}
@@ -511,6 +539,9 @@ static void attr_create_generic(Scene *scene,
static void attr_create_uv_map(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh)
{
if (!b_mesh.uv_layers.empty()) {
const int tris_num = b_mesh.loop_triangles.length();
const MLoopTri *looptris = static_cast<const MLoopTri *>(b_mesh.loop_triangles[0].ptr.data);

for (BL::MeshUVLoopLayer &l : b_mesh.uv_layers) {
const bool active_render = l.active_render();
AttributeStandard uv_std = (active_render) ? ATTR_STD_UV : ATTR_STD_NONE;
@@ -538,14 +569,13 @@ static void attr_create_uv_map(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh)
uv_attr = mesh->attributes.add(uv_name, TypeFloat2, ATTR_ELEMENT_CORNER);
}

const float(*b_uv_map)[2] = static_cast<const float(*)[2]>(l.uv[0].ptr.data);
float2 *fdata = uv_attr->data_float2();

for (BL::MeshLoopTriangle &t : b_mesh.loop_triangles) {
int3 li = get_int3(t.loops());
fdata[0] = get_float2(l.data[li[0]].uv());
fdata[1] = get_float2(l.data[li[1]].uv());
fdata[2] = get_float2(l.data[li[2]].uv());
fdata += 3;
for (int i = 0; i < tris_num; i++) {
const MLoopTri &tri = looptris[i];
fdata[i * 3 + 0] = make_float2(b_uv_map[tri.tri[0]][0], b_uv_map[tri.tri[0]][1]);
fdata[i * 3 + 1] = make_float2(b_uv_map[tri.tri[1]][0], b_uv_map[tri.tri[1]][1]);
fdata[i * 3 + 2] = make_float2(b_uv_map[tri.tri[2]][0], b_uv_map[tri.tri[2]][1]);
}
}

@@ -578,7 +608,7 @@ static void attr_create_subd_uv_map(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh,
if (polys_num == 0) {
return;
}
const MPoly *polys = static_cast<const MPoly *>(b_mesh.polygons[0].ptr.data);
const int *poly_offsets = static_cast<const int *>(b_mesh.polygons[0].ptr.data);

if (!b_mesh.uv_layers.empty()) {
BL::Mesh::uv_layers_iterator l;
@@ -614,9 +644,10 @@ static void attr_create_subd_uv_map(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh,
float2 *fdata = uv_attr->data_float2();

for (int i = 0; i < polys_num; i++) {
const MPoly &b_poly = polys[i];
for (int j = 0; j < b_poly.totloop; j++) {
*(fdata++) = get_float2(l->data[b_poly.loopstart + j].uv());
const int poly_start = poly_offsets[i];
const int poly_size = poly_offsets[i + 1] - poly_start;
for (int j = 0; j < poly_size; j++) {
*(fdata++) = get_float2(l->data[poly_start + j].uv());
}
}
}
@@ -649,9 +680,7 @@ static void attr_create_subd_uv_map(Scene *scene, Mesh *mesh, BL::Mesh &b_mesh,
/* Compare vertices by sum of their coordinates. */
class VertexAverageComparator {
public:
VertexAverageComparator(const array<float3> &verts) : verts_(verts)
{
}
VertexAverageComparator(const array<float3> &verts) : verts_(verts) {}

bool operator()(const int &vert_idx_a, const int &vert_idx_b)
{
|
||||
}
|
||||
}
|
||||
|
||||
static const int *find_corner_vert_attribute(BL::Mesh b_mesh)
|
||||
{
|
||||
for (BL::Attribute &b_attribute : b_mesh.attributes) {
|
||||
if (b_attribute.domain() != BL::Attribute::domain_CORNER) {
|
||||
continue;
|
||||
}
|
||||
if (b_attribute.data_type() != BL::Attribute::data_type_INT) {
|
||||
continue;
|
||||
}
|
||||
if (b_attribute.name() != ".corner_vert") {
|
||||
continue;
|
||||
}
|
||||
BL::IntAttribute b_int_attribute{b_attribute};
|
||||
if (b_int_attribute.data.length() == 0) {
|
||||
return nullptr;
|
||||
}
|
||||
return static_cast<const int *>(b_int_attribute.data[0].ptr.data);
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
/* The Random Per Island attribute is a random float associated with each
|
||||
* connected component (island) of the mesh. The attribute is computed by
|
||||
* first classifying the vertices into different sets using a Disjoint Set
|
||||
@@ -837,7 +887,6 @@ static void attr_create_random_per_island(Scene *scene,
|
||||
return;
|
||||
}
|
||||
|
||||
const int polys_num = b_mesh.polygons.length();
|
||||
int number_of_vertices = b_mesh.vertices.length();
|
||||
if (number_of_vertices == 0) {
|
||||
return;
|
||||
@@ -847,6 +896,7 @@ static void attr_create_random_per_island(Scene *scene,
|
||||
|
||||
const MEdge *edges = static_cast<MEdge *>(b_mesh.edges[0].ptr.data);
|
||||
const int edges_num = b_mesh.edges.length();
|
||||
const int *corner_verts = find_corner_vert_attribute(b_mesh);
|
||||
|
||||
for (int i = 0; i < edges_num; i++) {
|
||||
vertices_sets.join(edges[i].v1, edges[i].v2);
|
||||
@@ -857,18 +907,22 @@ static void attr_create_random_per_island(Scene *scene,
|
||||
float *data = attribute->data_float();
|
||||
|
||||
if (!subdivision) {
|
||||
for (BL::MeshLoopTriangle &t : b_mesh.loop_triangles) {
|
||||
data[t.index()] = hash_uint_to_float(vertices_sets.find(t.vertices()[0]));
|
||||
const int tris_num = b_mesh.loop_triangles.length();
|
||||
if (tris_num != 0) {
|
||||
const MLoopTri *looptris = static_cast<const MLoopTri *>(b_mesh.loop_triangles[0].ptr.data);
|
||||
for (int i = 0; i < tris_num; i++) {
|
||||
const int vert = corner_verts[looptris[i].tri[0]];
|
||||
data[i] = hash_uint_to_float(vertices_sets.find(vert));
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
const int polys_num = b_mesh.polygons.length();
|
||||
if (polys_num != 0) {
|
||||
const MPoly *polys = static_cast<const MPoly *>(b_mesh.polygons[0].ptr.data);
|
||||
const MLoop *loops = static_cast<const MLoop *>(b_mesh.loops[0].ptr.data);
|
||||
const int *poly_offsets = static_cast<const int *>(b_mesh.polygons[0].ptr.data);
|
||||
for (int i = 0; i < polys_num; i++) {
|
||||
const MPoly &b_poly = polys[i];
|
||||
const MLoop &b_loop = loops[b_poly.loopstart];
|
||||
data[i] = hash_uint_to_float(vertices_sets.find(b_loop.v));
|
||||
const int vert = corner_verts[poly_offsets[i]];
|
||||
data[i] = hash_uint_to_float(vertices_sets.find(vert));
|
||||
}
|
||||
}
|
||||
}
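For context on the join()/find() calls above: islands are built by joining the two vertices of every edge in a disjoint-set (union-find) structure, and each face then looks up the representative of one of its vertices. A minimal sketch of such a structure, assuming only what those calls imply (this is not the Cycles DisjointSet class; names and details are made up):

#include <numeric>
#include <vector>

/* Minimal union-find: join() merges the sets of two vertices, find() returns a
 * stable representative index for the set a vertex currently belongs to. */
class SimpleDisjointSet {
 public:
  explicit SimpleDisjointSet(const size_t size) : parent_(size)
  {
    std::iota(parent_.begin(), parent_.end(), 0);
  }

  size_t find(size_t i)
  {
    while (parent_[i] != i) {
      parent_[i] = parent_[parent_[i]]; /* Path halving keeps chains short. */
      i = parent_[i];
    }
    return i;
  }

  void join(const size_t a, const size_t b)
  {
    parent_[find(a)] = find(b);
  }

 private:
  std::vector<size_t> parent_;
};

Hashing the representative index, as hash_uint_to_float() does in the hunk above, then yields one stable pseudo-random float per island.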
@@ -876,7 +930,7 @@ static void attr_create_random_per_island(Scene *scene,
/* Create Mesh */
static std::optional<BL::IntAttribute> find_material_index_attribute(BL::Mesh b_mesh)
static const int *find_material_index_attribute(BL::Mesh b_mesh)
{
for (BL::Attribute &b_attribute : b_mesh.attributes) {
if (b_attribute.domain() != BL::Attribute::domain_FACE) {
@@ -888,9 +942,34 @@ static std::optional<BL::IntAttribute> find_material_index_attribute(BL::Mesh b_
if (b_attribute.name() != "material_index") {
continue;
}
return BL::IntAttribute{b_attribute};
BL::IntAttribute b_int_attribute{b_attribute};
if (b_int_attribute.data.length() == 0) {
return nullptr;
}
return static_cast<const int *>(b_int_attribute.data[0].ptr.data);
}
return std::nullopt;
return nullptr;
}
static const bool *find_sharp_face_attribute(BL::Mesh b_mesh)
{
for (BL::Attribute &b_attribute : b_mesh.attributes) {
if (b_attribute.domain() != BL::Attribute::domain_FACE) {
continue;
}
if (b_attribute.data_type() != BL::Attribute::data_type_BOOLEAN) {
continue;
}
if (b_attribute.name() != "sharp_face") {
continue;
}
BL::IntAttribute b_int_attribute{b_attribute};
if (b_int_attribute.data.length() == 0) {
return nullptr;
}
return static_cast<const bool *>(b_int_attribute.data[0].ptr.data);
}
return nullptr;
}
static void create_mesh(Scene *scene,
@@ -902,13 +981,10 @@ static void create_mesh(Scene *scene,
const bool subdivision = false,
const bool subdivide_uvs = true)
{
/* count vertices and faces */
int numverts = b_mesh.vertices.length();
const int numverts = b_mesh.vertices.length();
const int polys_num = b_mesh.polygons.length();
int numfaces = (!subdivision) ? b_mesh.loop_triangles.length() : b_mesh.polygons.length();
int numtris = 0;
int numcorners = 0;
int numngons = 0;
const int numcorners = b_mesh.loops.length();
bool use_loop_normals = b_mesh.use_auto_smooth() &&
(mesh->get_subdivision_type() != Mesh::SUBDIVISION_CATMULL_CLARK);
@@ -918,36 +994,44 @@ static void create_mesh(Scene *scene,
}
const float(*positions)[3] = static_cast<const float(*)[3]>(b_mesh.vertices[0].ptr.data);
const int *corner_verts = find_corner_vert_attribute(b_mesh);
const int *material_indices = find_material_index_attribute(b_mesh);
const bool *sharp_faces = find_sharp_face_attribute(b_mesh);
const float(*corner_normals)[3] = nullptr;
if (use_loop_normals) {
corner_normals = static_cast<const float(*)[3]>(b_mesh.corner_normals[0].ptr.data);
}
int numngons = 0;
int numtris = 0;
if (!subdivision) {
numtris = numfaces;
}
else {
const MPoly *polys = static_cast<const MPoly *>(b_mesh.polygons[0].ptr.data);
const int *poly_offsets = static_cast<const int *>(b_mesh.polygons[0].ptr.data);
for (int i = 0; i < polys_num; i++) {
const MPoly &b_poly = polys[i];
numngons += (b_poly.totloop == 4) ? 0 : 1;
numcorners += b_poly.totloop;
const int poly_start = poly_offsets[i];
const int poly_size = poly_offsets[i + 1] - poly_start;
numngons += (poly_size == 4) ? 0 : 1;
}
}
/* allocate memory */
if (subdivision) {
mesh->reserve_subd_faces(numfaces, numngons, numcorners);
mesh->resize_subd_faces(numfaces, numngons, numcorners);
}
mesh->resize_mesh(numverts, numtris);
mesh->reserve_mesh(numverts, numtris);
/* create vertex coordinates and normals */
float3 *verts = mesh->get_verts().data();
for (int i = 0; i < numverts; i++) {
mesh->add_vertex(make_float3(positions[i][0], positions[i][1], positions[i][2]));
verts[i] = make_float3(positions[i][0], positions[i][1], positions[i][2]);
}
AttributeSet &attributes = (subdivision) ? mesh->subd_attributes : mesh->attributes;
Attribute *attr_N = attributes.add(ATTR_STD_VERTEX_NORMAL);
float3 *N = attr_N->data_float3();
if (subdivision || !use_loop_normals) {
if (subdivision || !(use_loop_normals && corner_normals)) {
const float(*b_vert_normals)[3] = static_cast<const float(*)[3]>(
b_mesh.vertex_normals[0].ptr.data);
for (int i = 0; i < numverts; i++) {
@@ -975,61 +1059,107 @@
}
}
std::optional<BL::IntAttribute> material_indices = find_material_index_attribute(b_mesh);
auto get_material_index = [&](const int poly_index) -> int {
if (material_indices) {
return clamp(material_indices->data[poly_index].value(), 0, used_shaders.size() - 1);
}
return 0;
auto clamp_material_index = [&](const int material_index) -> int {
return clamp(material_index, 0, used_shaders.size() - 1);
};
/* create faces */
const MPoly *polys = static_cast<const MPoly *>(b_mesh.polygons[0].ptr.data);
if (!subdivision) {
for (BL::MeshLoopTriangle &t : b_mesh.loop_triangles) {
const int poly_index = t.polygon_index();
const MPoly &b_poly = polys[poly_index];
int3 vi = get_int3(t.vertices());
int *triangles = mesh->get_triangles().data();
bool *smooth = mesh->get_smooth().data();
int *shader = mesh->get_shader().data();
int shader = get_material_index(poly_index);
bool smooth = (b_poly.flag & ME_SMOOTH) || use_loop_normals;
const MLoopTri *looptris = static_cast<const MLoopTri *>(b_mesh.loop_triangles[0].ptr.data);
for (int i = 0; i < numtris; i++) {
const MLoopTri &tri = looptris[i];
triangles[i * 3 + 0] = corner_verts[tri.tri[0]];
triangles[i * 3 + 1] = corner_verts[tri.tri[1]];
triangles[i * 3 + 2] = corner_verts[tri.tri[2]];
}
if (use_loop_normals) {
BL::Array<float, 9> loop_normals = t.split_normals();
if (material_indices) {
for (int i = 0; i < numtris; i++) {
const int poly_index = looptris[i].poly;
shader[i] = clamp_material_index(material_indices[poly_index]);
}
}
else {
std::fill(shader, shader + numtris, 0);
}
if (sharp_faces && !(use_loop_normals && corner_normals)) {
for (int i = 0; i < numtris; i++) {
const int poly_index = looptris[i].poly;
smooth[i] = !sharp_faces[poly_index];
}
}
else {
std::fill(smooth, smooth + numtris, true);
}
if (use_loop_normals && corner_normals) {
for (int i = 0; i < numtris; i++) {
const MLoopTri &tri = looptris[i];
for (int i = 0; i < 3; i++) {
N[vi[i]] = make_float3(
loop_normals[i * 3], loop_normals[i * 3 + 1], loop_normals[i * 3 + 2]);
const int corner = tri.tri[i];
const int vert = corner_verts[corner];
const float *normal = corner_normals[corner];
N[vert] = make_float3(normal[0], normal[1], normal[2]);
}
}
/* Create triangles.
*
* NOTE: Autosmooth is already taken care about.
*/
mesh->add_triangle(vi[0], vi[1], vi[2], shader, smooth);
}
mesh->tag_triangles_modified();
mesh->tag_shader_modified();
mesh->tag_smooth_modified();
}
else {
vector<int> vi;
int *subd_start_corner = mesh->get_subd_start_corner().data();
int *subd_num_corners = mesh->get_subd_num_corners().data();
int *subd_shader = mesh->get_subd_shader().data();
bool *subd_smooth = mesh->get_subd_smooth().data();
int *subd_ptex_offset = mesh->get_subd_ptex_offset().data();
int *subd_face_corners = mesh->get_subd_face_corners().data();
const MLoop *loops = static_cast<const MLoop *>(b_mesh.loops[0].ptr.data);
for (int i = 0; i < numfaces; i++) {
const MPoly &b_poly = polys[i];
int n = b_poly.totloop;
int shader = get_material_index(i);
bool smooth = (b_poly.flag & ME_SMOOTH) || use_loop_normals;
vi.resize(n);
for (int i = 0; i < n; i++) {
/* NOTE: Autosmooth is already taken care about. */
vi[i] = loops[b_poly.loopstart + i].v;
if (sharp_faces && !use_loop_normals) {
for (int i = 0; i < numfaces; i++) {
subd_smooth[i] = !sharp_faces[i];
}
/* create subd faces */
mesh->add_subd_face(&vi[0], n, shader, smooth);
}
else {
std::fill(subd_smooth, subd_smooth + numfaces, true);
}
if (material_indices) {
for (int i = 0; i < numfaces; i++) {
subd_shader[i] = clamp_material_index(material_indices[i]);
}
}
else {
std::fill(subd_shader, subd_shader + numfaces, 0);
}
std::copy(corner_verts, corner_verts + numcorners, subd_face_corners);
const int *poly_offsets = static_cast<const int *>(b_mesh.polygons[0].ptr.data);
int ptex_offset = 0;
for (int i = 0; i < numfaces; i++) {
const int poly_start = poly_offsets[i];
const int poly_size = poly_offsets[i + 1] - poly_start;
subd_start_corner[i] = poly_start;
subd_num_corners[i] = poly_size;
subd_ptex_offset[i] = ptex_offset;
const int num_ptex = (poly_size == 4) ? 1 : poly_size;
ptex_offset += num_ptex;
}
mesh->tag_subd_face_corners_modified();
mesh->tag_subd_start_corner_modified();
mesh->tag_subd_num_corners_modified();
mesh->tag_subd_shader_modified();
mesh->tag_subd_smooth_modified();
mesh->tag_subd_ptex_offset_modified();
}
/* Create all needed attributes.
@@ -1080,11 +1210,11 @@ static void create_subd_mesh(Scene *scene,
const int edges_num = b_mesh.edges.length();
if (edges_num != 0 && b_mesh.edge_creases.length() > 0) {
BL::MeshEdgeCreaseLayer creases = b_mesh.edge_creases[0];
const float *creases = static_cast<const float *>(b_mesh.edge_creases[0].data[0].ptr.data);
size_t num_creases = 0;
for (int i = 0; i < edges_num; i++) {
if (creases.data[i].value() != 0.0f) {
if (creases[i] != 0.0f) {
num_creases++;
}
}
@@ -1093,7 +1223,7 @@
const MEdge *edges = static_cast<MEdge *>(b_mesh.edges[0].ptr.data);
for (int i = 0; i < edges_num; i++) {
const float crease = creases.data[i].value();
const float crease = creases[i];
if (crease != 0.0f) {
const MEdge &b_edge = edges[i];
mesh->add_edge_crease(b_edge.v1, b_edge.v2, crease);
@@ -1101,10 +1231,11 @@
}
}
for (BL::MeshVertexCreaseLayer &c : b_mesh.vertex_creases) {
for (int i = 0; i < c.data.length(); ++i) {
if (c.data[i].value() != 0.0f) {
mesh->add_vertex_crease(i, c.data[i].value());
for (BL::MeshVertexCreaseLayer &layer : b_mesh.vertex_creases) {
const float *creases = static_cast<const float *>(layer.data[0].ptr.data);
for (int i = 0; i < layer.data.length(); ++i) {
if (creases[i] != 0.0f) {
mesh->add_vertex_crease(i, creases[i]);
}
}
}
@@ -5,13 +5,9 @@
CCL_NAMESPACE_BEGIN
BlenderOutputDriver::BlenderOutputDriver(BL::RenderEngine &b_engine) : b_engine_(b_engine)
{
}
BlenderOutputDriver::BlenderOutputDriver(BL::RenderEngine &b_engine) : b_engine_(b_engine) {}
BlenderOutputDriver::~BlenderOutputDriver()
{
}
BlenderOutputDriver::~BlenderOutputDriver() {}
bool BlenderOutputDriver::read_render_tile(const Tile &tile)
{
@@ -222,7 +222,10 @@ static void export_pointcloud_motion(PointCloud *pointcloud,
/* Export motion points. */
const int num_points = pointcloud->num_points();
float3 *mP = attr_mP->data_float3() + motion_step * num_points;
// Point cloud attributes are stored as float4 with the radius
// in the w element. This is explict now as float3 is no longer
// interchangeable with float4 as it is packed now.
float4 *mP = attr_mP->data_float4() + motion_step * num_points;
bool have_motion = false;
const array<float3> &pointcloud_points = pointcloud->get_points();
@@ -231,11 +234,9 @@
std::optional<BL::FloatAttribute> b_attr_radius = find_radius_attribute(b_pointcloud);
for (int i = 0; i < std::min(num_points, b_points_num); i++) {
const float3 co = get_float3(b_attr_position.data[i].vector());
const float3 P = get_float3(b_attr_position.data[i].vector());
const float radius = b_attr_radius ? b_attr_radius->data[i].value() : 0.01f;
float3 P = co;
P.w = radius;
mP[i] = P;
mP[i] = make_float4(P.x, P.y, P.z, radius);
have_motion = have_motion || (P != pointcloud_points[i]);
}
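As the comment in the hunk above explains, each motion point now packs its position and radius into one 4-float element. A tiny stand-alone illustration of that packing (the struct and helper here are local stand-ins for the Cycles float4 and make_float4, not the real types):

struct PackedPoint {
  float x, y, z; /* Position. */
  float w;       /* Per-point radius, matching the w element described above. */
};

static PackedPoint pack_motion_point(const float position[3], const float radius)
{
  return {position[0], position[1], position[2], radius};
}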
@@ -67,9 +67,7 @@ BlenderSync::BlenderSync(BL::RenderEngine &b_engine,
max_subdivisions = RNA_int_get(&cscene, "max_subdivisions");
}
BlenderSync::~BlenderSync()
{
}
BlenderSync::~BlenderSync() {}
void BlenderSync::reset(BL::BlendData &b_data, BL::Scene &b_scene)
{
@@ -349,8 +347,7 @@ void BlenderSync::sync_integrator(BL::ViewLayer &b_view_layer, bool background)
bool use_light_tree = get_boolean(cscene, "use_light_tree");
integrator->set_use_light_tree(use_light_tree);
integrator->set_light_sampling_threshold(
(use_light_tree) ? 0.0f : get_float(cscene, "light_sampling_threshold"));
integrator->set_light_sampling_threshold(get_float(cscene, "light_sampling_threshold"));
if (integrator->use_light_tree_is_modified()) {
scene->light_manager->tag_update(scene, LightManager::UPDATE_ALL);
@@ -702,9 +702,7 @@ static inline bool object_need_motion_attribute(BObjectInfo &b_ob_info, Scene *s
class EdgeMap {
public:
EdgeMap()
{
}
EdgeMap() {}
void clear()
{
@@ -61,6 +61,11 @@ if(WITH_CYCLES_EMBREE)
list(APPEND LIB
${EMBREE_LIBRARIES}
)
if(EMBREE_SYCL_SUPPORT)
list(APPEND LIB
${SYCL_LIBRARIES}
)
endif()
endif()
cycles_add_library(cycles_bvh "${LIB}" ${SRC} ${SRC_HEADERS})
@@ -16,18 +16,10 @@ CCL_NAMESPACE_BEGIN
/* SSE replacements */
__forceinline void prefetch_L1(const void * /*ptr*/)
{
}
__forceinline void prefetch_L2(const void * /*ptr*/)
{
}
__forceinline void prefetch_L3(const void * /*ptr*/)
{
}
__forceinline void prefetch_NTA(const void * /*ptr*/)
{
}
__forceinline void prefetch_L1(const void * /*ptr*/) {}
__forceinline void prefetch_L2(const void * /*ptr*/) {}
__forceinline void prefetch_L3(const void * /*ptr*/) {}
__forceinline void prefetch_NTA(const void * /*ptr*/) {}
template<size_t src> __forceinline float extract(const int4 &b)
{
@@ -23,9 +23,7 @@ class BVHBuild;
class BVHObjectBinning : public BVHRange {
public:
__forceinline BVHObjectBinning() : leafSAH(FLT_MAX)
{
}
__forceinline BVHObjectBinning() : leafSAH(FLT_MAX) {}
BVHObjectBinning(const BVHRange &job,
BVHReference *prims,
@@ -49,9 +49,7 @@ BVHBuild::BVHBuild(const vector<Object *> &objects_,
spatial_min_overlap = 0.0f;
}
BVHBuild::~BVHBuild()
{
}
BVHBuild::~BVHBuild() {}
/* Adding References */
@@ -180,9 +178,9 @@ void BVHBuild::add_reference_curves(BoundBox &root, BoundBox &center, Hair *hair
curve.bounds_grow(k, &hair->get_curve_keys()[0], curve_radius, bounds);
const size_t num_keys = hair->get_curve_keys().size();
const size_t num_steps = hair->get_motion_steps();
const float3 *key_steps = curve_attr_mP->data_float3();
const float4 *key_steps = curve_attr_mP->data_float4();
for (size_t step = 0; step < num_steps - 1; step++) {
curve.bounds_grow(k, key_steps + step * num_keys, curve_radius, bounds);
curve.bounds_grow(k, key_steps + step * num_keys, bounds);
}
if (bounds.valid()) {
int packed_type = PRIMITIVE_PACK_SEGMENT(primitive_type, k);
@@ -200,7 +198,7 @@
const float num_bvh_steps_inv_1 = 1.0f / (num_bvh_steps - 1);
const size_t num_steps = hair->get_motion_steps();
const float3 *curve_keys = &hair->get_curve_keys()[0];
const float3 *key_steps = curve_attr_mP->data_float3();
const float4 *key_steps = curve_attr_mP->data_float4();
const size_t num_keys = hair->get_curve_keys().size();
/* Calculate bounding box of the previous time step.
* Will be reused later to avoid duplicated work on
Some files were not shown because too many files have changed in this diff.