# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2016 Blender Foundation. All rights reserved.
# Libraries configuration for Windows.
add_definitions(-DWIN32)
if(NOT MSVC)
message(FATAL_ERROR "Compiler is unsupported")
endif()
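# clang-cl reuses the MSVC toolchain: locate the redistributables and configure
# OpenMP by hand, since CMake has trouble detecting it for clang-cl.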
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
set(MSVC_CLANG ON)
set(VC_TOOLS_DIR $ENV{VCToolsRedistDir} CACHE STRING "Location of the msvc redistributables")
set(MSVC_REDIST_DIR ${VC_TOOLS_DIR})
if(DEFINED MSVC_REDIST_DIR)
file(TO_CMAKE_PATH ${MSVC_REDIST_DIR} MSVC_REDIST_DIR)
else()
message("Unable to detect the Visual Studio redist directory, copying of the runtime dlls will not work, try running from the visual studio developer prompt.")
endif()
# 1) CMake has issues detecting openmp support in clang-cl so we have to provide
# the right switches here.
# 2) While the /openmp switch *should* work, it currently doesn't as of clang 9.0.0
if(WITH_OPENMP)
set(OPENMP_CUSTOM ON)
set(OPENMP_FOUND ON)
set(OpenMP_C_FLAGS "/clang:-fopenmp")
set(OpenMP_CXX_FLAGS "/clang:-fopenmp")
get_filename_component(LLVMROOT "[HKEY_LOCAL_MACHINE\\SOFTWARE\\WOW6432Node\\LLVM\\LLVM;]" ABSOLUTE CACHE)
set(CLANG_OPENMP_DLL "${LLVMROOT}/bin/libomp.dll")
set(CLANG_OPENMP_LIB "${LLVMROOT}/lib/libomp.lib")
if(NOT EXISTS "${CLANG_OPENMP_DLL}")
message(FATAL_ERROR "Clang OpenMP library (${CLANG_OPENMP_DLL}) not found.")
endif()
set(OpenMP_LINKER_FLAGS "\"${CLANG_OPENMP_LIB}\"")
endif()
if(WITH_WINDOWS_STRIPPED_PDB)
message(WARNING "stripped pdb not supported with clang, disabling..")
set(WITH_WINDOWS_STRIPPED_PDB OFF)
endif()
else()
if(WITH_BLENDER AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 19.28.29921) # MSVC 2019 16.9.16
message(FATAL_ERROR "Compiler is unsupported, MSVC 2019 16.9.16 or newer is required for building blender.")
endif()
endif()
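# Make blender the default startup project in the Visual Studio IDE.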
if(WITH_BLENDER AND NOT WITH_PYTHON_MODULE)
set_property(DIRECTORY PROPERTY VS_STARTUP_PROJECT blender)
endif()
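# Helper macros: warn when hard-coded library paths are used, and only run
# find_package() when WITH_WINDOWS_FIND_MODULES is enabled.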
macro(warn_hardcoded_paths package_name
)
if(WITH_WINDOWS_FIND_MODULES)
message(WARNING "Using HARDCODED ${package_name} locations")
endif()
endmacro()
macro(windows_find_package package_name
)
if(WITH_WINDOWS_FIND_MODULES)
find_package(${package_name})
endif()
endmacro()
macro(find_package_wrapper)
if(WITH_WINDOWS_FIND_MODULES)
find_package(${ARGV})
endif()
endmacro()
add_definitions(-DWIN32)
# Needed, otherwise system encoding causes utf-8 encoding to fail in some cases (C4819)
add_compile_options("$<$<C_COMPILER_ID:MSVC>:/utf-8>")
add_compile_options("$<$<CXX_COMPILER_ID:MSVC>:/utf-8>")
# needed for some MSVC installations
# 4099 : PDB 'filename' was not found with 'object/library'
string(APPEND CMAKE_EXE_LINKER_FLAGS " /SAFESEH:NO /ignore:4099")
string(APPEND CMAKE_SHARED_LINKER_FLAGS " /SAFESEH:NO /ignore:4099")
string(APPEND CMAKE_MODULE_LINKER_FLAGS " /SAFESEH:NO /ignore:4099")
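# Windows system libraries that all Blender binaries link against.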
list(APPEND PLATFORM_LINKLIBS
ws2_32 vfw32 winmm kernel32 user32 gdi32 comdlg32 Comctl32 version
advapi32 shfolder shell32 ole32 oleaut32 uuid psapi Dbghelp Shlwapi
pathcch Shcore Dwmapi Crypt32
)
if(WITH_INPUT_IME)
list(APPEND PLATFORM_LINKLIBS imm32)
endif()
add_definitions(
-D_CRT_NONSTDC_NO_DEPRECATE
-D_CRT_SECURE_NO_DEPRECATE
-D_SCL_SECURE_NO_DEPRECATE
-D_CONSOLE
-D_LIB
)
# MSVC11 needs _ALLOW_KEYWORD_MACROS to build
add_definitions(-D_ALLOW_KEYWORD_MACROS)
# RTTI is on by default even without this switch
# however having it in the CXX Flags makes it difficult
# to remove for individual files that want to disable it
# using the /GR- flag without generating a build warning
# that both /GR and /GR- are specified.
remove_cc_flag("/GR")
# Make the Windows 8.1 API available for use.
add_definitions(-D_WIN32_WINNT=0x603)
# First generate the manifest for tests since it will not need the dependency on the CRT.
configure_file(${CMAKE_SOURCE_DIR}/release/windows/manifest/blender.exe.manifest.in ${CMAKE_CURRENT_BINARY_DIR}/tests.exe.manifest @ONLY)
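# Optionally bundle the MSVC C/C++ runtime DLLs as a private 'blender.crt' side-by-side assembly.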
if(WITH_WINDOWS_BUNDLE_CRT)
set(CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_SKIP TRUE)
set(CMAKE_INSTALL_UCRT_LIBRARIES TRUE)
set(CMAKE_INSTALL_OPENMP_LIBRARIES ${WITH_OPENMP})
include(InstallRequiredSystemLibraries)
# ucrtbase(d).dll cannot be in the manifest, due to the way windows 10 handles
# redirects for this dll, for details see T88813.
foreach(lib ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS})
string(FIND ${lib} "ucrtbase" pos)
if(NOT pos EQUAL -1)
list(REMOVE_ITEM CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS ${lib})
install(FILES ${lib} DESTINATION . COMPONENT Libraries)
endif()
endforeach()
# Install the CRT to the blender.crt Sub folder.
install(FILES ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS} DESTINATION ./blender.crt COMPONENT Libraries)
windows_generate_manifest(
FILES "${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS}"
OUTPUT "${CMAKE_BINARY_DIR}/blender.crt.manifest"
NAME "blender.crt"
)
install(FILES ${CMAKE_BINARY_DIR}/blender.crt.manifest DESTINATION ./blender.crt)
set(BUNDLECRT "<dependency><dependentAssembly><assemblyIdentity type=\"win32\" name=\"blender.crt\" version=\"1.0.0.0\" /></dependentAssembly></dependency>")
endif()
set(BUNDLECRT "${BUNDLECRT}<dependency><dependentAssembly><assemblyIdentity type=\"win32\" name=\"blender.shared\" version=\"1.0.0.0\" /></dependentAssembly></dependency>")
configure_file(${CMAKE_SOURCE_DIR}/release/windows/manifest/blender.exe.manifest.in ${CMAKE_CURRENT_BINARY_DIR}/blender.exe.manifest @ONLY)
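# Drop CMake's default runtime and debug-info flags; they are re-added per configuration below.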
remove_cc_flag("/MDd" "/MD" "/Zi")
if(MSVC_CLANG) # Clang's version of cl doesn't support all flags
string(APPEND CMAKE_CXX_FLAGS " ${CXX_WARN_FLAGS} /nologo /J /Gd /EHsc -Wno-unused-command-line-argument -Wno-microsoft-enum-forward-reference ")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /nologo /J /Gd -Wno-unused-command-line-argument -Wno-microsoft-enum-forward-reference")
else()
string(APPEND CMAKE_CXX_FLAGS " /nologo /J /Gd /MP /EHsc /bigobj /Zc:inline")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /nologo /J /Gd /MP /bigobj /Zc:inline")
endif()
# X64 ASAN is available and usable on MSVC 16.9 preview 4 and up
if(WITH_COMPILER_ASAN AND MSVC AND NOT MSVC_CLANG)
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 19.28.29828)
# Set a flag so we don't have to do this comparison all the time
set(MSVC_ASAN ON)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /fsanitize=address")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /fsanitize=address")
string(APPEND CMAKE_EXE_LINKER_FLAGS_DEBUG " /INCREMENTAL:NO")
string(APPEND CMAKE_SHARED_LINKER_FLAGS_DEBUG " /INCREMENTAL:NO")
else()
message("-- ASAN not supported on MSVC ${CMAKE_CXX_COMPILER_VERSION}")
endif()
endif()
# C++ standards conformance (/permissive-) is available on msvc 15.5 (1912) and up
if(NOT MSVC_CLANG)
string(APPEND CMAKE_CXX_FLAGS " /permissive-")
# Two-phase name lookup does not play nicely with OpenMP yet, so disable for now
string(APPEND CMAKE_CXX_FLAGS " /Zc:twoPhase-")
endif()
if(WITH_WINDOWS_SCCACHE AND CMAKE_VS_MSBUILD_COMMAND)
message(WARNING "Disabling sccache, sccache is not supported with msbuild")
set(WITH_WINDOWS_SCCACHE OFF)
endif()
# Debug Symbol format
# sccache # MSVC_ASAN # format # why
# ON # ON # Z7 # sccache will only play nice with Z7.
# ON # OFF # Z7 # sccache will only play nice with Z7.
# OFF # ON # Zi # Asan will not play nice with Edit and Continue.
# OFF # OFF # ZI # Neither ASAN nor sccache is enabled; Edit and
# Continue is available.
# Release Symbol format
# sccache # MSVC_ASAN # format # why
# ON # ON # Z7 # sccache will only play nice with Z7
# ON # OFF # Z7 # sccache will only play nice with Z7
# OFF # ON # Zi # Asan will not play nice with Edit and Continue
# OFF # OFF # Zi # Edit and Continue disables some optimizations
if(WITH_WINDOWS_SCCACHE)
set(CMAKE_C_COMPILER_LAUNCHER sccache)
set(CMAKE_CXX_COMPILER_LAUNCHER sccache)
set(SYMBOL_FORMAT /Z7)
set(SYMBOL_FORMAT_RELEASE /Z7)
else()
unset(CMAKE_C_COMPILER_LAUNCHER)
unset(CMAKE_CXX_COMPILER_LAUNCHER)
if(MSVC_ASAN)
set(SYMBOL_FORMAT /Z7)
set(SYMBOL_FORMAT_RELEASE /Z7)
else()
set(SYMBOL_FORMAT /ZI)
set(SYMBOL_FORMAT_RELEASE /Zi)
endif()
endif()
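# Optional release PDB generation, enabled by WITH_WINDOWS_PDB.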
if(WITH_WINDOWS_PDB)
set(PDB_INFO_OVERRIDE_FLAGS "${SYMBOL_FORMAT_RELEASE}")
set(PDB_INFO_OVERRIDE_LINKER_FLAGS "/DEBUG /OPT:REF /OPT:ICF /INCREMENTAL:NO")
endif()
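# Per-configuration compiler flags: dynamic CRT (/MD or /MDd) plus the debug-symbol format selected above.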
string(APPEND CMAKE_CXX_FLAGS_DEBUG " /MDd ${SYMBOL_FORMAT}")
string(APPEND CMAKE_C_FLAGS_DEBUG " /MDd ${SYMBOL_FORMAT}")
string(APPEND CMAKE_CXX_FLAGS_RELEASE " /MD ${PDB_INFO_OVERRIDE_FLAGS}")
string(APPEND CMAKE_C_FLAGS_RELEASE " /MD ${PDB_INFO_OVERRIDE_FLAGS}")
string(APPEND CMAKE_CXX_FLAGS_MINSIZEREL " /MD ${PDB_INFO_OVERRIDE_FLAGS}")
string(APPEND CMAKE_C_FLAGS_MINSIZEREL " /MD ${PDB_INFO_OVERRIDE_FLAGS}")
string(APPEND CMAKE_CXX_FLAGS_RELWITHDEBINFO " /MD ${SYMBOL_FORMAT_RELEASE}")
string(APPEND CMAKE_C_FLAGS_RELWITHDEBINFO " /MD ${SYMBOL_FORMAT_RELEASE}")
unset(SYMBOL_FORMAT)
unset(SYMBOL_FORMAT_RELEASE)
# JMC is available on msvc 15.8 (1915) and up
if(NOT MSVC_CLANG)
string(APPEND CMAKE_CXX_FLAGS_DEBUG " /JMC")
endif()
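# Base linker flags: console subsystem and a 2 MiB stack.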
string(APPEND PLATFORM_LINKFLAGS " /SUBSYSTEM:CONSOLE /STACK:2097152")
set(PLATFORM_LINKFLAGS_RELEASE "/NODEFAULTLIB:libcmt.lib /NODEFAULTLIB:libcmtd.lib /NODEFAULTLIB:msvcrtd.lib")
string(APPEND PLATFORM_LINKFLAGS_DEBUG " /IGNORE:4099 /NODEFAULTLIB:libcmt.lib /NODEFAULTLIB:msvcrt.lib /NODEFAULTLIB:libcmtd.lib")
# Ignore linker warnings that are meaningless for us.
string(APPEND PLATFORM_LINKFLAGS " /ignore:4049 /ignore:4217 /ignore:4221")
set(PLATFORM_LINKFLAGS_RELEASE "${PLATFORM_LINKFLAGS} ${PDB_INFO_OVERRIDE_LINKER_FLAGS}")
string(APPEND CMAKE_STATIC_LINKER_FLAGS " /ignore:4221")
if(CMAKE_CL_64)
string(PREPEND PLATFORM_LINKFLAGS "/MACHINE:X64 ")
else()
string(PREPEND PLATFORM_LINKFLAGS "/MACHINE:IX86 /LARGEADDRESSAWARE ")
endif()
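# Locate the pre-compiled libraries folder (LIBDIR) when the caller has not provided one.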
if(NOT DEFINED LIBDIR)
# Setup 64bit and 32bit windows systems
if(CMAKE_CL_64)
message(STATUS "64 bit compiler detected.")
set(LIBDIR_BASE "win64")
else()
message(FATAL_ERROR "32 bit compiler detected, blender no longer provides pre-built libraries for 32 bit windows, please set the LIBDIR cmake variable to your own library folder")
endif()
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 19.30.30423)
message(STATUS "Visual Studio 2022 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
elseif(MSVC_VERSION GREATER 1919)
message(STATUS "Visual Studio 2019 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
endif()
else()
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
endif()
if(NOT EXISTS "${LIBDIR}/")
message(FATAL_ERROR "\n\nWindows requires pre-compiled libs at: '${LIBDIR}'. Please run `make update` in the blender source folder to obtain them.")
endif()
include(platform_old_libs_update)
# Only supported in the VS IDE & Clang Tidy needs to be on.
if(CMAKE_GENERATOR MATCHES "^Visual Studio.+" AND WITH_CLANG_TIDY)
set(CMAKE_VS_GLOBALS
"RunCodeAnalysis=false"
"EnableMicrosoftCodeAnalysis=false"
"EnableClangTidyCodeAnalysis=true"
)
set(VS_CLANG_TIDY ON)
endif()
# To support building against both 3.4 and 3.5 lib folders, disable materialX if it is not found
set(MATERIALX_LIB_FOLDER_EXISTS EXISTS ${LIBDIR}/materialx)
set_and_warn_library_found("MaterialX" MATERIALX_LIB_FOLDER_EXISTS WITH_MATERIALX)
unset(MATERIALX_LIB_FOLDER_EXISTS)
# Mark libdir as system headers with a lower warn level, to resolve some warnings
# that we have very little control over
if(NOT MSVC_CLANG AND # Available with MSVC 15.7+ but not for CLANG.
NOT WITH_WINDOWS_SCCACHE AND # And not when sccache is enabled
NOT VS_CLANG_TIDY) # Clang-tidy does not like these options
add_compile_options(/experimental:external /external:templates- /external:I "${LIBDIR}" /external:W0)
endif()
# Add each of our libraries to our cmake_prefix_path so find_package() can work
file(GLOB children RELATIVE ${LIBDIR} ${LIBDIR}/*)
foreach(child ${children})
if(IS_DIRECTORY ${LIBDIR}/${child})
list(APPEND CMAKE_PREFIX_PATH ${LIBDIR}/${child})
endif()
endforeach()
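# The blocks below point each dependency at its pre-compiled copy in LIBDIR; where
# windows_find_package() is called, a copy found via WITH_WINDOWS_FIND_MODULES takes precedence.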
if(WITH_PUGIXML)
set(PUGIXML_LIBRARIES optimized ${LIBDIR}/pugixml/lib/pugixml.lib debug ${LIBDIR}/pugixml/lib/pugixml_d.lib)
set(PUGIXML_INCLUDE_DIR ${LIBDIR}/pugixml/include)
endif()
set(ZLIB_INCLUDE_DIRS ${LIBDIR}/zlib/include)
set(ZLIB_LIBRARIES ${LIBDIR}/zlib/lib/libz_st.lib)
set(ZLIB_INCLUDE_DIR ${LIBDIR}/zlib/include)
set(ZLIB_LIBRARY ${LIBDIR}/zlib/lib/libz_st.lib)
set(ZLIB_DIR ${LIBDIR}/zlib)
windows_find_package(ZLIB) # we want to find zlib before finding things that depend on it, like png
windows_find_package(PNG)
if(NOT PNG_FOUND)
warn_hardcoded_paths(libpng)
set(PNG_PNG_INCLUDE_DIR ${LIBDIR}/png/include)
set(PNG_LIBRARIES ${LIBDIR}/png/lib/libpng.lib ${ZLIB_LIBRARY})
set(PNG "${LIBDIR}/png")
set(PNG_INCLUDE_DIRS "${PNG}/include")
set(PNG_LIBPATH ${PNG}/lib) # not cmake defined
endif()
set(JPEG_NAMES ${JPEG_NAMES} libjpeg)
windows_find_package(JPEG REQUIRED)
if(NOT JPEG_FOUND)
warn_hardcoded_paths(libjpeg)
set(JPEG_INCLUDE_DIR ${LIBDIR}/jpeg/include)
set(JPEG_LIBRARIES ${LIBDIR}/jpeg/lib/libjpeg.lib)
endif()
set(EPOXY_ROOT_DIR ${LIBDIR}/epoxy)
windows_find_package(Epoxy REQUIRED)
if(NOT EPOXY_FOUND)
set(Epoxy_INCLUDE_DIRS ${LIBDIR}/epoxy/include)
set(Epoxy_LIBRARIES ${LIBDIR}/epoxy/lib/epoxy.lib)
endif()
set(PTHREADS_INCLUDE_DIRS ${LIBDIR}/pthreads/include)
set(PTHREADS_LIBRARIES ${LIBDIR}/pthreads/lib/pthreadVC3.lib)
set(FREETYPE ${LIBDIR}/freetype)
set(FREETYPE_INCLUDE_DIRS
${LIBDIR}/freetype/include
${LIBDIR}/freetype/include/freetype2
)
set(FREETYPE_LIBRARIES
${LIBDIR}/freetype/lib/freetype2ST.lib
${LIBDIR}/brotli/lib/brotlidec-static.lib
${LIBDIR}/brotli/lib/brotlicommon-static.lib
)
windows_find_package(Freetype REQUIRED)
if(WITH_FFTW3)
set(FFTW3 ${LIBDIR}/fftw3)
set(FFTW3_LIBRARIES ${FFTW3}/lib/libfftw.lib)
set(FFTW3_INCLUDE_DIRS ${FFTW3}/include)
set(FFTW3_LIBPATH ${FFTW3}/lib)
endif()
if(WITH_IMAGE_WEBP)
set(WEBP_INCLUDE_DIRS ${LIBDIR}/webp/include)
set(WEBP_ROOT_DIR ${LIBDIR}/webp)
set(WEBP_LIBRARIES ${LIBDIR}/webp/lib/webp.lib ${LIBDIR}/webp/lib/webpdemux.lib ${LIBDIR}/webp/lib/webpmux.lib)
set(WEBP_FOUND ON)
endif()
if(WITH_OPENCOLLADA)
set(OPENCOLLADA ${LIBDIR}/opencollada)
set(OPENCOLLADA_INCLUDE_DIRS
${OPENCOLLADA}/include/opencollada/COLLADAStreamWriter
${OPENCOLLADA}/include/opencollada/COLLADABaseUtils
${OPENCOLLADA}/include/opencollada/COLLADAFramework
${OPENCOLLADA}/include/opencollada/COLLADASaxFrameworkLoader
${OPENCOLLADA}/include/opencollada/GeneratedSaxParser
)
set(OPENCOLLADA_LIBRARIES
optimized ${OPENCOLLADA}/lib/opencollada/OpenCOLLADASaxFrameworkLoader.lib
optimized ${OPENCOLLADA}/lib/opencollada/OpenCOLLADAFramework.lib
optimized ${OPENCOLLADA}/lib/opencollada/OpenCOLLADABaseUtils.lib
optimized ${OPENCOLLADA}/lib/opencollada/OpenCOLLADAStreamWriter.lib
optimized ${OPENCOLLADA}/lib/opencollada/MathMLSolver.lib
optimized ${OPENCOLLADA}/lib/opencollada/GeneratedSaxParser.lib
optimized ${OPENCOLLADA}/lib/opencollada/buffer.lib
optimized ${OPENCOLLADA}/lib/opencollada/ftoa.lib
debug ${OPENCOLLADA}/lib/opencollada/OpenCOLLADASaxFrameworkLoader_d.lib
debug ${OPENCOLLADA}/lib/opencollada/OpenCOLLADAFramework_d.lib
debug ${OPENCOLLADA}/lib/opencollada/OpenCOLLADABaseUtils_d.lib
debug ${OPENCOLLADA}/lib/opencollada/OpenCOLLADAStreamWriter_d.lib
debug ${OPENCOLLADA}/lib/opencollada/MathMLSolver_d.lib
debug ${OPENCOLLADA}/lib/opencollada/GeneratedSaxParser_d.lib
debug ${OPENCOLLADA}/lib/opencollada/buffer_d.lib
debug ${OPENCOLLADA}/lib/opencollada/ftoa_d.lib
)
if(EXISTS ${LIBDIR}/xml2/lib/libxml2s.lib) # 3.4 libraries
list(APPEND OPENCOLLADA_LIBRARIES ${LIBDIR}/xml2/lib/libxml2s.lib)
else()
list(APPEND OPENCOLLADA_LIBRARIES ${OPENCOLLADA}/lib/opencollada/xml.lib)
endif()
list(APPEND OPENCOLLADA_LIBRARIES ${OPENCOLLADA}/lib/opencollada/UTF.lib)
set(PCRE_LIBRARIES
optimized ${OPENCOLLADA}/lib/opencollada/pcre.lib
debug ${OPENCOLLADA}/lib/opencollada/pcre_d.lib
)
endif()
if(WITH_CODEC_FFMPEG)
set(FFMPEG_INCLUDE_DIRS
${LIBDIR}/ffmpeg/include
${LIBDIR}/ffmpeg/include/msvc
)
windows_find_package(FFmpeg)
if(NOT FFmpeg_FOUND)
warn_hardcoded_paths(FFmpeg)
set(FFMPEG_LIBRARIES
${LIBDIR}/ffmpeg/lib/avcodec.lib
${LIBDIR}/ffmpeg/lib/avformat.lib
${LIBDIR}/ffmpeg/lib/avdevice.lib
${LIBDIR}/ffmpeg/lib/avutil.lib
${LIBDIR}/ffmpeg/lib/swscale.lib
)
endif()
endif()
if(WITH_IMAGE_OPENEXR)
# Imath and OpenEXR have a single combined build option and include and library variables
# used by the rest of the build system.
set(IMATH_ROOT_DIR ${LIBDIR}/imath)
set(IMATH_VERSION "3.14")
windows_find_package(IMATH REQUIRED)
if(NOT IMATH_FOUND)
set(IMATH ${LIBDIR}/imath)
set(IMATH_INCLUDE_DIR ${IMATH}/include)
set(IMATH_INCLUDE_DIRS ${IMATH_INCLUDE_DIR} ${IMATH}/include/Imath)
set(IMATH_LIBPATH ${IMATH}/lib)
if(EXISTS ${IMATH_LIBPATH}/Imath_s.lib)
set(IMATH_POSTFIX _s)
endif()
set(IMATH_LIBRARIES
optimized ${IMATH_LIBPATH}/Imath${IMATH_POSTFIX}.lib
debug ${IMATH_LIBPATH}/Imath${IMATH_POSTFIX}_d.lib
)
endif()
set(OPENEXR_ROOT_DIR ${LIBDIR}/openexr)
set(OPENEXR_VERSION "3.14")
windows_find_package(OPENEXR REQUIRED)
if(NOT OpenEXR_FOUND)
warn_hardcoded_paths(OpenEXR)
set(OPENEXR ${LIBDIR}/openexr)
set(OPENEXR_INCLUDE_DIR ${OPENEXR}/include)
set(OPENEXR_INCLUDE_DIRS ${OPENEXR_INCLUDE_DIR} ${IMATH_INCLUDE_DIRS} ${OPENEXR_INCLUDE_DIR}/OpenEXR)
set(OPENEXR_LIBPATH ${OPENEXR}/lib)
# Check if the blender 3.3 libs static library exists,
# if not assume this is a 3.4+ dynamic version.
if(EXISTS "${OPENEXR_LIBPATH}/OpenEXR_s.lib")
set(OPENEXR_POSTFIX _s)
endif()
set(OPENEXR_LIBRARIES
optimized ${OPENEXR_LIBPATH}/Iex${OPENEXR_POSTFIX}.lib
optimized ${OPENEXR_LIBPATH}/IlmThread${OPENEXR_POSTFIX}.lib
optimized ${OPENEXR_LIBPATH}/OpenEXR${OPENEXR_POSTFIX}.lib
optimized ${OPENEXR_LIBPATH}/OpenEXRCore${OPENEXR_POSTFIX}.lib
optimized ${OPENEXR_LIBPATH}/OpenEXRUtil${OPENEXR_POSTFIX}.lib
debug ${OPENEXR_LIBPATH}/Iex${OPENEXR_POSTFIX}_d.lib
debug ${OPENEXR_LIBPATH}/IlmThread${OPENEXR_POSTFIX}_d.lib
debug ${OPENEXR_LIBPATH}/OpenEXR${OPENEXR_POSTFIX}_d.lib
debug ${OPENEXR_LIBPATH}/OpenEXRCore${OPENEXR_POSTFIX}_d.lib
debug ${OPENEXR_LIBPATH}/OpenEXRUtil${OPENEXR_POSTFIX}_d.lib
${IMATH_LIBRARIES}
)
endif()
endif()
if(WITH_IMAGE_TIFF)
# Try to find tiff first, then complain and set static (and possibly wrong) paths
windows_find_package(TIFF)
if(NOT TIFF_FOUND)
warn_hardcoded_paths(libtiff)
set(TIFF_LIBRARY ${LIBDIR}/tiff/lib/libtiff.lib)
set(TIFF_INCLUDE_DIR ${LIBDIR}/tiff/include)
endif()
endif()
if(WITH_JACK)
set(JACK_INCLUDE_DIRS
${LIBDIR}/jack/include/jack
${LIBDIR}/jack/include
)
set(JACK_LIBRARIES optimized ${LIBDIR}/jack/lib/libjack.lib debug ${LIBDIR}/jack/lib/libjack_d.lib)
endif()
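# Bundled Python from LIBDIR; the generator expression picks the _d debug interpreter for Debug builds.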
if(WITH_PYTHON)
# Cache version for make_bpy_wheel.py to detect.
unset(PYTHON_VERSION CACHE)
set(PYTHON_VERSION "3.10" CACHE STRING "Python version")
string(REPLACE "." "" _PYTHON_VERSION_NO_DOTS ${PYTHON_VERSION})
set(PYTHON_LIBRARY ${LIBDIR}/python/${_PYTHON_VERSION_NO_DOTS}/libs/python${_PYTHON_VERSION_NO_DOTS}.lib)
set(PYTHON_LIBRARY_DEBUG ${LIBDIR}/python/${_PYTHON_VERSION_NO_DOTS}/libs/python${_PYTHON_VERSION_NO_DOTS}_d.lib)
set(PYTHON_EXECUTABLE ${LIBDIR}/python/${_PYTHON_VERSION_NO_DOTS}/bin/python$<$<CONFIG:Debug>:_d>.exe)
set(PYTHON_INCLUDE_DIR ${LIBDIR}/python/${_PYTHON_VERSION_NO_DOTS}/include)
set(PYTHON_NUMPY_INCLUDE_DIRS ${LIBDIR}/python/${_PYTHON_VERSION_NO_DOTS}/lib/site-packages/numpy/core/include)
set(NUMPY_FOUND ON)
unset(_PYTHON_VERSION_NO_DOTS)
# uncached vars
set(PYTHON_INCLUDE_DIRS "${PYTHON_INCLUDE_DIR}")
set(PYTHON_LIBRARIES debug "${PYTHON_LIBRARY_DEBUG}" optimized "${PYTHON_LIBRARY}" )
endif()
if(WITH_BOOST)
if(WITH_CYCLES AND WITH_CYCLES_OSL)
set(boost_extra_libs wave)
endif()
if(WITH_INTERNATIONAL)
list(APPEND boost_extra_libs locale)
endif()
set(Boost_USE_STATIC_RUNTIME ON) # prefix lib
set(Boost_USE_MULTITHREADED ON) # suffix -mt
set(Boost_USE_STATIC_LIBS ON) # suffix -s
if(WITH_WINDOWS_FIND_MODULES)
find_package(Boost COMPONENTS date_time filesystem thread regex system ${boost_extra_libs})
endif()
if(NOT Boost_FOUND)
warn_hardcoded_paths(BOOST)
set(BOOST ${LIBDIR}/boost)
set(BOOST_INCLUDE_DIR ${BOOST}/include)
set(BOOST_LIBPATH ${BOOST}/lib)
set(BOOST_VERSION_HEADER ${BOOST_INCLUDE_DIR}/boost/version.hpp)
if(EXISTS ${BOOST_VERSION_HEADER})
file(STRINGS "${BOOST_VERSION_HEADER}" BOOST_LIB_VERSION REGEX "#define BOOST_LIB_VERSION ")
if(BOOST_LIB_VERSION MATCHES "#define BOOST_LIB_VERSION \"([0-9_]+)\"")
set(BOOST_VERSION "${CMAKE_MATCH_1}")
endif()
endif()
if(NOT BOOST_VERSION)
message(FATAL_ERROR "Unable to determine Boost version")
endif()
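# Boost library names encode the toolset (vc142), threading model, architecture and version;
# the prefix and debug tag differ between the 3.3 and 3.4 lib folders.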
set(BOOST_POSTFIX "vc142-mt-x64-${BOOST_VERSION}")
set(BOOST_DEBUG_POSTFIX "vc142-mt-gyd-x64-${BOOST_VERSION}")
set(BOOST_PREFIX "")
# This file is new in 3.4; if it does not exist, assume we are building against 3.3 libs
set(BOOST_34_TRIGGER_FILE ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_python310-${BOOST_DEBUG_POSTFIX}.lib)
if(NOT EXISTS ${BOOST_34_TRIGGER_FILE})
set(BOOST_DEBUG_POSTFIX "vc142-mt-gd-x64-${BOOST_VERSION}")
set(BOOST_PREFIX "lib")
endif()
set(BOOST_LIBRARIES
optimized ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_date_time-${BOOST_POSTFIX}.lib
optimized ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_filesystem-${BOOST_POSTFIX}.lib
optimized ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_regex-${BOOST_POSTFIX}.lib
optimized ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_system-${BOOST_POSTFIX}.lib
optimized ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_thread-${BOOST_POSTFIX}.lib
optimized ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_chrono-${BOOST_POSTFIX}.lib
debug ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_date_time-${BOOST_DEBUG_POSTFIX}.lib
debug ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_filesystem-${BOOST_DEBUG_POSTFIX}.lib
debug ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_regex-${BOOST_DEBUG_POSTFIX}.lib
debug ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_system-${BOOST_DEBUG_POSTFIX}.lib
debug ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_thread-${BOOST_DEBUG_POSTFIX}.lib
debug ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_chrono-${BOOST_DEBUG_POSTFIX}.lib
)
if(EXISTS ${BOOST_34_TRIGGER_FILE})
if(WITH_USD)
set(BOOST_PYTHON_LIBRARIES
debug ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_python310-${BOOST_DEBUG_POSTFIX}.lib
optimized ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_python310-${BOOST_POSTFIX}.lib
)
endif()
endif()
if(WITH_CYCLES AND WITH_CYCLES_OSL)
set(BOOST_LIBRARIES ${BOOST_LIBRARIES}
optimized ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_wave-${BOOST_POSTFIX}.lib
debug ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_wave-${BOOST_DEBUG_POSTFIX}.lib
)
endif()
if(WITH_INTERNATIONAL)
set(BOOST_LIBRARIES ${BOOST_LIBRARIES}
optimized ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_locale-${BOOST_POSTFIX}.lib
debug ${BOOST_LIBPATH}/${BOOST_PREFIX}boost_locale-${BOOST_DEBUG_POSTFIX}.lib
)
endif()
else() # we found boost using find_package
set(BOOST_INCLUDE_DIR ${Boost_INCLUDE_DIRS})
set(BOOST_LIBRARIES ${Boost_LIBRARIES})
set(BOOST_LIBPATH ${Boost_LIBRARY_DIRS})
endif()
set(BOOST_DEFINITIONS "-DBOOST_ALL_NO_LIB")
endif()
if(WITH_OPENIMAGEIO)
windows_find_package(OpenImageIO)
if(NOT OpenImageIO_FOUND)
set(OPENIMAGEIO ${LIBDIR}/OpenImageIO)
set(OPENIMAGEIO_LIBPATH ${OPENIMAGEIO}/lib)
set(OPENIMAGEIO_INCLUDE_DIR ${OPENIMAGEIO}/include)
set(OPENIMAGEIO_INCLUDE_DIRS ${OPENIMAGEIO_INCLUDE_DIR})
set(OIIO_OPTIMIZED optimized ${OPENIMAGEIO_LIBPATH}/OpenImageIO.lib optimized ${OPENIMAGEIO_LIBPATH}/OpenImageIO_Util.lib)
set(OIIO_DEBUG debug ${OPENIMAGEIO_LIBPATH}/OpenImageIO_d.lib debug ${OPENIMAGEIO_LIBPATH}/OpenImageIO_Util_d.lib)
set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG})
endif()
set(OPENIMAGEIO_DEFINITIONS "-DUSE_TBB=0")
set(OPENIMAGEIO_IDIFF "${OPENIMAGEIO}/bin/idiff.exe")
# If the .dll does not exist, assume it is a static OIIO
if(NOT EXISTS ${OPENIMAGEIO}/bin/OpenImageIO.dll)
add_definitions(-DOIIO_STATIC_DEFINE)
endif()
add_definitions(-DOIIO_NO_SSE=1)
endif()
if(WITH_LLVM)
set(LLVM_ROOT_DIR ${LIBDIR}/llvm CACHE PATH "Path to the LLVM installation")
set(LLVM_INCLUDE_DIRS ${LLVM_ROOT_DIR}/$<$<CONFIG:Debug>:Debug>/include CACHE PATH "Path to the LLVM include directory")
file(GLOB LLVM_LIBRARY_OPTIMIZED ${LLVM_ROOT_DIR}/lib/*.lib)
if(EXISTS ${LLVM_ROOT_DIR}/debug/lib)
foreach(LLVM_OPTIMIZED_LIB ${LLVM_LIBRARY_OPTIMIZED})
get_filename_component(LIBNAME ${LLVM_OPTIMIZED_LIB} ABSOLUTE)
list(APPEND LLVM_LIBS optimized ${LIBNAME})
endforeach(LLVM_OPTIMIZED_LIB)
file(GLOB LLVM_LIBRARY_DEBUG ${LLVM_ROOT_DIR}/debug/lib/*.lib)
foreach(LLVM_DEBUG_LIB ${LLVM_LIBRARY_DEBUG})
get_filename_component(LIBNAME ${LLVM_DEBUG_LIB} ABSOLUTE)
list(APPEND LLVM_LIBS debug ${LIBNAME})
endforeach(LLVM_DEBUG_LIB)
set(LLVM_LIBRARY ${LLVM_LIBS})
else()
message(WARNING "LLVM debug libs not present on this system. Using release libs for debug builds.")
set(LLVM_LIBRARY ${LLVM_LIBRARY_OPTIMIZED})
endif()
endif()
if(WITH_OPENCOLORIO)
windows_find_package(OpenColorIO)
if(NOT OpenColorIO_FOUND)
set(OPENCOLORIO ${LIBDIR}/OpenColorIO)
set(OPENCOLORIO_INCLUDE_DIRS ${OPENCOLORIO}/include)
set(OPENCOLORIO_LIBPATH ${OPENCOLORIO}/lib)
if(EXISTS ${OPENCOLORIO_LIBPATH}/libexpatMD.lib) # 3.4
set(OPENCOLORIO_LIBRARIES
optimized ${OPENCOLORIO_LIBPATH}/OpenColorIO.lib
optimized ${OPENCOLORIO_LIBPATH}/libexpatMD.lib
optimized ${OPENCOLORIO_LIBPATH}/pystring.lib
optimized ${OPENCOLORIO_LIBPATH}/libyaml-cpp.lib
debug ${OPENCOLORIO_LIBPATH}/OpencolorIO_d.lib
debug ${OPENCOLORIO_LIBPATH}/libexpatdMD.lib
debug ${OPENCOLORIO_LIBPATH}/pystring_d.lib
debug ${OPENCOLORIO_LIBPATH}/libyaml-cpp_d.lib
)
set(OPENCOLORIO_DEFINITIONS "-DOpenColorIO_SKIP_IMPORTS")
else()
set(OPENCOLORIO_LIBRARIES
optimized ${OPENCOLORIO_LIBPATH}/OpenColorIO.lib
debug ${OPENCOLORIO_LIBPATH}/OpencolorIO_d.lib
)
endif()
endif()
endif()
if(WITH_OPENVDB)
windows_find_package(OpenVDB)
if(NOT OpenVDB_FOUND)
set(OPENVDB ${LIBDIR}/openVDB)
set(OPENVDB_LIBPATH ${OPENVDB}/lib)
set(OPENVDB_INCLUDE_DIRS ${OPENVDB}/include)
set(OPENVDB_LIBRARIES optimized ${OPENVDB_LIBPATH}/openvdb.lib debug ${OPENVDB_LIBPATH}/openvdb_d.lib )
endif()
set(OPENVDB_DEFINITIONS -DNOMINMAX -D_USE_MATH_DEFINES)
endif()
if(WITH_NANOVDB)
set(NANOVDB ${LIBDIR}/openvdb)
set(NANOVDB_INCLUDE_DIR ${NANOVDB}/include)
if(NOT EXISTS "${NANOVDB_INCLUDE_DIR}/nanovdb")
# When not found, this could be an older lib folder where nanovdb
# had its own lib folder; to ease the transition period, fall back
# to that copy if the copy in openvdb is not found.
set(NANOVDB ${LIBDIR}/nanoVDB)
set(NANOVDB_INCLUDE_DIR ${NANOVDB}/include)
endif()
endif()
if(WITH_OPENIMAGEDENOISE)
set(OPENIMAGEDENOISE ${LIBDIR}/OpenImageDenoise)
set(OPENIMAGEDENOISE_LIBPATH ${LIBDIR}/OpenImageDenoise/lib)
set(OPENIMAGEDENOISE_INCLUDE_DIRS ${OPENIMAGEDENOISE}/include)
set(OPENIMAGEDENOISE_LIBRARIES
optimized ${OPENIMAGEDENOISE_LIBPATH}/OpenImageDenoise.lib
optimized ${OPENIMAGEDENOISE_LIBPATH}/common.lib
optimized ${OPENIMAGEDENOISE_LIBPATH}/dnnl.lib
debug ${OPENIMAGEDENOISE_LIBPATH}/OpenImageDenoise_d.lib
debug ${OPENIMAGEDENOISE_LIBPATH}/common_d.lib
debug ${OPENIMAGEDENOISE_LIBPATH}/dnnl_d.lib
)
set(OPENIMAGEDENOISE_DEFINITIONS)
endif()
if(WITH_ALEMBIC)
set(ALEMBIC ${LIBDIR}/alembic)
set(ALEMBIC_INCLUDE_DIR ${ALEMBIC}/include)
set(ALEMBIC_INCLUDE_DIRS ${ALEMBIC_INCLUDE_DIR})
set(ALEMBIC_LIBPATH ${ALEMBIC}/lib)
set(ALEMBIC_LIBRARIES optimized ${ALEMBIC}/lib/Alembic.lib debug ${ALEMBIC}/lib/Alembic_d.lib)
set(ALEMBIC_FOUND 1)
endif()
if(WITH_IMAGE_OPENJPEG)
set(OPENJPEG ${LIBDIR}/openjpeg)
set(OPENJPEG_INCLUDE_DIRS ${OPENJPEG}/include/openjpeg-2.5)
if(NOT EXISTS "${OPENJPEG_INCLUDE_DIRS}")
# when not found, could be an older lib folder with openjpeg 2.4
# to ease the transition period, fall back if 2.5 is not found.
set(OPENJPEG_INCLUDE_DIRS ${OPENJPEG}/include/openjpeg-2.4)
endif()
set(OPENJPEG_LIBRARIES ${OPENJPEG}/lib/openjp2.lib)
endif()
if(WITH_OPENSUBDIV)
windows_find_package(OpenSubdiv)
if(NOT OpenSubdiv_FOUND)
set(OPENSUBDIV ${LIBDIR}/opensubdiv)
set(OPENSUBDIV_INCLUDE_DIRS ${OPENSUBDIV}/include)
set(OPENSUBDIV_LIBPATH ${OPENSUBDIV}/lib)
set(OPENSUBDIV_LIBRARIES
optimized ${OPENSUBDIV_LIBPATH}/osdCPU.lib
optimized ${OPENSUBDIV_LIBPATH}/osdGPU.lib
debug ${OPENSUBDIV_LIBPATH}/osdCPU_d.lib
debug ${OPENSUBDIV_LIBPATH}/osdGPU_d.lib
)
endif()
endif()
if(WITH_SDL)
set(SDL ${LIBDIR}/sdl)
set(SDL_INCLUDE_DIR ${SDL}/include)
set(SDL_LIBPATH ${SDL}/lib)
set(SDL_LIBRARY ${SDL_LIBPATH}/SDL2.lib)
endif()
# Audio IO
if(WITH_SYSTEM_AUDASPACE)
set(AUDASPACE_INCLUDE_DIRS ${LIBDIR}/audaspace/include/audaspace)
set(AUDASPACE_LIBRARIES ${LIBDIR}/audaspace/lib/audaspace.lib)
set(AUDASPACE_C_INCLUDE_DIRS ${LIBDIR}/audaspace/include/audaspace)
set(AUDASPACE_C_LIBRARIES ${LIBDIR}/audaspace/lib/audaspace-c.lib)
set(AUDASPACE_PY_INCLUDE_DIRS ${LIBDIR}/audaspace/include/audaspace)
set(AUDASPACE_PY_LIBRARIES ${LIBDIR}/audaspace/lib/audaspace-py.lib)
endif()
if(WITH_TBB)
windows_find_package(TBB)
if(NOT TBB_FOUND)
set(TBB_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbb.lib debug ${LIBDIR}/tbb/lib/tbb_debug.lib)
set(TBB_INCLUDE_DIR ${LIBDIR}/tbb/include)
set(TBB_INCLUDE_DIRS ${TBB_INCLUDE_DIR})
if(WITH_TBB_MALLOC_PROXY)
set(TBB_MALLOC_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbbmalloc.lib debug ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib)
add_definitions(-DWITH_TBB_MALLOC)
endif()
endif()
endif()
# used in many places so include globally, like OpenGL
include_directories(SYSTEM "${PTHREADS_INCLUDE_DIRS}")
set(WINTAB_INC ${LIBDIR}/wintab/include)
if(WITH_OPENAL)
set(OPENAL ${LIBDIR}/openal)
set(OPENALDIR ${LIBDIR}/openal)
set(OPENAL_INCLUDE_DIR ${OPENAL}/include/AL)
set(OPENAL_LIBPATH ${OPENAL}/lib)
if(MSVC)
set(OPENAL_LIBRARY ${OPENAL_LIBPATH}/openal32.lib)
else()
set(OPENAL_LIBRARY ${OPENAL_LIBPATH}/wrap_oal.lib)
endif()
endif()
if(WITH_CODEC_SNDFILE)
set(LIBSNDFILE ${LIBDIR}/sndfile)
set(LIBSNDFILE_INCLUDE_DIRS ${LIBSNDFILE}/include)
set(LIBSNDFILE_LIBPATH ${LIBSNDFILE}/lib) # TODO, deprecate
set(LIBSNDFILE_LIBRARIES ${LIBSNDFILE_LIBPATH}/libsndfile-1.lib)
endif()
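# OSL for Cycles: locate the libraries, shaders and the oslc compiler,
# and read the library version from oslversion.h.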
if(WITH_CYCLES AND WITH_CYCLES_OSL)
set(CYCLES_OSL ${LIBDIR}/osl CACHE PATH "Path to OpenShadingLanguage installation")
set(OSL_SHADER_DIR ${CYCLES_OSL}/shaders)
# Shaders have moved around a bit between OSL versions, check multiple locations
if(NOT EXISTS "${OSL_SHADER_DIR}")
set(OSL_SHADER_DIR ${CYCLES_OSL}/share/OSL/shaders)
endif()
find_library(OSL_LIB_EXEC NAMES oslexec PATHS ${CYCLES_OSL}/lib)
find_library(OSL_LIB_COMP NAMES oslcomp PATHS ${CYCLES_OSL}/lib)
find_library(OSL_LIB_QUERY NAMES oslquery PATHS ${CYCLES_OSL}/lib)
find_library(OSL_LIB_NOISE NAMES oslnoise PATHS ${CYCLES_OSL}/lib)
find_library(OSL_LIB_EXEC_DEBUG NAMES oslexec_d PATHS ${CYCLES_OSL}/lib)
find_library(OSL_LIB_COMP_DEBUG NAMES oslcomp_d PATHS ${CYCLES_OSL}/lib)
find_library(OSL_LIB_QUERY_DEBUG NAMES oslquery_d PATHS ${CYCLES_OSL}/lib)
find_library(OSL_LIB_NOISE_DEBUG NAMES oslnoise_d PATHS ${CYCLES_OSL}/lib)
list(APPEND OSL_LIBRARIES
optimized ${OSL_LIB_COMP}
optimized ${OSL_LIB_EXEC}
optimized ${OSL_LIB_QUERY}
debug ${OSL_LIB_EXEC_DEBUG}
debug ${OSL_LIB_COMP_DEBUG}
debug ${OSL_LIB_QUERY_DEBUG}
${PUGIXML_LIBRARIES}
)
if(OSL_LIB_NOISE)
list(APPEND OSL_LIBRARIES optimized ${OSL_LIB_NOISE})
endif()
if(OSL_LIB_NOISE_DEBUG)
list(APPEND OSL_LIBRARIES debug ${OSL_LIB_NOISE_DEBUG})
endif()
find_path(OSL_INCLUDE_DIR OSL/oslclosure.h PATHS ${CYCLES_OSL}/include)
find_program(OSL_COMPILER NAMES oslc PATHS ${CYCLES_OSL}/bin)
file(STRINGS "${OSL_INCLUDE_DIR}/OSL/oslversion.h" OSL_LIBRARY_VERSION_MAJOR
REGEX "^[ \t]*#define[ \t]+OSL_LIBRARY_VERSION_MAJOR[ \t]+[0-9]+.*$")
file(STRINGS "${OSL_INCLUDE_DIR}/OSL/oslversion.h" OSL_LIBRARY_VERSION_MINOR
REGEX "^[ \t]*#define[ \t]+OSL_LIBRARY_VERSION_MINOR[ \t]+[0-9]+.*$")
string(REGEX REPLACE ".*#define[ \t]+OSL_LIBRARY_VERSION_MAJOR[ \t]+([.0-9]+).*"
"\\1" OSL_LIBRARY_VERSION_MAJOR ${OSL_LIBRARY_VERSION_MAJOR})
string(REGEX REPLACE ".*#define[ \t]+OSL_LIBRARY_VERSION_MINOR[ \t]+([.0-9]+).*"
"\\1" OSL_LIBRARY_VERSION_MINOR ${OSL_LIBRARY_VERSION_MINOR})
endif()
if(WITH_CYCLES AND WITH_CYCLES_EMBREE)
windows_find_package(Embree)
if(NOT Embree_FOUND)
set(EMBREE_INCLUDE_DIRS ${LIBDIR}/embree/include)
set(EMBREE_LIBRARIES
optimized ${LIBDIR}/embree/lib/embree3.lib
optimized ${LIBDIR}/embree/lib/embree_avx2.lib
optimized ${LIBDIR}/embree/lib/embree_avx.lib
optimized ${LIBDIR}/embree/lib/embree_sse42.lib
optimized ${LIBDIR}/embree/lib/lexers.lib
optimized ${LIBDIR}/embree/lib/math.lib
optimized ${LIBDIR}/embree/lib/simd.lib
optimized ${LIBDIR}/embree/lib/sys.lib
optimized ${LIBDIR}/embree/lib/tasking.lib
debug ${LIBDIR}/embree/lib/embree3_d.lib
debug ${LIBDIR}/embree/lib/embree_avx2_d.lib
debug ${LIBDIR}/embree/lib/embree_avx_d.lib
debug ${LIBDIR}/embree/lib/embree_sse42_d.lib
debug ${LIBDIR}/embree/lib/lexers_d.lib
debug ${LIBDIR}/embree/lib/math_d.lib
debug ${LIBDIR}/embree/lib/simd_d.lib
debug ${LIBDIR}/embree/lib/sys_d.lib
debug ${LIBDIR}/embree/lib/tasking_d.lib
)
endif()
endif()
if(WITH_USD)
windows_find_package(USD)
if(NOT USD_FOUND)
# 3.5 22.03 libs
set(USD_INCLUDE_DIRS ${LIBDIR}/usd/include)
set(USD_RELEASE_LIB ${LIBDIR}/usd/lib/usd_usd_ms.lib)
set(USD_DEBUG_LIB ${LIBDIR}/usd/lib/usd_usd_ms_d.lib)
set(USD_LIBRARY_DIR ${LIBDIR}/usd/lib)
if(NOT EXISTS "${USD_RELEASE_LIB}") # 3.5 22.11 libs
set(USD_RELEASE_LIB ${LIBDIR}/usd/lib/usd_ms.lib)
set(USD_DEBUG_LIB ${LIBDIR}/usd/lib/usd_ms_d.lib)
endif()
# Older USD had different filenames. If the new ones are
# not found, check whether the older ones exist, to ease the
# transition period while landing libs.
if(NOT EXISTS "${USD_RELEASE_LIB}") # 3.3 static libs
set(USD_RELEASE_LIB ${LIBDIR}/usd/lib/usd_usd_m.lib)
set(USD_DEBUG_LIB ${LIBDIR}/usd/lib/usd_usd_m_d.lib)
endif()
set(USD_LIBRARIES
debug ${USD_DEBUG_LIB}
optimized ${USD_RELEASE_LIB}
)
endif()
endif()
if(WINDOWS_PYTHON_DEBUG)
# Include the system scripts in the blender_python_system_scripts project.
file(GLOB_RECURSE inFiles "${CMAKE_SOURCE_DIR}/release/scripts/*.*" )
add_custom_target(blender_python_system_scripts SOURCES ${inFiles})
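# Mirror the on-disk folder layout of the scripts as Visual Studio filters.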
foreach(_source IN ITEMS ${inFiles})
get_filename_component(_source_path "${_source}" PATH)
string(REPLACE "${CMAKE_SOURCE_DIR}/release/scripts/" "" _source_path "${_source_path}")
string(REPLACE "/" "\\" _group_path "${_source_path}")
source_group("${_group_path}" FILES "${_source}")
endforeach()
# If the user scripts environment variable is set, include scripts from there;
# otherwise include the user scripts from the profile folder.
if(DEFINED ENV{BLENDER_USER_SCRIPTS})
message(STATUS "Including user scripts from environment BLENDER_USER_SCRIPTS=$ENV{BLENDER_USER_SCRIPTS}")
set(USER_SCRIPTS_ROOT "$ENV{BLENDER_USER_SCRIPTS}")
else()
message(STATUS "Including user scripts from the profile folder")
# Include the user scripts from the profile folder in the blender_python_user_scripts project.
set(USER_SCRIPTS_ROOT "$ENV{appdata}/blender foundation/blender/${BLENDER_VERSION}/scripts")
endif()
file(TO_CMAKE_PATH ${USER_SCRIPTS_ROOT} USER_SCRIPTS_ROOT)
file(GLOB_RECURSE inFiles "${USER_SCRIPTS_ROOT}/*.*" )
add_custom_target(blender_python_user_scripts SOURCES ${inFiles})
foreach(_source IN ITEMS ${inFiles})
get_filename_component(_source_path "${_source}" PATH)
string(REPLACE "${USER_SCRIPTS_ROOT}" "" _source_path "${_source_path}")
string(REPLACE "/" "\\" _group_path "${_source_path}")
source_group("${_group_path}" FILES "${_source}")
endforeach()
set_target_properties(blender_python_system_scripts PROPERTIES FOLDER "scripts")
set_target_properties(blender_python_user_scripts PROPERTIES FOLDER "scripts")
# Set the default debugging options for the project. Only write this file once so the user
# is free to override them at their own peril.
set(USER_PROPS_FILE "${CMAKE_CURRENT_BINARY_DIR}/source/creator/blender.Cpp.user.props")
if(NOT EXISTS ${USER_PROPS_FILE})
# The layout below is messy, because otherwise the generated file would look messy.
file(WRITE ${USER_PROPS_FILE} "<?xml version=\"1.0\" encoding=\"utf-8\"?>
<Project DefaultTargets=\"Build\" xmlns=\"http://schemas.microsoft.com/developer/msbuild/2003\">
<PropertyGroup>
<LocalDebuggerCommandArguments>-con --env-system-scripts \"${CMAKE_SOURCE_DIR}/release/scripts\" </LocalDebuggerCommandArguments>
</PropertyGroup>
</Project>")
endif()
endif()
if(WITH_XR_OPENXR)
set(XR_OPENXR_SDK ${LIBDIR}/xr_openxr_sdk)
set(XR_OPENXR_SDK_LIBPATH ${LIBDIR}/xr_openxr_sdk/lib)
set(XR_OPENXR_SDK_INCLUDE_DIR ${XR_OPENXR_SDK}/include)
# This is the old name of this library. It is checked to
# support the transition between the old and new lib versions;
# this can be removed after the next lib update.
if(EXISTS ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loader_d.lib)
else()
set(XR_OPENXR_SDK_LIBRARIES optimized ${XR_OPENXR_SDK_LIBPATH}/openxr_loader.lib debug ${XR_OPENXR_SDK_LIBPATH}/openxr_loaderd.lib)
endif()
endif()
if(WITH_GMP)
set(GMP_INCLUDE_DIRS ${LIBDIR}/gmp/include)
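# libgmp-10 (the C library) is linked in all configurations; only the C++
# wrapper (gmpxx) has separate release and debug libraries.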
set(GMP_LIBRARIES ${LIBDIR}/gmp/lib/libgmp-10.lib optimized ${LIBDIR}/gmp/lib/libgmpxx.lib debug ${LIBDIR}/gmp/lib/libgmpxx_d.lib)
set(GMP_ROOT_DIR ${LIBDIR}/gmp)
set(GMP_FOUND ON)
endif()
if(WITH_POTRACE)
set(POTRACE_INCLUDE_DIRS ${LIBDIR}/potrace/include)
set(POTRACE_LIBRARIES ${LIBDIR}/potrace/lib/potrace.lib)
set(POTRACE_FOUND ON)
endif()
if(WITH_HARU)
set(HARU_FOUND ON)
set(HARU_ROOT_DIR ${LIBDIR}/haru)
set(HARU_INCLUDE_DIRS ${HARU_ROOT_DIR}/include)
set(HARU_LIBRARIES ${HARU_ROOT_DIR}/lib/libhpdfs.lib)
endif()
if(WITH_VULKAN_BACKEND)
if(EXISTS ${LIBDIR}/vulkan)
set(VULKAN_FOUND ON)
set(VULKAN_ROOT_DIR ${LIBDIR}/vulkan)
set(VULKAN_INCLUDE_DIR ${VULKAN_ROOT_DIR}/include)
set(VULKAN_INCLUDE_DIRS ${VULKAN_INCLUDE_DIR})
set(VULKAN_LIBRARY ${VULKAN_ROOT_DIR}/lib/vulkan-1.lib)
set(VULKAN_LIBRARIES ${VULKAN_LIBRARY})
else()
message(WARNING "Vulkan SDK was not found, disabling WITH_VULKAN_BACKEND")
set(WITH_VULKAN_BACKEND OFF)
endif()
endif()
if(WITH_CYCLES AND WITH_CYCLES_PATH_GUIDING)
find_package(openpgl QUIET)
if(openpgl_FOUND)
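# The imported openpgl::openpgl target stores per-configuration library
# locations; extract them into an optimized/debug pair so they can be
# passed around like the other platform libraries.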
get_target_property(OPENPGL_LIBRARIES_RELEASE openpgl::openpgl LOCATION_RELEASE)
get_target_property(OPENPGL_LIBRARIES_DEBUG openpgl::openpgl LOCATION_DEBUG)
set(OPENPGL_LIBRARIES optimized ${OPENPGL_LIBRARIES_RELEASE} debug ${OPENPGL_LIBRARIES_DEBUG})
get_target_property(OPENPGL_INCLUDE_DIR openpgl::openpgl INTERFACE_INCLUDE_DIRECTORIES)
else()
set(WITH_CYCLES_PATH_GUIDING OFF)
message(STATUS "OpenPGL not found, disabling WITH_CYCLES_PATH_GUIDING")
endif()
endif()
set(ZSTD_INCLUDE_DIRS ${LIBDIR}/zstd/include)
set(ZSTD_LIBRARIES ${LIBDIR}/zstd/lib/zstd_static.lib)
if(WITH_CYCLES AND WITH_CYCLES_DEVICE_ONEAPI)
set(LEVEL_ZERO_ROOT_DIR ${LIBDIR}/level_zero)
set(CYCLES_SYCL ${LIBDIR}/dpcpp CACHE PATH "Path to oneAPI DPC++ compiler")
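# Use an explicitly provided SYCL_ROOT_DIR when set, otherwise fall back to
# the bundled DPC++ compiler.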
if(EXISTS ${CYCLES_SYCL} AND NOT SYCL_ROOT_DIR)
set(SYCL_ROOT_DIR ${CYCLES_SYCL})
endif()
file(GLOB _sycl_runtime_libraries_glob
${SYCL_ROOT_DIR}/bin/sycl.dll
${SYCL_ROOT_DIR}/bin/sycl[0-9].dll
)
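# Rewrite each DLL name with a generator expression so that Debug builds
# bundle the 'd'-suffixed runtime DLLs.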
foreach(sycl_runtime_library IN LISTS _sycl_runtime_libraries_glob)
string(REPLACE ".dll" "$<$<CONFIG:Debug>:d>.dll" sycl_runtime_library ${sycl_runtime_library})
list(APPEND _sycl_runtime_libraries ${sycl_runtime_library})
endforeach()
unset(_sycl_runtime_libraries_glob)
file(GLOB _sycl_pi_runtime_libraries_glob
${SYCL_ROOT_DIR}/bin/pi_*.dll
)
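# Bundle the plugin-interface (pi_*) DLLs, leaving out pi_opencl.dll
# (removed below).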
list(REMOVE_ITEM _sycl_pi_runtime_libraries_glob "${SYCL_ROOT_DIR}/bin/pi_opencl.dll")
list(APPEND _sycl_runtime_libraries ${_sycl_pi_runtime_libraries_glob})
unset(_sycl_pi_runtime_libraries_glob)
list(APPEND PLATFORM_BUNDLED_LIBRARIES ${_sycl_runtime_libraries})
unset(_sycl_runtime_libraries)
endif()
# Environment variables needed to run precompiled executables that require the bundled libraries.
list(JOIN PLATFORM_BUNDLED_LIBRARY_DIRS ";" _library_paths)
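# The escaped semicolons keep the directories as a single PATH string instead
# of letting CMake expand them into a list.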
set(PLATFORM_ENV_BUILD_DIRS "${LIBDIR}/OpenImageIO/bin\;${LIBDIR}/boost/lib\;${LIBDIR}/openexr/bin\;${LIBDIR}/imath/bin\;${PATH}")
set(PLATFORM_ENV_BUILD "PATH=${PLATFORM_ENV_BUILD_DIRS}")
# Install needs the additional folders from PLATFORM_ENV_BUILD_DIRS as well, as tools like idiff and abcls use the release-mode DLLs.
set(PLATFORM_ENV_INSTALL "PATH=${CMAKE_INSTALL_PREFIX_WITH_CONFIG}/blender.shared/\;${PLATFORM_ENV_BUILD_DIRS}\;$ENV{PATH}")
unset(_library_paths)