
Compare commits

..

3 Commits

Author SHA1 Message Date
14fdcbe000 Setting the object position is propagated to the physics simulation even for dynamic objects.
The velocity is also reset to 0, which makes sense after forcing a dynamic object's position (a usage sketch follows this entry).
Known issues:
- in interactive mode, changing an object position via RNA causes a simulation step
- setting any part of the object position or orientation causes the full matrix to be applied to physics => a true rigid body API is still missing
2018-10-19 10:08:59 +02:00
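The entry above can be pictured with a short bpy sketch. This is a minimal illustration, not code from the patch: it assumes a scene that already has a rigid body world and an object named "Cube" with an ACTIVE rigid body (the name is purely illustrative). With this commit, the plain RNA write below is expected to reach the physics simulation as well, and to zero the body's velocity.

import bpy

# Illustrative object name; any dynamic rigid body in the scene would do.
obj = bpy.data.objects["Cube"]
assert obj.rigid_body is not None and obj.rigid_body.type == 'ACTIVE'

# Plain RNA write of the position. With this commit it is propagated to the
# physics simulation even though the body is dynamic, and the velocity is
# reset to 0 so the body does not keep its pre-teleport momentum.
obj.location = (0.0, 0.0, 5.0)

# Known issues noted above: in interactive mode such a write also triggers a
# simulation step, and changing a single component still applies the full
# object matrix to the physics body.
obj.location.z += 1.0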
ad21793c69 Interactive mode: new scene flag SCE_INTERACTIVE to update the depsgraph without a frame change while still running physics.
Interactive button added on the timeline.
2018-10-11 14:06:34 +02:00
b389f1e957 Fix bug with transformed object interfering with rigid body simulation.
Note: the bug was caused by use of the legacy ob->flag & SELECT check. A new
      OB_PHYS_MOVING flag marks that an active object is being transformed.

Make the active object position update independent of the cache write, in
preparation for interactive physics.
2018-10-10 18:13:34 +02:00
6119 changed files with 263522 additions and 168357 deletions

View File

@@ -1,7 +1,7 @@
{
"project_id" : "Blender",
"conduit_uri" : "https://developer.blender.org/",
"git.default-relative-commit" : "origin/master",
"git.default-relative-commit" : "origin/blender2.8",
"arc.land.update.default" : "rebase",
"arc.land.onto.default" : "master"
"arc.land.onto.default" : "blender2.8"
}

4
.gitignore vendored
View File

@@ -11,10 +11,6 @@ __pycache__/
*.swo
*#
# Indexes for emacs, vi & others
TAGS
tags
# QtCreator
CMakeLists.txt.user

2
.gitmodules vendored
View File

@@ -1,7 +1,7 @@
[submodule "release/scripts/addons"]
path = release/scripts/addons
url = ../blender-addons.git
branch = master
branch = blender2.8
ignore = all
[submodule "release/scripts/addons_contrib"]
path = release/scripts/addons_contrib

View File

@@ -16,6 +16,11 @@
#
# The Original Code is Copyright (C) 2006, Blender Foundation
# All rights reserved.
#
# The Original Code is: all of this file.
#
# Contributor(s): Jacques Beaurain.
#
# ***** END GPL LICENSE BLOCK *****
#-----------------------------------------------------------------------------
@@ -42,13 +47,6 @@ endif()
cmake_minimum_required(VERSION 3.5)
# Prefer LEGACY OpenGL to be compatible with all the existing releases and
# platforms which don't have GLVND yet. Only do it if preference was not set
# externally.
if(NOT DEFINED OpenGL_GL_PREFERENCE)
set(OpenGL_GL_PREFERENCE "LEGACY")
endif()
if(NOT EXECUTABLE_OUTPUT_PATH)
set(FIRST_RUN TRUE)
else()
@@ -159,6 +157,7 @@ option_defaults_init(
_init_BUILDINFO
_init_CODEC_FFMPEG
_init_CYCLES_OSL
_init_CYCLES_OPENSUBDIV
_init_IMAGE_OPENEXR
_init_INPUT_NDOF
_init_JACK
@@ -175,6 +174,7 @@ if(UNIX AND NOT APPLE)
# disable less important dependencies by default
set(_init_CODEC_FFMPEG OFF)
set(_init_CYCLES_OSL OFF)
set(_init_CYCLES_OPENSUBDIV OFF)
set(_init_IMAGE_OPENEXR OFF)
set(_init_JACK OFF)
set(_init_OPENCOLLADA OFF)
@@ -186,6 +186,7 @@ elseif(WIN32)
set(_init_JACK OFF)
elseif(APPLE)
set(_init_JACK OFF)
set(_init_OPENSUBDIV OFF)
endif()
@@ -238,7 +239,10 @@ option(WITH_OPENCOLORIO "Enable OpenColorIO color management" ${_init_OPENCOLO
# Compositor
option(WITH_COMPOSITOR "Enable the tile based nodal compositor" ON)
option(WITH_OPENSUBDIV "Enable OpenSubdiv for surface subdivision" ${_init_OPENSUBDIV})
option(WITH_OPENSUBDIV "Enable OpenSubdiv for surface subdivision" _init_OPENSUBDIV)
option(WITH_OPENSUBDIV_MODIFIER "Use OpenSubdiv for CPU side of Subsurf/Multires modifiers" OFF)
mark_as_advanced(WITH_OPENSUBDIV_MODIFIER)
option(WITH_OPENVDB "Enable features relying on OpenVDB" OFF)
option(WITH_OPENVDB_BLOSC "Enable blosc compression for OpenVDB, only enable if OpenVDB was built with blosc support" OFF)
@@ -401,12 +405,12 @@ option(WITH_CYCLES "Enable Cycles Render Engine" ON)
option(WITH_CYCLES_STANDALONE "Build Cycles standalone application" OFF)
option(WITH_CYCLES_STANDALONE_GUI "Build Cycles standalone with GUI" OFF)
option(WITH_CYCLES_OSL "Build Cycles with OSL support" ${_init_CYCLES_OSL})
option(WITH_CYCLES_EMBREE "Build Cycles with Embree support" OFF)
option(WITH_CYCLES_OPENSUBDIV "Build Cycles with OpenSubdiv support" ${_init_CYCLES_OPENSUBDIV})
option(WITH_CYCLES_CUDA_BINARIES "Build Cycles CUDA binaries" OFF)
option(WITH_CYCLES_CUBIN_COMPILER "Build cubins with nvrtc based compiler instead of nvcc" OFF)
option(WITH_CYCLES_CUDA_BUILD_SERIAL "Build cubins one after another (useful on machines with limited RAM)" OFF)
mark_as_advanced(WITH_CYCLES_CUDA_BUILD_SERIAL)
set(CYCLES_CUDA_BINARIES_ARCH sm_30 sm_35 sm_37 sm_50 sm_52 sm_60 sm_61 sm_70 sm_75 CACHE STRING "CUDA architectures to build binaries for")
set(CYCLES_CUDA_BINARIES_ARCH sm_30 sm_35 sm_37 sm_50 sm_52 sm_60 sm_61 CACHE STRING "CUDA architectures to build binaries for")
mark_as_advanced(CYCLES_CUDA_BINARIES_ARCH)
unset(PLATFORM_DEFAULT)
option(WITH_CYCLES_LOGGING "Build Cycles with logging support" ON)
@@ -563,9 +567,6 @@ if(WIN32)
option(WINDOWS_USE_VISUAL_STUDIO_FOLDERS "Organize the visual studio project according to source folders." ON)
mark_as_advanced(WINDOWS_USE_VISUAL_STUDIO_FOLDERS)
option(WINDOWS_PYTHON_DEBUG "Include the files needed for debugging python scripts with visual studio 2017+." OFF)
mark_as_advanced(WINDOWS_PYTHON_DEBUG)
endif()
# avoid using again
@@ -758,8 +759,8 @@ if(WITH_PYTHON)
# Do this before main 'platform_*' checks,
# because UNIX will search for the old Python paths which may not exist.
# giving errors about missing paths before this case is met.
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.7")
message(FATAL_ERROR "At least Python 3.7 is required to build")
if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.6")
message(FATAL_ERROR "At least Python 3.6 is required to build")
endif()
if(NOT EXISTS "${CMAKE_SOURCE_DIR}/release/scripts/addons/modules")
@@ -1052,11 +1053,8 @@ if(WITH_GL_PROFILE_ES20)
endif()
else()
if(OpenGL_GL_PREFERENCE STREQUAL "LEGACY" AND OPENGL_gl_LIBRARY)
list(APPEND BLENDER_GL_LIBRARIES ${OPENGL_gl_LIBRARY})
else()
list(APPEND BLENDER_GL_LIBRARIES ${OPENGL_opengl_LIBRARY} ${OPENGL_glx_LIBRARY})
endif()
list(APPEND BLENDER_GL_LIBRARIES "${OPENGL_gl_LIBRARY}")
endif()
if(WITH_GL_EGL)
@@ -1117,10 +1115,7 @@ endif()
#-----------------------------------------------------------------------------
# Configure OpenMP.
if(WITH_OPENMP)
if(NOT OPENMP_CUSTOM)
find_package(OpenMP)
endif()
find_package(OpenMP)
if(OPENMP_FOUND)
if(NOT WITH_OPENMP_STATIC)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}")
@@ -1466,11 +1461,6 @@ elseif(CMAKE_C_COMPILER_ID MATCHES "MSVC")
"/we4431" # missing type specifier - int assumed
)
if(MSVC_VERSION GREATER_EQUAL 1911)
# see https://docs.microsoft.com/en-us/cpp/error-messages/compiler-warnings/c5038?view=vs-2017
set(_WARNINGS "${_WARNINGS} /w35038") #order of initialisation in c++ constructors
endif()
string(REPLACE ";" " " _WARNINGS "${_WARNINGS}")
set(C_WARNINGS "${_WARNINGS}")
set(CXX_WARNINGS "${_WARNINGS}")

View File

@@ -24,133 +24,11 @@
# ../build_linux_i386
# This is for users who like to configure & build blender with a single command.
define HELP_TEXT
Convenience Targets
Provided for building Blender, (multiple at once can be used).
* debug: Build a debug binary.
* full: Enable all supported dependencies & options.
* lite: Disable non essential features for a smaller binary and faster build.
* headless: Build without an interface (renderfarm or server automation).
* cycles: Build Cycles standalone only, without Blender.
* bpy: Build as a python module which can be loaded from python directly.
* deps: Build library dependencies (intended only for platform maintainers).
* config: Run cmake configuration tool to set build options.
Note: passing the argument 'BUILD_DIR=path' when calling make will override the default build dir.
Note: passing the argument 'BUILD_CMAKE_ARGS=args' lets you add cmake arguments.
Project Files
Generate project files for development environments.
* project_qtcreator: QtCreator Project Files.
* project_netbeans: NetBeans Project Files.
* project_eclipse: Eclipse CDT4 Project Files.
Package Targets
* package_debian: Build a debian package.
* package_pacman: Build an arch linux pacman package.
* package_archive: Build an archive package.
Testing Targets
Not associated with building Blender.
* test:
Run ctest, currently tests import/export,
operator execution and that python modules load
* test_cmake:
Runs our own cmake file checker
which detects errors in the cmake file list definitions
* test_pep8:
Checks all python script are pep8
which are tagged to use the stricter formatting
* test_deprecated:
Checks for deprecation tags in our code which may need to be removed
* test_style_c:
Checks C/C++ conforms with blenders style guide:
https://wiki.blender.org/wiki/Source/Code_Style
* test_style_c_qtc:
Same as test_style but outputs QtCreator tasks format
* test_style_osl:
Checks OpenShadingLanguage conforms with blenders style guide:
https://wiki.blender.org/wiki/Source/Code_Style
* test_style_osl_qtc:
Checks OpenShadingLanguage conforms with blenders style guide:
https://wiki.blender.org/wiki/Source/Code_Style
Static Source Code Checking
Not associated with building Blender.
* check_cppcheck: Run blender source through cppcheck (C & C++).
* check_clang_array: Run blender source through clang array checking script (C & C++).
* check_splint: Run blenders source through splint (C only).
* check_sparse: Run blenders source through sparse (C only).
* check_smatch: Run blenders source through smatch (C only).
* check_spelling_c: Check for spelling errors (C/C++ only).
* check_spelling_c_qtc: Same as check_spelling_c but outputs QtCreator tasks format.
* check_spelling_osl: Check for spelling errors (OSL only).
* check_spelling_py: Check for spelling errors (Python only).
* check_descriptions: Check for duplicate/invalid descriptions.
Utilities
Not associated with building Blender.
* icons:
Updates PNG icons from SVG files.
Optionally pass in variables: 'BLENDER_BIN', 'INKSCAPE_BIN'
otherwise default paths are used.
Example
make icons INKSCAPE_BIN=/path/to/inkscape
* icons_geom:
Updates Geometry icons from BLEND file.
Optionally pass in variable: 'BLENDER_BIN'
otherwise default paths are used.
Example
make icons_geom BLENDER_BIN=/path/to/blender
* tgz:
Create a compressed archive of the source code.
* update:
updates git and all submodules
Environment Variables
* BUILD_CMAKE_ARGS: Arguments passed to CMake.
* BUILD_DIR: Override default build path.
* PYTHON: Use this for the Python command (used for checking tools).
* NPROCS: Number of processes to use building (auto-detect when omitted).
Documentation Targets
Not associated with building Blender.
* doc_py: Generate sphinx python api docs.
* doc_doxy: Generate doxygen C/C++ docs.
* doc_dna: Generate blender file format reference.
* doc_man: Generate manpage.
Information
* help: This help message.
* help_features: Show a list of optional features when building.
endef
# HELP_TEXT (end)
# System Vars
OS:=$(shell uname -s)
OS_NCASE:=$(shell uname -s | tr '[A-Z]' '[a-z]')
CPU:=$(shell uname -m)
# CPU:=$(shell uname -m) # UNUSED
# Source and Build DIR's
@@ -177,7 +55,7 @@ ifndef DEPS_INSTALL_DIR
ifneq ($(OS_NCASE),darwin)
# Add processor type to directory name
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(shell uname -p)
endif
endif
@@ -220,12 +98,10 @@ endif
# -----------------------------------------------------------------------------
# Blender binary path
# Allow passing in own BLENDER_BIN so developers who don't
# use the default build path can still use utility helpers.
ifeq ($(OS), Darwin)
BLENDER_BIN?="$(BUILD_DIR)/bin/blender.app/Contents/MacOS/blender"
BLENDER_BIN="$(BUILD_DIR)/bin/blender.app/Contents/MacOS/blender"
else
BLENDER_BIN?="$(BUILD_DIR)/bin/blender"
BLENDER_BIN="$(BUILD_DIR)/bin/blender"
endif
@@ -320,9 +196,87 @@ config: .FORCE
# -----------------------------------------------------------------------------
# Help for build targets
export HELP_TEXT
help: .FORCE
@echo "$$HELP_TEXT"
@echo ""
@echo "Convenience targets provided for building blender, (multiple at once can be used)"
@echo " * debug - build a debug binary"
@echo " * full - enable all supported dependencies & options"
@echo " * lite - disable non essential features for a smaller binary and faster build"
@echo " * headless - build without an interface (renderfarm or server automation)"
@echo " * cycles - build Cycles standalone only, without Blender"
@echo " * bpy - build as a python module which can be loaded from python directly"
@echo " * deps - build library dependencies (intended only for platform maintainers)"
@echo ""
@echo " * config - run cmake configuration tool to set build options"
@echo ""
@echo " Note, passing the argument 'BUILD_DIR=path' when calling make will override the default build dir."
@echo " Note, passing the argument 'BUILD_CMAKE_ARGS=args' lets you add cmake arguments."
@echo ""
@echo ""
@echo "Project Files for IDE's"
@echo " * project_qtcreator - QtCreator Project Files"
@echo " * project_netbeans - NetBeans Project Files"
@echo " * project_eclipse - Eclipse CDT4 Project Files"
@echo ""
@echo "Package Targets"
@echo " * package_debian - build a debian package"
@echo " * package_pacman - build an arch linux pacman package"
@echo " * package_archive - build an archive package"
@echo ""
@echo "Testing Targets (not associated with building blender)"
@echo " * test - run ctest, currently tests import/export,"
@echo " operator execution and that python modules load"
@echo " * test_cmake - runs our own cmake file checker"
@echo " which detects errors in the cmake file list definitions"
@echo " * test_pep8 - checks all python script are pep8"
@echo " which are tagged to use the stricter formatting"
@echo " * test_deprecated - checks for deprecation tags in our code which may need to be removed"
@echo " * test_style_c - checks C/C++ conforms with blenders style guide:"
@echo " https://wiki.blender.org/wiki/Source/Code_Style"
@echo " * test_style_c_qtc - same as test_style but outputs QtCreator tasks format"
@echo " * test_style_osl - checks OpenShadingLanguage conforms with blenders style guide:"
@echo " https://wiki.blender.org/wiki/Source/Code_Style"
@echo " * test_style_osl_qtc - checks OpenShadingLanguage conforms with blenders style guide:"
@echo " https://wiki.blender.org/wiki/Source/Code_Style"
@echo ""
@echo "Static Source Code Checking (not associated with building blender)"
@echo " * check_cppcheck - run blender source through cppcheck (C & C++)"
@echo " * check_clang_array - run blender source through clang array checking script (C & C++)"
@echo " * check_splint - run blenders source through splint (C only)"
@echo " * check_sparse - run blenders source through sparse (C only)"
@echo " * check_smatch - run blenders source through smatch (C only)"
@echo " * check_spelling_c - check for spelling errors (C/C++ only)"
@echo " * check_spelling_c_qtc - same as check_spelling_c but outputs QtCreator tasks format"
@echo " * check_spelling_osl - check for spelling errors (OSL only)"
@echo " * check_spelling_py - check for spelling errors (Python only)"
@echo " * check_descriptions - check for duplicate/invalid descriptions"
@echo ""
@echo "Utilities (not associated with building blender)"
@echo " * icons - Updates PNG icons from SVG files."
@echo " Set environment variables 'BLENDER_BIN' and 'INKSCAPE_BIN'"
@echo " to define your own commands."
@echo " * icons_geom - Updates Geometry icons from BLEND file."
@echo " Set environment variable 'BLENDER_BIN'"
@echo " to define your own command."
@echo " * tgz - create a compressed archive of the source code."
@echo " * update - updates git and all submodules"
@echo ""
@echo "Environment Variables"
@echo " * BUILD_CMAKE_ARGS - arguments passed to CMake."
@echo " * BUILD_DIR - override default build path."
@echo " * PYTHON - use this for the Python command (used for checking tools)."
@echo " * NPROCS - number of processes to use building (auto-detect when omitted)."
@echo ""
@echo "Documentation Targets (not associated with building blender)"
@echo " * doc_py - generate sphinx python api docs"
@echo " * doc_doxy - generate doxygen C/C++ docs"
@echo " * doc_dna - generate blender file format reference"
@echo " * doc_man - generate manpage"
@echo ""
@echo "Information"
@echo " * help - this help message"
@echo " * help_features - show a list of optional features when building"
@echo ""
# -----------------------------------------------------------------------------
# Packages
@@ -486,12 +440,9 @@ check_descriptions: .FORCE
tgz: .FORCE
./build_files/utils/build_tgz.sh
INKSCAPE_BIN?="inkscape"
icons: .FORCE
BLENDER_BIN=$(BLENDER_BIN) INKSCAPE_BIN=$(INKSCAPE_BIN) \
"$(BLENDER_DIR)/release/datafiles/blender_icons_update.py"
BLENDER_BIN=$(BLENDER_BIN) INKSCAPE_BIN=$(INKSCAPE_BIN) \
"$(BLENDER_DIR)/release/datafiles/prvicons_update.py"
"$(BLENDER_DIR)/release/datafiles/blender_icons_update.py"
"$(BLENDER_DIR)/release/datafiles/prvicons_update.py"
icons_geom: .FORCE
BLENDER_BIN=$(BLENDER_BIN) \

View File

@@ -97,10 +97,6 @@ if(WITH_WEBP)
include(cmake/webp.cmake)
endif()
if(WITH_EMBREE)
include(cmake/embree.cmake)
endif()
if(WIN32)
# HMD branch deps
include(cmake/hidapi.cmake)
@@ -114,7 +110,7 @@ endif()
if(NOT WIN32 OR ENABLE_MINGW64)
include(cmake/openjpeg.cmake)
if(NOT WIN32 OR BUILD_MODE STREQUAL Release)
if(BUILD_MODE STREQUAL Release)
if(WIN32)
include(cmake/zlib_mingw.cmake)
endif()

View File

@@ -23,7 +23,7 @@ set(BLOSC_EXTRA_ARGS
-DBUILD_BENCHMARKS=OFF
-DCMAKE_DEBUG_POSTFIX=_d
-DThreads_FOUND=1
-DPTHREAD_LIBS=${LIBDIR}/pthreads/lib/pthreadVC3.lib
-DPTHREAD_LIBS=${LIBDIR}/pthreads/lib/pthreadVC2.lib
-DPTHREAD_INCLUDE_DIR=${LIBDIR}/pthreads/inc
-DDEACTIVATE_SNAPPY=ON
-DCMAKE_POSITION_INDEPENDENT_CODE=ON

View File

@@ -1,69 +0,0 @@
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENSE BLOCK *****
# Note the utility apps may use png/tiff/gif system libraries, but the
# library itself does not depend on them, so should give no problems.
set(EMBREE_EXTRA_ARGS
-DEMBREE_ISPC_SUPPORT=OFF
-DEMBREE_TUTORIALS=OFF
-DEMBREE_STATIC_LIB=ON
-DEMBREE_RAY_MASK=ON
-DEMBREE_FILTER_FUNCTION=ON
-DEMBREE_BACKFACE_CULLING=OFF
-DEMBREE_TASKING_SYSTEM=INTERNAL
-DEMBREE_MAX_ISA=AVX2
)
if(WIN32)
set(EMBREE_BUILD_DIR ${BUILD_MODE}/)
else()
set(EMBREE_BUILD_DIR)
endif()
ExternalProject_Add(external_embree
URL ${EMBREE_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${EMBREE_HASH}
PREFIX ${BUILD_DIR}/embree
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/embree ${DEFAULT_CMAKE_FLAGS} ${EMBREE_EXTRA_ARGS}
INSTALL_DIR ${LIBDIR}/embree
)
if(WIN32)
if(BUILD_MODE STREQUAL Release)
ExternalProject_Add_Step(external_embree after_install
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/embree ${HARVEST_TARGET}/embree
DEPENDEES install
)
else()
ExternalProject_Add_Step(external_embree after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/embree3.lib ${HARVEST_TARGET}/embree/lib/embree3_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/embree_avx.lib ${HARVEST_TARGET}/embree/lib/embree_avx_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/embree_avx2.lib ${HARVEST_TARGET}/embree/lib/embree_avx2_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/embree_sse42.lib ${HARVEST_TARGET}/embree/lib/embree_sse42_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/lexers.lib ${HARVEST_TARGET}/embree/lib/lexers_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/math.lib ${HARVEST_TARGET}/embree/lib/math_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/simd.lib ${HARVEST_TARGET}/embree/lib/simd_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/sys.lib ${HARVEST_TARGET}/embree/lib/sys_d.lib
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/embree/lib/tasking.lib ${HARVEST_TARGET}/embree/lib/tasking_d.lib
DEPENDEES install
)
endif()
endif()

View File

@@ -112,7 +112,6 @@ ExternalProject_Add(external_ffmpeg
--disable-indev=alsa
--disable-outdev=alsa
--disable-crystalhd
--disable-sndio
BUILD_COMMAND ${CONFIGURE_ENV_NO_PERL} && cd ${BUILD_DIR}/ffmpeg/src/external_ffmpeg/ && make -j${MAKE_THREADS}
INSTALL_COMMAND ${CONFIGURE_ENV_NO_PERL} && cd ${BUILD_DIR}/ffmpeg/src/external_ffmpeg/ && make install
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/ffmpeg ${DEFAULT_CMAKE_FLAGS}

View File

@@ -57,7 +57,7 @@ if(BUILD_MODE STREQUAL Release)
# hidapi
${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/hidapi/ ${HARVEST_TARGET}/hidapi/ &&
# webp, straight up copy
${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/webp ${HARVEST_TARGET}/webp &&
${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/webp ${HARVEST_TARGET}/webp
DEPENDS
)
endif()
@@ -102,6 +102,7 @@ function(harvest from to)
FILES_MATCHING PATTERN ${pattern}
PATTERN "pkgconfig" EXCLUDE
PATTERN "cmake" EXCLUDE
PATTERN "clang" EXCLUDE
PATTERN "__pycache__" EXCLUDE
PATTERN "tests" EXCLUDE)
endif()
@@ -128,13 +129,8 @@ harvest(jemalloc/lib jemalloc/lib "*.a")
harvest(jpg/include jpeg/include "*.h")
harvest(jpg/lib jpeg/lib "libjpeg.a")
harvest(lame/lib ffmpeg/lib "*.a")
harvest(clang/bin llvm/bin "clang-format")
harvest(llvm/bin llvm/bin "llvm-config")
harvest(llvm/lib llvm/lib "libLLVM*.a")
if(APPLE)
harvest(openmp/lib openmp/lib "*")
harvest(openmp/include openmp/include "*.h")
endif()
harvest(ogg/lib ffmpeg/lib "*.a")
harvest(openal/include openal/include "*.h")
if(UNIX AND NOT APPLE)
@@ -195,7 +191,5 @@ harvest(vpx/lib ffmpeg/lib "*.a")
harvest(webp/lib ffmpeg/lib "*.a")
harvest(x264/lib ffmpeg/lib "*.a")
harvest(xvidcore/lib ffmpeg/lib "*.a")
harvest(embree/include embree/include "*.h")
harvest(embree/lib embree/lib "*.a")
endif()

View File

@@ -23,8 +23,7 @@ ExternalProject_Add(external_openmp
URL_HASH MD5=${OPENMP_HASH}
PREFIX ${BUILD_DIR}/openmp
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/openmp ${DEFAULT_CMAKE_FLAGS}
INSTALL_COMMAND cd ${BUILD_DIR}/openmp/src/external_openmp-build && install_name_tool -id @executable_path/../Resources/lib/libomp.dylib runtime/src/libomp.dylib && make install
INSTALL_DIR ${LIBDIR}/openmp
INSTALL_DIR ${LIBDIR}/clang
)
add_dependencies(

View File

@@ -55,7 +55,7 @@ if(WIN32)
# needs to link pthreads due to it being a blosc dependency
set(OPENVDB_EXTRA_ARGS ${OPENVDB_EXTRA_ARGS}
-DOPENEXR_NAMESPACE_VERSIONING=OFF
-DEXTRA_LIBS:FILEPATH=${LIBDIR}/pthreads/lib/pthreadVC3.lib
-DEXTRA_LIBS:FILEPATH=${LIBDIR}/pthreads/lib/pthreadVC2.lib
)
if("${CMAKE_SIZEOF_VOID_P}" EQUAL "4")
set(OPENVDB_EXTRA_ARGS ${OPENVDB_EXTRA_ARGS}

View File

@@ -20,7 +20,6 @@ if(WIN32)
option(ENABLE_MINGW64 "Enable building of ffmpeg/iconv/libsndfile/lapack/fftw3 by installing mingw64" ON)
endif()
option(WITH_WEBP "Enable building of oiio with webp support" OFF)
option(WITH_EMBREE "Enable building of Embree" OFF)
set(MAKE_THREADS 1 CACHE STRING "Number of threads to run make with")
if(NOT BUILD_MODE)
@@ -127,7 +126,8 @@ else()
)
set(OSX_ARCHITECTURES x86_64)
set(OSX_DEPLOYMENT_TARGET 10.9)
set(OSX_SYSROOT ${XCODE_DEV_PATH}/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk)
set(OSX_SDK_VERSION 10.13)
set(OSX_SYSROOT ${XCODE_DEV_PATH}/Platforms/MacOSX.platform/Developer/SDKs/MacOSX${OSX_SDK_VERSION}.sdk)
set(PLATFORM_CFLAGS "-isysroot ${OSX_SYSROOT} -mmacosx-version-min=${OSX_DEPLOYMENT_TARGET}")
set(PLATFORM_CXXFLAGS "-isysroot ${OSX_SYSROOT} -mmacosx-version-min=${OSX_DEPLOYMENT_TARGET} -std=c++11 -stdlib=libc++")

View File

@@ -36,7 +36,7 @@ add_dependencies(
external_zlib
)
if(WIN32 AND BUILD_MODE STREQUAL Debug)
if(BUILD_MODE STREQUAL Debug)
ExternalProject_Add_Step(external_png after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/png/lib/libpng16_staticd${LIBEXT} ${LIBDIR}/png/lib/libpng16${LIBEXT}
DEPENDEES install

View File

@@ -17,28 +17,30 @@
# ***** END GPL LICENSE BLOCK *****
if(WIN32)
set(PTHREAD_XCFLAGS /MD)
if(MSVC14) # vs2015 has timespec
set(PTHREAD_CPPFLAGS "/I. /DHAVE_CONFIG_H /D_TIMESPEC_DEFINED ")
set(PTHREAD_CPPFLAGS "/I. /DHAVE_PTW32_CONFIG_H /D_TIMESPEC_DEFINED ")
else() # everything before doesn't
set(PTHREAD_CPPFLAGS "/I. /DHAVE_CONFIG_H ")
set(PTHREAD_CPPFLAGS "/I. /DHAVE_PTW32_CONFIG_H ")
endif()
set(PTHREADS_BUILD cd ${BUILD_DIR}/pthreads/src/external_pthreads/ && cd && nmake VC-static /e CPPFLAGS=${PTHREAD_CPPFLAGS} /e XLIBS=/NODEFAULTLIB:msvcr)
set(PTHREADS_BUILD cd ${BUILD_DIR}/pthreads/src/external_pthreads/ && cd && nmake VC /e CPPFLAGS=${PTHREAD_CPPFLAGS} /e XCFLAGS=${PTHREAD_XCFLAGS} /e XLIBS=/NODEFAULTLIB:msvcr)
ExternalProject_Add(external_pthreads
URL ${PTHREADS_URI}
DOWNLOAD_DIR ${DOWNLOAD_DIR}
URL_HASH MD5=${PTHREADS_HASH}
URL_HASH SHA512=${PTHREADS_SHA512}
PREFIX ${BUILD_DIR}/pthreads
CONFIGURE_COMMAND echo .
PATCH_COMMAND ${PATCH_CMD} --verbose -p 0 -N -d ${BUILD_DIR}/pthreads/src/external_pthreads < ${PATCH_DIR}/pthreads.diff
BUILD_COMMAND ${PTHREADS_BUILD}
INSTALL_COMMAND COMMAND
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/pthreads/src/external_pthreads/libpthreadVC3${LIBEXT} ${LIBDIR}/pthreads/lib/pthreadVC3${LIBEXT} &&
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/pthreads/src/external_pthreads/pthreadVC2.dll ${LIBDIR}/pthreads/lib/pthreadVC2.dll &&
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/pthreads/src/external_pthreads/pthreadVC2${LIBEXT} ${LIBDIR}/pthreads/lib/pthreadVC2${LIBEXT} &&
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/pthreads/src/external_pthreads/pthread.h ${LIBDIR}/pthreads/inc/pthread.h &&
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/pthreads/src/external_pthreads/sched.h ${LIBDIR}/pthreads/inc/sched.h &&
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/pthreads/src/external_pthreads/semaphore.h ${LIBDIR}/pthreads/inc/semaphore.h &&
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/pthreads/src/external_pthreads/_ptw32.h ${LIBDIR}/pthreads/inc/_ptw32.h
${CMAKE_COMMAND} -E copy ${BUILD_DIR}/pthreads/src/external_pthreads/semaphore.h ${LIBDIR}/pthreads/inc/semaphore.h
INSTALL_DIR ${LIBDIR}/pthreads
)

View File

@@ -147,7 +147,6 @@ if(MSVC)
#COMMAND ${CMAKE_COMMAND} -E copy "${PYTHON_OUTPUTDIR}/xxlimited${PYTHON_POSTFIX}.pyd" ${BUILD_DIR}/python/src/external_python/redist/lib/xxlimited${PYTHON_POSTFIX}.pyd
COMMAND ${CMAKE_COMMAND} -E copy "${PYTHON_OUTPUTDIR}/libssl-1_1${SSL_POSTFIX}.dll" ${BUILD_DIR}/python/src/external_python/redist/lib/libssl-1_1${SSL_POSTFIX}.dll
COMMAND ${CMAKE_COMMAND} -E copy "${PYTHON_OUTPUTDIR}/libcrypto-1_1${SSL_POSTFIX}.dll" ${BUILD_DIR}/python/src/external_python/redist/lib/libcrypto-1_1${SSL_POSTFIX}.dll
COMMAND ${CMAKE_COMMAND} -E copy "${PYTHON_OUTPUTDIR}/sqlite3${PYTHON_POSTFIX}.dll" ${BUILD_DIR}/python/src/external_python/redist/lib/sqlite3${PYTHON_POSTFIX}.dll
COMMAND ${CMAKE_COMMAND} -E chdir "${BUILD_DIR}/python/src/external_python/redist" ${CMAKE_COMMAND} -E tar "cfvz" "${LIBDIR}/python${PYTHON_SHORT_VERSION_NO_DOTS}${PYTHON_POSTFIX}.tar.gz" "."
COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/python/ ${HARVEST_TARGET}/python/
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/python${PYTHON_SHORT_VERSION_NO_DOTS}${PYTHON_POSTFIX}.tar.gz ${HARVEST_TARGET}/Release/python${PYTHON_SHORT_VERSION_NO_DOTS}${PYTHON_POSTFIX}.tar.gz

View File

@@ -25,7 +25,6 @@ else()
-DSDL_STATIC=ON
-DSDL_SHARED=OFF
-DSDL_VIDEO=OFF
-DSNDIO=OFF
)
endif()

View File

@@ -39,7 +39,7 @@ add_dependencies(
external_zlib
)
if(WIN32 AND BUILD_MODE STREQUAL Debug)
if(BUILD_MODE STREQUAL Debug)
ExternalProject_Add_Step(external_tiff after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tiff/lib/tiffd${LIBEXT} ${LIBDIR}/tiff/lib/tiff${LIBEXT}
DEPENDEES install

View File

@@ -21,7 +21,7 @@ set(ZLIB_URI https://zlib.net/zlib-${ZLIB_VERSION}.tar.gz)
set(ZLIB_HASH 1c9f62f0778697a09d36121ead88e08e)
set(OPENAL_VERSION 1.18.2)
set(OPENAL_URI http://openal-soft.org/openal-releases/openal-soft-${OPENAL_VERSION}.tar.bz2)
set(OPENAL_URI http://kcat.strangesoft.net/openal-releases/openal-soft-${OPENAL_VERSION}.tar.bz2)
set(OPENAL_HASH d4eeb0889812e2fdeaa1843523d76190)
set(PNG_VERSION 1.6.35)
@@ -41,9 +41,9 @@ set(BLOSC_VERSION 1.14.4)
set(BLOSC_URI https://github.com/Blosc/c-blosc/archive/v${BLOSC_VERSION}.tar.gz)
set(BLOSC_HASH e80dfc71e4cba03b8d01ed0876547ffe)
set(PTHREADS_VERSION 3.0.0)
set(PTHREADS_URI http://sourceforge.mirrorservice.org/p/pt/pthreads4w/pthreads4w-code-v${PTHREADS_VERSION}.zip)
set(PTHREADS_HASH f3bf81bb395840b3446197bcf4ecd653)
set(PTHREADS_VERSION 2-9-1)
set(PTHREADS_URI ftp://sourceware.org/pub/pthreads-win32/pthreads-w32-${PTHREADS_VERSION}-release.tar.gz)
set(PTHREADS_SHA512 9c06e85310766834370c3dceb83faafd397da18a32411ca7645c8eb6b9495fea54ca2872f4a3e8d83cb5fdc5dea7f3f0464be5bb9af3222a6534574a184bd551)
set(ILMBASE_VERSION 2.3.0)
if (WIN32)
@@ -113,9 +113,9 @@ set(SDL_VERSION 2.0.8)
set(SDL_URI https://www.libsdl.org/release/SDL2-${SDL_VERSION}.tar.gz)
set(SDL_HASH 3800d705cef742c6a634f202c37f263f)
set(OPENCOLLADA_VERSION v1.6.68)
set(OPENCOLLADA_VERSION v1.6.63)
set(OPENCOLLADA_URI https://github.com/KhronosGroup/OpenCOLLADA/archive/${OPENCOLLADA_VERSION}.tar.gz)
set(OPENCOLLADA_HASH ee7dae874019fea7be11613d07567493)
set(OPENCOLLADA_HASH e937c3897b86fc0da53cde97257f5156)
set(OPENCOLORIO_VERSION 1.1.0)
set(OPENCOLORIO_URI https://github.com/imageworks/OpenColorIO/archive/v${OPENCOLORIO_VERSION}.tar.gz)
@@ -298,7 +298,3 @@ set(SSL_HASH ebbfc844a8c8cc0ea5dc10b86c9ce97f401837f3fa08c17b2cdadc118253cf99)
set(SQLITE_VERSION 3.24.0)
set(SQLITE_URI https://www.sqlite.org/2018/sqlite-src-3240000.zip)
set(SQLITE_HASH fb558c49ee21a837713c4f1e7e413309aabdd9c7)
set(EMBREE_VERSION 3.2.4)
set(EMBREE_URI https://github.com/embree/embree/archive/v${EMBREE_VERSION}.zip)
set(EMBREE_HASH 3d4a1147002ff43939d45140aa9d6fb8)

View File

@@ -40,8 +40,16 @@ if (WIN32)
)
endif()
else()
ExternalProject_Add_Step(external_zlib after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/zlib/lib/libz.a ${LIBDIR}/zlib/lib/libz_pic.a
DEPENDEES install
)
if(BUILD_MODE STREQUAL Debug)
ExternalProject_Add_Step(external_zlib after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/zlib/lib/zlibstaticd${LIBEXT} ${LIBDIR}/zlib/lib/${ZLIB_LIBRARY}
DEPENDEES install
)
endif()
if (UNIX)
ExternalProject_Add_Step(external_zlib after_install
COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/zlib/lib/libz.a ${LIBDIR}/zlib/lib/libz_pic.a
DEPENDEES install
)
endif()
endif()

View File

@@ -26,17 +26,17 @@ ARGS=$( \
getopt \
-o s:i:t:h \
--long source:,install:,tmp:,info:,threads:,help,show-deps,no-sudo,no-build,no-confirm,\
with-all,with-opencollada,with-jack,with-embree,\
with-all,with-opencollada,with-jack,\
ver-ocio:,ver-oiio:,ver-llvm:,ver-osl:,ver-osd:,ver-openvdb:,\
force-all,force-python,force-numpy,force-boost,\
force-ocio,force-openexr,force-oiio,force-llvm,force-osl,force-osd,force-openvdb,\
force-ffmpeg,force-opencollada,force-alembic,force-embree,\
force-ffmpeg,force-opencollada,force-alembic,\
build-all,build-python,build-numpy,build-boost,\
build-ocio,build-openexr,build-oiio,build-llvm,build-osl,build-osd,build-openvdb,\
build-ffmpeg,build-opencollada,build-alembic,build-embree,\
build-ffmpeg,build-opencollada,build-alembic,\
skip-python,skip-numpy,skip-boost,\
skip-ocio,skip-openexr,skip-oiio,skip-llvm,skip-osl,skip-osd,skip-openvdb,\
skip-ffmpeg,skip-opencollada,skip-alembic,skip-embree \
skip-ffmpeg,skip-opencollada,skip-alembic \
-- "$@" \
)
@@ -54,9 +54,8 @@ SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
# Do not install some optional, potentially conflicting libs by default...
WITH_ALL=false
# Do not yet enable opencollada or embree, use --with-opencollada/--with-embree (or --with-all) option to try it.
# Do not yet enable opencollada, use --with-opencollada (or --with-all) option to try it.
WITH_OPENCOLLADA=false
WITH_EMBREE=false
THREADS=$(nproc)
@@ -68,7 +67,6 @@ or use --source/--install options, if you want to use other paths!
Number of threads for building: \$THREADS (automatically detected, use --threads=<nbr> to override it).
Full install: \$WITH_ALL (use --with-all option to enable it).
Building OpenCOLLADA: \$WITH_OPENCOLLADA (use --with-opencollada option to enable it).
Building Embree: \$WITH_EMBREE (use --with-embree option to enable it).
Example:
Full install without OpenCOLLADA: --with-all --skip-opencollada
@@ -115,9 +113,6 @@ ARGUMENTS_INFO="\"COMMAND LINE ARGUMENTS:
--with-opencollada
Build and install the OpenCOLLADA libraries.
--with-embree
Build and install the Embree libraries.
--with-jack
Install the jack libraries.
@@ -182,9 +177,6 @@ ARGUMENTS_INFO="\"COMMAND LINE ARGUMENTS:
--build-opencollada
Force the build of OpenCOLLADA.
--build-embree
Force the build of Embree.
--build-ffmpeg
Force the build of FFMpeg.
@@ -237,9 +229,6 @@ ARGUMENTS_INFO="\"COMMAND LINE ARGUMENTS:
--force-opencollada
Force the rebuild of OpenCOLLADA.
--force-embree
Force the rebuild of Embree.
--force-ffmpeg
Force the rebuild of FFMpeg.
@@ -285,9 +274,6 @@ ARGUMENTS_INFO="\"COMMAND LINE ARGUMENTS:
--skip-opencollada
Unconditionally skip OpenCOLLADA installation/building.
--skip-Embree
Unconditionally skip Embree installation/building.
--skip-ffmpeg
Unconditionally skip FFMpeg installation/building.\""
@@ -378,17 +364,11 @@ ALEMBIC_FORCE_BUILD=false
ALEMBIC_FORCE_REBUILD=false
ALEMBIC_SKIP=false
OPENCOLLADA_VERSION="1.6.68"
OPENCOLLADA_VERSION="1.6.63"
OPENCOLLADA_FORCE_BUILD=false
OPENCOLLADA_FORCE_REBUILD=false
OPENCOLLADA_SKIP=false
EMBREE_VERSION="3.2.4"
EMBREE_FORCE_BUILD=false
EMBREE_FORCE_REBUILD=false
EMBREE_SKIP=false
FFMPEG_VERSION="4.0.2"
FFMPEG_VERSION_MIN="2.8.4"
FFMPEG_FORCE_BUILD=false
@@ -522,9 +502,6 @@ while true; do
--with-opencollada)
WITH_OPENCOLLADA=true; shift; continue
;;
--with-embree)
WITH_EMBREE=true; shift; continue
;;
--with-jack)
WITH_JACK=true; shift; continue;
;;
@@ -570,7 +547,6 @@ while true; do
OSD_FORCE_BUILD=true
OPENVDB_FORCE_BUILD=true
OPENCOLLADA_FORCE_BUILD=true
EMBREE_FORCE_BUILD=true
FFMPEG_FORCE_BUILD=true
ALEMBIC_FORCE_BUILD=true
shift; continue
@@ -612,9 +588,6 @@ while true; do
--build-opencollada)
OPENCOLLADA_FORCE_BUILD=true; shift; continue
;;
--build-embree)
EMBREE_FORCE_BUILD=true; shift; continue
;;
--build-ffmpeg)
FFMPEG_FORCE_BUILD=true; shift; continue
;;
@@ -633,7 +606,6 @@ while true; do
OSD_FORCE_REBUILD=true
OPENVDB_FORCE_REBUILD=true
OPENCOLLADA_FORCE_REBUILD=true
EMBREE_FORCE_REBUILD=true
FFMPEG_FORCE_REBUILD=true
ALEMBIC_FORCE_REBUILD=true
shift; continue
@@ -673,9 +645,6 @@ while true; do
--force-opencollada)
OPENCOLLADA_FORCE_REBUILD=true; shift; continue
;;
--force-embree)
EMBREE_FORCE_REBUILD=true; shift; continue
;;
--force-ffmpeg)
FFMPEG_FORCE_REBUILD=true; shift; continue
;;
@@ -715,9 +684,6 @@ while true; do
--skip-opencollada)
OPENCOLLADA_SKIP=true; shift; continue
;;
--skip-embree)
EMBREE_SKIP=true; shift; continue
;;
--skip-ffmpeg)
FFMPEG_SKIP=true; shift; continue
;;
@@ -742,9 +708,6 @@ done
if [ "$WITH_ALL" = true -a "$OPENCOLLADA_SKIP" = false ]; then
WITH_OPENCOLLADA=true
fi
if [ "$WITH_ALL" = true -a "$EMBREE_SKIP" = false ]; then
WITH_EMBREE=true
fi
if [ "$WITH_ALL" = true ]; then
WITH_JACK=true
fi
@@ -833,13 +796,6 @@ OPENCOLLADA_SOURCE=( "https://github.com/KhronosGroup/OpenCOLLADA/archive/v${OPE
#~ OPENCOLLADA_REPO_UID="e937c3897b86fc0da53cde97257f5156"
#~ OPENCOLLADA_REPO_BRANCH="master"
EMBREE_USE_REPO=false
EMBREE_SOURCE=( "https://github.com/embree/embree/archive/v${EMBREE_VERSION}.tar.gz" )
#~ EMBREE_SOURCE_REPO=( "https://github.com/embree/embree.git" )
#~ EMBREE_REPO_UID="4a12bfed63c90e85b6eab98b8cdd8dd2a3ba5809"
#~ EMBREE_REPO_BRANCH="master"
FFMPEG_SOURCE=( "http://ffmpeg.org/releases/ffmpeg-$FFMPEG_VERSION.tar.bz2" )
# C++11 is required now
@@ -880,7 +836,6 @@ You may also want to build them yourself (optional ones are [between brackets]):
* [OpenSubDiv $OSD_VERSION_MIN] (from $OSD_SOURCE_REPO, branch $OSD_SOURCE_REPO_BRANCH, commit $OSD_SOURCE_REPO_UID).
* [OpenVDB $OPENVDB_VERSION_MIN] (from $OPENVDB_SOURCE), [Blosc $OPENVDB_BLOSC_VERSION] (from $OPENVDB_BLOSC_SOURCE).
* [OpenCollada $OPENCOLLADA_VERSION] (from $OPENCOLLADA_SOURCE).
* [Embree $EMBREE_VERSION] (from $EMBREE_SOURCE).
* [Alembic $ALEMBIC_VERSION] (from $ALEMBIC_SOURCE).\""
if [ "$DO_SHOW_DEPS" = true ]; then
@@ -2456,101 +2411,6 @@ compile_OpenCOLLADA() {
fi
}
#### Build Embree ####
_init_embree() {
_src=$SRC/embree-$EMBREE_VERSION
_git=true
_inst=$INST/embree-$EMBREE_VERSION
_inst_shortcut=$INST/embree
}
clean_Embree() {
_init_embree
_clean
}
compile_Embree() {
if [ "$NO_BUILD" = true ]; then
WARNING "--no-build enabled, Embree will not be compiled!"
return
fi
# To be changed each time we make edits that would modify the compiled results!
embree_magic=9
_init_embree
# Clean install if needed!
magic_compile_check embree-$EMBREE_VERSION $embree_magic
if [ $? -eq 1 -o "$EMBREE_FORCE_REBUILD" = true ]; then
clean_Embree
fi
if [ ! -d $_inst ]; then
INFO "Building Embree-$EMBREE_VERSION"
prepare_opt
if [ ! -d $_src ]; then
mkdir -p $SRC
if [ "EMBREE_USE_REPO" = true ]; then
git clone $EMBREE_SOURCE_REPO $_src
else
download EMBREE_SOURCE[@] "$_src.tar.gz"
INFO "Unpacking Embree-$EMBREE_VERSION"
tar -C $SRC -xf $_src.tar.gz
fi
fi
cd $_src
if [ "$EMBREE_USE_REPO" = true ]; then
git pull origin $EMBREE_REPO_BRANCH
# Stick to same rev as windows' libs...
git checkout $EMBREE_REPO_UID
git reset --hard
fi
# Always refresh the whole build!
if [ -d build ]; then
rm -rf build
fi
mkdir build
cd build
cmake_d="-D CMAKE_BUILD_TYPE=Release"
cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
cmake_d="$cmake_d -D EMBREE_ISPC_SUPPORT=OFF"
cmake_d="$cmake_d -D EMBREE_TUTORIALS=OFF"
cmake_d="$cmake_d -D EMBREE_STATIC_LIB=ON"
cmake_d="$cmake_d -D EMBREE_RAY_MASK=ON"
cmake_d="$cmake_d -D EMBREE_FILTER_FUNCTION=ON"
cmake_d="$cmake_d -D EMBREE_BACKFACE_CULLING=OFF"
cmake_d="$cmake_d -D EMBREE_TASKING_SYSTEM=INTERNAL"
cmake_d="$cmake_d -D EMBREE_MAX_ISA=AVX2"
cmake $cmake_d ../
make -j$THREADS && make install
make clean
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "Embree-$EMBREE_VERSION failed to compile, exiting"
exit 1
fi
magic_compile_set embree-$EMBREE_VERSION $embree_magic
cd $CWD
INFO "Done compiling Embree-$EMBREE_VERSION!"
else
INFO "Own Embree-$EMBREE_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-embree option."
fi
}
#### Build FFMPEG ####
_init_ffmpeg() {
_src=$SRC/ffmpeg-$FFMPEG_VERSION
@@ -3118,23 +2978,6 @@ install_DEB() {
fi
fi
if [ "$WITH_EMBREE" = true ]; then
_do_compile_embree=false
PRINT ""
if [ "$EMBREE_SKIP" = true ]; then
WARNING "Skipping Embree installation, as requested..."
elif [ "$EMBREE_FORCE_BUILD" = true ]; then
INFO "Forced Embree building, as requested..."
_do_compile_embree=true
else
# No package currently!
_do_compile_embree=true
fi
if [ "$_do_compile_embree" = true ]; then
compile_Embree
fi
fi
PRINT ""
if [ "$FFMPEG_SKIP" = true ]; then
@@ -3682,23 +3525,6 @@ install_RPM() {
fi
fi
if [ "$WITH_EMBREE" = true ]; then
PRINT ""
_do_compile_embree=false
if [ "$OPENCOLLADA_SKIP" = true ]; then
WARNING "Skipping Embree installation, as requested..."
elif [ "$EMBREE_FORCE_BUILD" = true ]; then
INFO "Forced Embree building, as requested..."
_do_compile_embree=true
else
# No package...
_do_compile_embree=true
fi
if [ "$_do_compile_embree" = true ]; then
compile_Embree
fi
fi
PRINT ""
if [ "$FFMPEG_SKIP" = true ]; then
@@ -4129,28 +3955,6 @@ install_ARCH() {
fi
fi
if [ "$WITH_EMBREE" = true ]; then
PRINT ""
_do_compile_embree=false
if [ "$EMBREE_SKIP" = true ]; then
WARNING "Skipping Embree installation, as requested..."
elif [ "$EMBREE_FORCE_BUILD" = true ]; then
INFO "Forced Embree building, as requested..."
_do_compile_embree=true
else
check_package_ARCH embree
if [ $? -eq 0 ]; then
install_packages_ARCH embree
clean_Embree
else
_do_compile_embree=true
fi
fi
if [ "$_do_compile_embree" = true ]; then
compile_Embree
fi
fi
PRINT ""
if [ "$FFMPEG_SKIP" = true ]; then
@@ -4322,21 +4126,6 @@ install_OTHER() {
fi
fi
if [ "$WITH_EMBREE" = true ]; then
_do_compile_embree=false
PRINT ""
if [ "$EMBREE_SKIP" = true ]; then
WARNING "Skipping Embree installation, as requested..."
elif [ "$EMBREE_FORCE_BUILD" = true ]; then
INFO "Forced Embree building, as requested..."
_do_compile_embree=true
fi
if [ "$_do_compile_embree" = true ]; then
PRINT ""
compile_Embree
fi
fi
PRINT ""
if [ "$FFMPEG_SKIP" = true ]; then
@@ -4532,12 +4321,6 @@ print_info() {
_buildargs="$_buildargs $_1"
fi
if [ "$WITH_EMBREE" = true ]; then
_1="-D WITH_CYCLES_EMBREE=ON"
PRINT " $_1"
_buildargs="$_buildargs $_1"
fi
if [ "$WITH_JACK" = true ]; then
_1="-D WITH_JACK=ON"
_2="-D WITH_JACK_DYNLOAD=ON"

View File

@@ -68,8 +68,6 @@ goto usage
setlocal ENABLEEXTENSIONS
set CMAKE_DEBUG_OPTIONS=-DWITH_OPTIMIZED_DEBUG=On
if "%3" == "debug" set CMAKE_DEBUG_OPTIONS=-DWITH_OPTIMIZED_DEBUG=Off
set dobuild=1
if "%4" == "nobuild" set dobuild=0
set SOURCE_DIR=%~dp0\..
set BUILD_DIR=%cd%\build
@@ -119,24 +117,20 @@ cd %Staging%\%BuildDir%%ARCH%R
echo %DATE% %TIME% : Start > %StatusFile%
cmake -G "%CMAKE_BUILDER%" %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DBUILD_MODE=Release -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/
echo %DATE% %TIME% : Release Configuration done >> %StatusFile%
if "%dobuild%" == "1" (
msbuild /m "ll.vcxproj" /p:Configuration=Release /fl /flp:logfile=BlenderDeps_llvm.log;Verbosity=normal
msbuild /m "BlenderDependencies.sln" /p:Configuration=Release /fl /flp:logfile=BlenderDeps.log;Verbosity=minimal /verbosity:minimal
echo %DATE% %TIME% : Release Build done >> %StatusFile%
cmake --build . --target Harvest_Release_Results > Harvest_Release.txt
)
msbuild /m "ll.vcxproj" /p:Configuration=Release /fl /flp:logfile=BlenderDeps_llvm.log;Verbosity=normal
msbuild /m "BlenderDependencies.sln" /p:Configuration=Release /fl /flp:logfile=BlenderDeps.log;Verbosity=minimal /verbosity:minimal
echo %DATE% %TIME% : Release Build done >> %StatusFile%
cmake --build . --target Harvest_Release_Results > Harvest_Release.txt
echo %DATE% %TIME% : Release Harvest done >> %StatusFile%
cd %BUILD_DIR%
mkdir %STAGING%\%BuildDir%%ARCH%D
cd %Staging%\%BuildDir%%ARCH%D
cmake -G "%CMAKE_BUILDER%" %SOURCE_DIR% -DDOWNLOAD_DIR=%BUILD_DIR%/downloads -DCMAKE_BUILD_TYPE=Debug -DBUILD_MODE=Debug -DHARVEST_TARGET=%HARVEST_DIR%/%HARVESTROOT%%VSVER_SHORT%/ %CMAKE_DEBUG_OPTIONS%
echo %DATE% %TIME% : Debug Configuration done >> %StatusFile%
if "%dobuild%" == "1" (
msbuild /m "ll.vcxproj" /p:Configuration=Debug /fl /flp:logfile=BlenderDeps_llvm.log;;Verbosity=normal
msbuild /m "BlenderDependencies.sln" /p:Configuration=Debug /verbosity:n /fl /flp:logfile=BlenderDeps.log;;Verbosity=normal
echo %DATE% %TIME% : Debug Build done >> %StatusFile%
cmake --build . --target Harvest_Debug_Results> Harvest_Debug.txt
)
msbuild /m "ll.vcxproj" /p:Configuration=Debug /fl /flp:logfile=BlenderDeps_llvm.log;;Verbosity=normal
msbuild /m "BlenderDependencies.sln" /p:Configuration=Debug /verbosity:n /fl /flp:logfile=BlenderDeps.log;;Verbosity=normal
echo %DATE% %TIME% : Debug Build done >> %StatusFile%
cmake --build . --target Harvest_Debug_Results> Harvest_Debug.txt
echo %DATE% %TIME% : Debug Harvest done >> %StatusFile%
cd %BUILD_DIR%

View File

@@ -72,6 +72,9 @@ if 'cmake' in builder:
if builder.endswith('x86_64_10_9_cmake'):
cmake_extra_options.append('-DCMAKE_OSX_ARCHITECTURES:STRING=x86_64')
cmake_extra_options.append('-DCMAKE_OSX_DEPLOYMENT_TARGET=10.9')
# Used to trick CUDA to see Clang as an older version.
# cmake_extra_options.append('-DCUDA_HOST_COMPILER=/usr/local/cuda-hack/clang')
# cmake_extra_options.append('-DCUDA_NVCC_EXECUTABLE=/usr/local/cuda-hack/nvcc')
elif builder.startswith('win'):
if builder.endswith('_vs2017'):
@@ -116,28 +119,12 @@ if 'cmake' in builder:
cmake_extra_options.extend(["-DCMAKE_C_COMPILER=/usr/bin/gcc-7",
"-DCMAKE_CXX_COMPILER=/usr/bin/g++-7"])
# Workaround to build only sm_7x kernels with CUDA 10, until
# older kernels work well with this version.
if builder.startswith('win'):
cmake_extra_options.append('-DCUDA_VERSION=9.1')
cmake_extra_options.append('-DCUDA_TOOLKIT_INCLUDE:PATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v9.1/include')
cmake_extra_options.append('-DCUDA_TOOLKIT_ROOT_DIR:PATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v9.1')
cmake_extra_options.append('-DCUDA_NVCC_EXECUTABLE:FILEPATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v9.1/bin/nvcc.exe')
cmake_extra_options.append('-DCUDA10_NVCC_EXECUTABLE:FILEPATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v10.0/bin/nvcc.exe')
cmake_extra_options.append('-DCUDA10_TOOLKIT_ROOT_DIR:PATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v10.0')
elif builder.startswith('linux'):
cmake_extra_options.append('-DCUDA_VERSION=9.1')
cmake_extra_options.append('-DCUDA_TOOLKIT_INCLUDE:PATH=/usr/local/cuda-9.1/include')
cmake_extra_options.append('-DCUDA_TOOLKIT_ROOT_DIR:PATH=/usr/local/cuda-9.1')
cmake_extra_options.append('-DCUDA_NVCC_EXECUTABLE:FILEPATH=/usr/local/cuda-9.1/bin/nvcc')
cmake_extra_options.append('-DCUDA10_NVCC_EXECUTABLE:FILEPATH=/usr/local/cuda-10.0/bin/nvcc')
cmake_extra_options.append('-DCUDA10_TOOLKIT_ROOT_DIR:PATH=/usr/local/cuda-10.0')
cmake_options.append("-C" + os.path.join(blender_dir, cmake_config_file))
# Prepare CMake options needed to configure cuda binaries compilation, 64bit only.
if bits == 64:
cuda_cmake_options.append("-DWITH_CYCLES_CUDA_BINARIES=%s" % ('ON' if build_cubins else 'OFF'))
cuda_cmake_options.append("-DCYCLES_CUDA_BINARIES_ARCH=sm_30;sm_35;sm_37;sm_50;sm_52;sm_60;sm_61;sm_70")
if build_cubins or 'cuda' in targets:
cuda_cmake_options.append("-DCUDA_64_BIT_DEVICE_CODE=ON")

View File

@@ -107,8 +107,8 @@ if builder.find('cmake') != -1:
if builder.endswith('vc2015'):
platform += "-vc14"
builderified_name = 'blender-{}-{}-{}'.format(blender_full_version, git_hash, platform)
# NOTE: Blender 2.7 is already respected by blender_full_version.
if branch != '' and branch != 'blender2.7':
# NOTE: Blender 2.8 is already respected by blender_full_version.
if branch != '' and branch != 'blender2.8':
builderified_name = branch + "-" + builderified_name
os.rename(result_file, "{}.zip".format(builderified_name))
@@ -173,8 +173,8 @@ if builder.find('cmake') != -1:
blender_hash,
blender_glibc,
blender_arch)
# NOTE: Blender 2.7 is already respected by blender_full_version.
if branch != '' and branch != 'blender2.7':
# NOTE: Blender 2.8 is already respected by blender_full_version.
if branch != '' and branch != 'blender2.8':
package_name = branch + "-" + package_name
upload_filename = package_name + ".tar.bz2"

View File

@@ -1,108 +0,0 @@
# - Find Embree library
# Find the native Embree includes and library
# This module defines
# EMBREE_INCLUDE_DIRS, where to find rtcore.h, Set when
# EMBREE_INCLUDE_DIR is found.
# EMBREE_LIBRARIES, libraries to link against to use Embree.
# EMBREE_ROOT_DIR, The base directory to search for Embree.
# This can also be an environment variable.
# EMBREEFOUND, If false, do not try to use Embree.
#
# also defined, but not for general use are
# EMBREE_LIBRARY, where to find the Embree library.
#=============================================================================
# Copyright 2018 Blender Foundation.
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# If EMBREE_ROOT_DIR was defined in the environment, use it.
IF(NOT EMBREE_ROOT_DIR AND NOT $ENV{EMBREE_ROOT_DIR} STREQUAL "")
SET(EMBREE_ROOT_DIR $ENV{EMBREE_ROOT_DIR})
ENDIF()
SET(_embree_SEARCH_DIRS
${EMBREE_ROOT_DIR}
/usr/local
/sw # Fink
/opt/local # DarwinPorts
/opt/embree
/opt/lib/embree
)
FIND_PATH(EMBREE_INCLUDE_DIR
NAMES
embree3/rtcore.h
HINTS
${_embree_SEARCH_DIRS}
PATH_SUFFIXES
include
)
SET(_embree_FIND_COMPONENTS
embree_avx
embree_avx2
embree_sse42
embree3
lexers
math
simd
sys
tasking
)
SET(_embree_LIBRARIES)
FOREACH(COMPONENT ${_embree_FIND_COMPONENTS})
STRING(TOUPPER ${COMPONENT} UPPERCOMPONENT)
FIND_LIBRARY(EMBREE_${UPPERCOMPONENT}_LIBRARY
NAMES
${COMPONENT}
HINTS
${_embree_SEARCH_DIRS}
PATH_SUFFIXES
lib64 lib
)
LIST(APPEND _embree_LIBRARIES "${EMBREE_${UPPERCOMPONENT}_LIBRARY}")
ENDFOREACH()
FIND_LIBRARY(EMBREE_LIBRARY
NAMES
libembree3
HINTS
${_embree_SEARCH_DIRS}
PATH_SUFFIXES
lib64 lib
)
# handle the QUIETLY and REQUIRED arguments and set EMBREE_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(EMBREE DEFAULT_MSG
_embree_LIBRARIES EMBREE_INCLUDE_DIR)
IF(EMBREE_FOUND)
SET(EMBREE_LIBRARIES ${_embree_LIBRARIES})
SET(EMBREE_INCLUDE_DIRS ${EMBREE_INCLUDE_DIR})
ENDIF(EMBREE_FOUND)
MARK_AS_ADVANCED(
EMBREE_INCLUDE_DIR
)
FOREACH(COMPONENT ${_embree_FIND_COMPONENTS})
STRING(TOUPPER ${COMPONENT} UPPERCOMPONENT)
MARK_AS_ADVANCED(EMBREE_${UPPERCOMPONENT}_LIBRARY)
ENDFOREACH()
UNSET(_embree_SEARCH_DIRS)
UNSET(_embree_FIND_COMPONENTS)
UNSET(_embree_LIBRARIES)

View File

@@ -1,3 +1,4 @@
#
# VLMC RPM Finder
# Authors: Rohit Yadav <rohityadav89@gmail.com>
#

View File

@@ -22,7 +22,7 @@ if(EXISTS ${SOURCE_DIR}/.git)
if(MY_WC_BRANCH STREQUAL "HEAD")
# Detached HEAD, check whether commit hash is reachable
# in the master branch
execute_process(COMMAND git rev-parse --short=12 HEAD
execute_process(COMMAND git rev-parse --short HEAD
WORKING_DIRECTORY ${SOURCE_DIR}
OUTPUT_VARIABLE MY_WC_HASH
OUTPUT_STRIP_TRAILING_WHITESPACE)
@@ -80,12 +80,12 @@ if(EXISTS ${SOURCE_DIR}/.git)
if(NOT _git_below_check STREQUAL "")
# If there're commits between HEAD and upstream this means
# that we're reset-ed to older revision. Use it's hash then.
execute_process(COMMAND git rev-parse --short=12 HEAD
execute_process(COMMAND git rev-parse --short HEAD
WORKING_DIRECTORY ${SOURCE_DIR}
OUTPUT_VARIABLE MY_WC_HASH
OUTPUT_STRIP_TRAILING_WHITESPACE)
else()
execute_process(COMMAND git rev-parse --short=12 @{u}
execute_process(COMMAND git rev-parse --short @{u}
WORKING_DIRECTORY ${SOURCE_DIR}
OUTPUT_VARIABLE MY_WC_HASH
OUTPUT_STRIP_TRAILING_WHITESPACE
@@ -94,7 +94,7 @@ if(EXISTS ${SOURCE_DIR}/.git)
if(MY_WC_HASH STREQUAL "")
# Local branch, not set to upstream.
# Well, let's use HEAD for now
execute_process(COMMAND git rev-parse --short=12 HEAD
execute_process(COMMAND git rev-parse --short HEAD
WORKING_DIRECTORY ${SOURCE_DIR}
OUTPUT_VARIABLE MY_WC_HASH
OUTPUT_STRIP_TRAILING_WHITESPACE)

View File

@@ -16,6 +16,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>
@@ -27,8 +29,7 @@ if not sys.version.startswith("3"):
sys.exit(1)
from cmake_consistency_check_config import (
IGNORE_SOURCE,
IGNORE_CMAKE,
IGNORE,
UTF8_CHECK,
SOURCE_DIR,
BUILD_DIR,
@@ -155,7 +156,6 @@ def cmake_get_src(f):
break
# replace dirs
l = l.replace("${CMAKE_SOURCE_DIR}", SOURCE_DIR)
l = l.replace("${CMAKE_CURRENT_SOURCE_DIR}", cmake_base)
l = l.replace("${CMAKE_CURRENT_BINARY_DIR}", cmake_base_bin)
l = l.strip('"')
@@ -238,16 +238,8 @@ def cmake_get_src(f):
filen.close()
def is_ignore_source(f, ignore_used):
for index, ig in enumerate(IGNORE_SOURCE):
if ig in f:
ignore_used[index] = True
return True
return False
def is_ignore_cmake(f, ignore_used):
for index, ig in enumerate(IGNORE_CMAKE):
def is_ignore(f, ignore_used):
for index, ig in enumerate(IGNORE):
if ig in f:
ignore_used[index] = True
return True
@@ -258,12 +250,8 @@ def main():
print("Scanning:", SOURCE_DIR)
ignore_used_source = [False] * len(IGNORE_SOURCE)
ignore_used_cmake = [False] * len(IGNORE_CMAKE)
for cmake in source_list(SOURCE_DIR, is_cmake):
if not is_ignore_cmake(cmake, ignore_used_cmake):
cmake_get_src(cmake)
cmake_get_src(cmake)
# First do stupid check, do these files exist?
print("\nChecking for missing references:")
@@ -294,10 +282,12 @@ def main():
del is_err
del errs
ignore_used = [False] * len(IGNORE)
# now check on files not accounted for.
print("\nC/C++ Files CMake does not know about...")
for cf in sorted(source_list(SOURCE_DIR, is_c)):
if not is_ignore_source(cf, ignore_used_source):
if not is_ignore(cf, ignore_used):
if cf not in global_c:
print("missing_c: ", cf)
@@ -314,7 +304,7 @@ def main():
print("\nC/C++ Headers CMake does not know about...")
for hf in sorted(source_list(SOURCE_DIR, is_c_header)):
if not is_ignore_source(hf, ignore_used_source):
if not is_ignore(hf, ignore_used):
if hf not in global_h:
print("missing_h: ", hf)
@@ -336,15 +326,9 @@ def main():
traceback.print_exc()
# Check ignores aren't stale
print("\nCheck for unused 'IGNORE_SOURCE' paths...")
for index, ig in enumerate(IGNORE_SOURCE):
if not ignore_used_source[index]:
print("unused ignore: %r" % ig)
# Check ignores aren't stale
print("\nCheck for unused 'IGNORE_CMAKE' paths...")
for index, ig in enumerate(IGNORE_CMAKE):
if not ignore_used_cmake[index]:
print("\nCheck for unused 'IGNORE' paths...")
for index, ig in enumerate(IGNORE):
if not ignore_used[index]:
print("unused ignore: %r" % ig)

View File

@@ -1,13 +1,10 @@
import os
IGNORE_SOURCE = (
IGNORE = (
"/test/",
"/tests/gtests/",
"/release/",
# specific source files
"extern/audaspace/",
# specific source files
"extern/bullet2/src/BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp",
@@ -20,10 +17,6 @@ IGNORE_SOURCE = (
"intern/audaspace/SRC/AUD_SRCResampleFactory.cpp",
"intern/audaspace/SRC/AUD_SRCResampleReader.cpp",
"doc/doxygen/doxygen.extern.h",
"doc/doxygen/doxygen.intern.h",
"doc/doxygen/doxygen.main.h",
"doc/doxygen/doxygen.source.h",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.h",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.h",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btInternalEdgeUtility.h",
@@ -36,10 +29,6 @@ IGNORE_SOURCE = (
"intern/audaspace/SRC/AUD_SRCResampleReader.h",
)
IGNORE_CMAKE = (
"extern/audaspace/CMakeLists.txt",
)
UTF8_CHECK = True
SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.normpath(os.path.join(os.path.dirname(__file__), "..", ".."))))

View File

@@ -16,6 +16,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton, M.G. Kishalmi
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>

View File

@@ -16,6 +16,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton, M.G. Kishalmi
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>

View File

@@ -16,6 +16,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>

View File

@@ -16,6 +16,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>

View File

@@ -16,6 +16,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>

View File

@@ -16,6 +16,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>

View File

@@ -16,6 +16,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>

View File

@@ -12,6 +12,7 @@ set(WITH_CODEC_FFMPEG ON CACHE BOOL "" FORCE)
set(WITH_CODEC_SNDFILE ON CACHE BOOL "" FORCE)
set(WITH_CYCLES ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_OSL ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_OPENSUBDIV ON CACHE BOOL "" FORCE)
set(WITH_FFTW3 ON CACHE BOOL "" FORCE)
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)
@@ -40,7 +41,6 @@ set(WITH_OPENAL ON CACHE BOOL "" FORCE)
set(WITH_OPENCOLLADA ON CACHE BOOL "" FORCE)
set(WITH_OPENCOLORIO ON CACHE BOOL "" FORCE)
set(WITH_OPENMP ON CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB_BLOSC ON CACHE BOOL "" FORCE)
set(WITH_PYTHON_INSTALL ON CACHE BOOL "" FORCE)
@@ -56,10 +56,13 @@ set(WITH_MEM_JEMALLOC ON CACHE BOOL "" FORCE)
if(UNIX AND NOT APPLE)
set(WITH_JACK ON CACHE BOOL "" FORCE)
set(WITH_DOC_MANPAGE ON CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
elseif(WIN32)
set(WITH_JACK OFF CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
elseif(APPLE)
set(WITH_JACK ON CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV OFF CACHE BOOL "" FORCE)
# include("${CMAKE_CURRENT_SOURCE_DIR}/../platform/platform_apple_xcode.cmake")
endif()

View File

@@ -17,6 +17,7 @@ set(WITH_CODEC_FFMPEG OFF CACHE BOOL "" FORCE)
set(WITH_CODEC_SNDFILE OFF CACHE BOOL "" FORCE)
set(WITH_CYCLES OFF CACHE BOOL "" FORCE)
set(WITH_CYCLES_OSL OFF CACHE BOOL "" FORCE)
set(WITH_CYCLES_OPENSUBDIV OFF CACHE BOOL "" FORCE)
set(WITH_FFTW3 OFF CACHE BOOL "" FORCE)
set(WITH_LIBMV OFF CACHE BOOL "" FORCE)
set(WITH_LLVM OFF CACHE BOOL "" FORCE)

View File

@@ -13,6 +13,7 @@ set(WITH_CODEC_FFMPEG ON CACHE BOOL "" FORCE)
set(WITH_CODEC_SNDFILE ON CACHE BOOL "" FORCE)
set(WITH_CYCLES ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_OSL ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_OPENSUBDIV ON CACHE BOOL "" FORCE)
set(WITH_FFTW3 ON CACHE BOOL "" FORCE)
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)
@@ -41,7 +42,6 @@ set(WITH_OPENAL ON CACHE BOOL "" FORCE)
set(WITH_OPENCOLLADA ON CACHE BOOL "" FORCE)
set(WITH_OPENCOLORIO ON CACHE BOOL "" FORCE)
set(WITH_OPENMP ON CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB ON CACHE BOOL "" FORCE)
set(WITH_OPENVDB_BLOSC ON CACHE BOOL "" FORCE)
set(WITH_PYTHON_INSTALL ON CACHE BOOL "" FORCE)
@@ -52,16 +52,19 @@ set(WITH_X11_XF86VMODE ON CACHE BOOL "" FORCE)
set(WITH_MEM_JEMALLOC ON CACHE BOOL "" FORCE)
set(WITH_CYCLES_CUDA_BINARIES ON CACHE BOOL "" FORCE)
set(CYCLES_CUDA_BINARIES_ARCH sm_30;sm_35;sm_37;sm_50;sm_52;sm_60;sm_61;sm_70;sm_75 CACHE STRING "" FORCE)
set(CYCLES_CUDA_BINARIES_ARCH sm_30;sm_35;sm_37;sm_50;sm_52;sm_60;sm_61 CACHE STRING "" FORCE)
# platform dependent options
if(UNIX AND NOT APPLE)
set(WITH_JACK ON CACHE BOOL "" FORCE)
set(WITH_DOC_MANPAGE ON CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
elseif(WIN32)
set(WITH_JACK OFF CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV ON CACHE BOOL "" FORCE)
elseif(APPLE)
set(WITH_JACK ON CACHE BOOL "" FORCE)
set(WITH_OPENSUBDIV OFF CACHE BOOL "" FORCE)
# include("${CMAKE_CURRENT_SOURCE_DIR}/../platform/platform_apple_xcode.cmake")
endif()

View File

@@ -0,0 +1,10 @@
#!/bin/bash
# filters CMake output to be more like nan-makefiles
FILTER="^\[ *[0-9]*%] \|^Built target \|^Scanning "
make $@ | \
sed -u -e 's/^Linking .*\//Linking /' | \
sed -u -e 's/^.*\// /' | \
grep --line-buffered -v "$FILTER"
echo "Build Done"

View File

@@ -16,6 +16,11 @@
#
# The Original Code is Copyright (C) 2006, Blender Foundation
# All rights reserved.
#
# The Original Code is: all of this file.
#
# Contributor(s): Jacques Beaurain.
#
# ***** END GPL LICENSE BLOCK *****
macro(list_insert_after
@@ -365,11 +370,6 @@ function(setup_liblinks
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${PLATFORM_LINKFLAGS}" PARENT_SCOPE)
set(CMAKE_MODULE_LINKER_FLAGS_DEBUG "${CMAKE_MODULE_LINKER_FLAGS_DEBUG} ${PLATFORM_LINKFLAGS_DEBUG}" PARENT_SCOPE)
# jemalloc must be early in the list, to be before pthread (see T57998)
if(WITH_MEM_JEMALLOC)
target_link_libraries(${target} ${JEMALLOC_LIBRARIES})
endif()
target_link_libraries(
${target}
${PNG_LIBRARIES}
@@ -426,7 +426,7 @@ function(setup_liblinks
if(WITH_OPENCOLORIO)
target_link_libraries(${target} ${OPENCOLORIO_LIBRARIES})
endif()
if(WITH_OPENSUBDIV)
if(WITH_OPENSUBDIV OR WITH_CYCLES_OPENSUBDIV)
target_link_libraries(${target} ${OPENSUBDIV_LIBRARIES})
endif()
if(WITH_OPENVDB)
@@ -435,9 +435,6 @@ function(setup_liblinks
if(WITH_CYCLES_OSL)
target_link_libraries(${target} ${OSL_LIBRARIES})
endif()
if(WITH_CYCLES_EMBREE)
target_link_libraries(${target} ${EMBREE_LIBRARIES})
endif()
if(WITH_BOOST)
target_link_libraries(${target} ${BOOST_LIBRARIES})
if(Boost_USE_STATIC_LIBS AND Boost_USE_ICU)
@@ -485,6 +482,9 @@ function(setup_liblinks
)
endif()
endif()
if(WITH_MEM_JEMALLOC)
target_link_libraries(${target} ${JEMALLOC_LIBRARIES})
endif()
if(WITH_MOD_CLOTH_ELTOPO)
target_link_libraries(${target} ${LAPACK_LIBRARIES})
endif()
@@ -672,10 +672,10 @@ function(SETUP_BLENDER_SORTED_LIBS)
bf_intern_mikktspace
bf_intern_dualcon
bf_intern_cycles
cycles_device
cycles_render
cycles_graph
cycles_bvh
cycles_device
cycles_kernel
cycles_util
cycles_subd
@@ -690,7 +690,6 @@ function(SETUP_BLENDER_SORTED_LIBS)
bf_intern_glew_mx
bf_intern_clog
bf_intern_opensubdiv
bf_intern_numaapi
)
if(NOT WITH_SYSTEM_GLOG)
@@ -1196,11 +1195,7 @@ function(delayed_do_install
foreach(i RANGE ${n})
list(GET files ${i} f)
list(GET destinations ${i} d)
if(NOT IS_ABSOLUTE ${d})
install(FILES ${f} DESTINATION ${targetdir}/${d})
else()
install(FILES ${f} DESTINATION ${d})
endif()
install(FILES ${f} DESTINATION ${targetdir}/${d})
endforeach()
endif()
endfunction()
@@ -1237,8 +1232,6 @@ function(data_to_c_simple
get_filename_component(_file_to ${CMAKE_CURRENT_BINARY_DIR}/${file_from}.c REALPATH)
list(APPEND ${list_to_add} ${_file_to})
source_group(Generated FILES ${_file_to})
list(APPEND ${list_to_add} ${file_from})
set(${list_to_add} ${${list_to_add}} PARENT_SCOPE)
get_filename_component(_file_to_path ${_file_to} PATH)
@@ -1393,7 +1386,7 @@ function(find_python_package
NO_DEFAULT_PATH
)
if(NOT EXISTS "${PYTHON_${_upper_package}_PATH}")
if(NOT EXISTS "${PYTHON_${_upper_package}_PATH}")
message(WARNING
"Python package '${package}' path could not be found in:\n"
"'${PYTHON_LIBPATH}/python${PYTHON_VERSION}/site-packages/${package}', "

View File

@@ -26,7 +26,7 @@ if(EXISTS ${CMAKE_SOURCE_DIR}/.git/)
find_package(Git)
if(GIT_FOUND)
# message(STATUS "Found Git: ${GIT_EXECUTABLE}")
execute_process(COMMAND git rev-parse --short=12 HEAD
execute_process(COMMAND git rev-parse --short HEAD
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE MY_WC_HASH
OUTPUT_STRIP_TRAILING_WHITESPACE

View File

@@ -16,6 +16,9 @@
#
# The Original Code is Copyright (C) 2016, Blender Foundation
# All rights reserved.
#
# Contributor(s): Sergey Sharybin.
#
# ***** END GPL LICENSE BLOCK *****
# Libraries configuration for Apple.
@@ -56,7 +59,7 @@ if(WITH_ALEMBIC)
set(ALEMBIC_FOUND ON)
endif()
if(WITH_OPENSUBDIV)
if(WITH_OPENSUBDIV OR WITH_CYCLES_OPENSUBDIV)
set(OPENSUBDIV ${LIBDIR}/opensubdiv)
set(OPENSUBDIV_LIBPATH ${OPENSUBDIV}/lib)
find_library(OSD_LIB_CPU NAMES osdCPU PATHS ${OPENSUBDIV_LIBPATH})
@@ -377,32 +380,13 @@ if(WITH_CYCLES_OSL)
endif()
endif()
if(WITH_CYCLES_EMBREE)
find_package(Embree 3.2.4 REQUIRED)
set(PLATFORM_LINKFLAGS "${PLATFORM_LINKFLAGS} -Xlinker -stack_size -Xlinker 0x100000")
endif()
# CMake FindOpenMP doesn't know about AppleClang before 3.12, so provide custom flags.
if(WITH_OPENMP)
if(CMAKE_C_COMPILER_ID MATCHES "AppleClang" AND CMAKE_C_COMPILER_VERSION VERSION_GREATER_EQUAL "7.0")
# Use OpenMP from our precompiled libraries.
message(STATUS "Using ${LIBDIR}/openmp for OpenMP")
set(OPENMP_CUSTOM ON)
set(OPENMP_FOUND ON)
set(OpenMP_C_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'")
set(OpenMP_CXX_FLAGS "-Xclang -fopenmp -I'${LIBDIR}/openmp/include'")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L'${LIBDIR}/openmp/lib' -lomp")
# Copy libomp.dylib to allow executables like datatoc to work.
if(CMAKE_MAKE_PROGRAM MATCHES "xcodebuild")
set(OPENMP_DYLIB_AUX_PATH "${CMAKE_BINARY_DIR}/bin")
else()
set(OPENMP_DYLIB_AUX_PATH "${CMAKE_BINARY_DIR}")
endif()
execute_process(
COMMAND mkdir -p ${OPENMP_DYLIB_AUX_PATH}/Resources/lib
COMMAND cp -p ${LIBDIR}/openmp/lib/libomp.dylib ${OPENMP_DYLIB_AUX_PATH}/Resources/lib/libomp.dylib)
execute_process(COMMAND ${CMAKE_C_COMPILER} --version OUTPUT_VARIABLE COMPILER_VENDOR)
string(SUBSTRING "${COMPILER_VENDOR}" 0 5 VENDOR_NAME) # truncate output
if(${VENDOR_NAME} MATCHES "Apple") # Apple does not support OpenMP reliable with gcc and not with clang
set(WITH_OPENMP OFF)
else() # vanilla gcc or clang_omp support OpenMP
message(STATUS "Using special OpenMP enabled compiler !") # letting find_package(OpenMP) module work for gcc
endif()
endif()

View File

@@ -16,6 +16,9 @@
#
# The Original Code is Copyright (C) 2016, Blender Foundation
# All rights reserved.
#
# Contributor(s): Jacques Beaurain.
#
# ***** END GPL LICENSE BLOCK *****
# Xcode and system configuration for Apple.

View File

@@ -16,6 +16,9 @@
#
# The Original Code is Copyright (C) 2016, Blender Foundation
# All rights reserved.
#
# Contributor(s): Sergey Sharybin.
#
# ***** END GPL LICENSE BLOCK *****
# Libraries configuration for any *nix system including Linux and Unix.
@@ -39,10 +42,6 @@ if(EXISTS ${LIBDIR})
set(WITH_OPENMP_STATIC ON)
endif()
if(WITH_STATIC_LIBS)
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libstdc++")
endif()
# Wrapper to prefer static libraries
macro(find_package_wrapper)
if(WITH_STATIC_LIBS)
@@ -246,17 +245,13 @@ if(WITH_OPENVDB)
find_package_wrapper(OpenVDB)
find_package_wrapper(TBB)
find_package_wrapper(Blosc)
if(NOT TBB_FOUND)
set(WITH_OPENVDB OFF)
set(WITH_OPENVDB_BLOSC OFF)
message(STATUS "TBB not found, disabling OpenVDB")
elseif(NOT OPENVDB_FOUND)
if(NOT OPENVDB_FOUND OR NOT TBB_FOUND)
set(WITH_OPENVDB OFF)
set(WITH_OPENVDB_BLOSC OFF)
message(STATUS "OpenVDB not found, disabling it")
elseif(NOT BLOSC_FOUND)
set(WITH_OPENVDB_BLOSC OFF)
message(STATUS "Blosc not found, disabling it for OpenVBD")
message(STATUS "Blosc not found, disabling it")
endif()
endif()
@@ -364,10 +359,6 @@ if(WITH_OPENCOLORIO)
endif()
endif()
if(WITH_CYCLES_EMBREE)
find_package(Embree 3.2.4 REQUIRED)
endif()
if(WITH_LLVM)
if(EXISTS ${LIBDIR})
set(LLVM_STATIC ON)
@@ -395,7 +386,7 @@ if(WITH_LLVM OR WITH_SDL_DYNLOAD)
)
endif()
if(WITH_OPENSUBDIV)
if(WITH_OPENSUBDIV OR WITH_CYCLES_OPENSUBDIV)
find_package_wrapper(OpenSubdiv)
set(OPENSUBDIV_LIBRARIES ${OPENSUBDIV_LIBRARIES})
@@ -403,6 +394,7 @@ if(WITH_OPENSUBDIV)
if(NOT OPENSUBDIV_FOUND)
set(WITH_OPENSUBDIV OFF)
set(WITH_CYCLES_OPENSUBDIV OFF)
message(STATUS "OpenSubdiv not found")
endif()
endif()

View File

@@ -16,6 +16,9 @@
#
# The Original Code is Copyright (C) 2016, Blender Foundation
# All rights reserved.
#
# Contributor(s): Sergey Sharybin.
#
# ***** END GPL LICENSE BLOCK *****
# Libraries configuration for Windows.
@@ -170,10 +173,7 @@ if(NOT DEFINED LIBDIR)
set(LIBDIR_BASE "windows")
endif()
# Can be 1910..1912
if(MSVC_VERSION GREATER 1919)
message(STATUS "Visual Studio 2019 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc14)
elseif(MSVC_VERSION GREATER 1909)
if(MSVC_VERSION GREATER 1909)
message(STATUS "Visual Studio 2017 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc14)
elseif(MSVC_VERSION EQUAL 1900)
@@ -225,7 +225,7 @@ if(NOT JPEG_FOUND)
endif()
set(PTHREADS_INCLUDE_DIRS ${LIBDIR}/pthreads/include)
set(PTHREADS_LIBRARIES ${LIBDIR}/pthreads/lib/pthreadVC3.lib)
set(PTHREADS_LIBRARIES ${LIBDIR}/pthreads/lib/pthreadVC2.lib)
set(FREETYPE ${LIBDIR}/freetype)
set(FREETYPE_INCLUDE_DIRS
@@ -502,7 +502,7 @@ if(WITH_IMAGE_OPENJPEG)
set(OPENJPEG_LIBRARIES ${OPENJPEG}/lib/openjp2.lib)
endif()
if(WITH_OPENSUBDIV)
if(WITH_OPENSUBDIV OR WITH_CYCLES_OPENSUBDIV)
set(OPENSUBDIV_INCLUDE_DIR ${LIBDIR}/opensubdiv/include)
set(OPENSUBDIV_LIBPATH ${LIBDIR}/opensubdiv/lib)
set(OPENSUBDIV_LIBRARIES
@@ -606,67 +606,3 @@ if(WITH_CYCLES_OSL)
set(WITH_CYCLES_OSL OFF)
endif()
endif()
if(WITH_CYCLES_EMBREE)
windows_find_package(Embree)
if(NOT EMBREE_FOUND)
set(EMBREE_INCLUDE_DIRS ${LIBDIR}/embree/include)
set(EMBREE_LIBRARIES
optimized ${LIBDIR}/embree/lib/embree3.lib
optimized ${LIBDIR}/embree/lib/embree_avx2.lib
optimized ${LIBDIR}/embree/lib/embree_avx.lib
optimized ${LIBDIR}/embree/lib/embree_sse42.lib
optimized ${LIBDIR}/embree/lib/lexers.lib
optimized ${LIBDIR}/embree/lib/math.lib
optimized ${LIBDIR}/embree/lib/simd.lib
optimized ${LIBDIR}/embree/lib/sys.lib
optimized ${LIBDIR}/embree/lib/tasking.lib
debug ${LIBDIR}/embree/lib/embree3_d.lib
debug ${LIBDIR}/embree/lib/embree_avx2_d.lib
debug ${LIBDIR}/embree/lib/embree_avx_d.lib
debug ${LIBDIR}/embree/lib/embree_sse42_d.lib
debug ${LIBDIR}/embree/lib/lexers_d.lib
debug ${LIBDIR}/embree/lib/math_d.lib
debug ${LIBDIR}/embree/lib/simd_d.lib
debug ${LIBDIR}/embree/lib/sys_d.lib
debug ${LIBDIR}/embree/lib/tasking_d.lib)
endif()
endif()
if (WINDOWS_PYTHON_DEBUG)
# Include the system scripts in the blender_python_system_scripts project.
FILE(GLOB_RECURSE inFiles "${CMAKE_SOURCE_DIR}/release/scripts/*.*" )
ADD_CUSTOM_TARGET(blender_python_system_scripts SOURCES ${inFiles})
foreach(_source IN ITEMS ${inFiles})
get_filename_component(_source_path "${_source}" PATH)
string(REPLACE "${CMAKE_SOURCE_DIR}/release/scripts/" "" _source_path "${_source_path}")
string(REPLACE "/" "\\" _group_path "${_source_path}")
source_group("${_group_path}" FILES "${_source}")
endforeach()
# Include the user scripts from the profile folder in the blender_python_user_scripts project.
set(USER_SCRIPTS_ROOT "$ENV{appdata}/blender foundation/blender/${BLENDER_VERSION}")
file(TO_CMAKE_PATH ${USER_SCRIPTS_ROOT} USER_SCRIPTS_ROOT)
FILE(GLOB_RECURSE inFiles "${USER_SCRIPTS_ROOT}/scripts/*.*" )
ADD_CUSTOM_TARGET(blender_python_user_scripts SOURCES ${inFiles})
foreach(_source IN ITEMS ${inFiles})
get_filename_component(_source_path "${_source}" PATH)
string(REPLACE "${USER_SCRIPTS_ROOT}/scripts" "" _source_path "${_source_path}")
string(REPLACE "/" "\\" _group_path "${_source_path}")
source_group("${_group_path}" FILES "${_source}")
endforeach()
set_target_properties(blender_python_system_scripts PROPERTIES FOLDER "scripts")
set_target_properties(blender_python_user_scripts PROPERTIES FOLDER "scripts")
# Set the default debugging options for the project, only write this file once so the user
# is free to override them at their own peril.
set(USER_PROPS_FILE "${CMAKE_CURRENT_BINARY_DIR}/source/creator/blender.Cpp.user.props")
if(NOT EXISTS ${USER_PROPS_FILE})
# Layout below is messy, because otherwise the generated file will look messy.
file(WRITE ${USER_PROPS_FILE} "<?xml version=\"1.0\" encoding=\"utf-8\"?>
<Project DefaultTargets=\"Build\" xmlns=\"http://schemas.microsoft.com/developer/msbuild/2003\">
<PropertyGroup>
<LocalDebuggerCommandArguments>-con --env-system-scripts \"${CMAKE_SOURCE_DIR}/release/scripts\" </LocalDebuggerCommandArguments>
</PropertyGroup>
</Project>")
endif()
endif()

View File

@@ -16,6 +16,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton, M.G. Kishalmi
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>

View File

@@ -14,6 +14,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>

View File

@@ -29,7 +29,7 @@ try:
os.remove(package_archive)
if os.path.exists(package_dir):
shutil.rmtree(package_dir)
except Exception as ex:
except Exception, ex:
sys.stderr.write('Failed to clean up old package files: ' + str(ex) + '\n')
sys.exit(1)
@@ -40,7 +40,7 @@ try:
for f in os.listdir(package_dir):
if f.startswith('makes'):
os.remove(os.path.join(package_dir, f))
except Exception as ex:
except Exception, ex:
sys.stderr.write('Failed to copy install directory: ' + str(ex) + '\n')
sys.exit(1)
@@ -58,13 +58,13 @@ try:
sys.exit(-1)
subprocess.call(archive_cmd)
except Exception as ex:
except Exception, ex:
sys.stderr.write('Failed to create package archive: ' + str(ex) + '\n')
sys.exit(1)
# empty temporary package dir
try:
shutil.rmtree(package_dir)
except Exception as ex:
except Exception, ex:
sys.stderr.write('Failed to clean up package directory: ' + str(ex) + '\n')
sys.exit(1)

View File

@@ -3,9 +3,6 @@ echo No explicit msvc version requested, autodetecting version.
call "%~dp0\detect_msvc2017.cmd"
if %ERRORLEVEL% EQU 0 goto DetectionComplete
call "%~dp0\detect_msvc2019.cmd"
if %ERRORLEVEL% EQU 0 goto DetectionComplete
call "%~dp0\detect_msvc2015.cmd"
if %ERRORLEVEL% EQU 0 goto DetectionComplete

View File

@@ -1,6 +1,5 @@
if "%BUILD_VS_YEAR%"=="2015" set BUILD_VS_LIBDIRPOST=vc14
if "%BUILD_VS_YEAR%"=="2017" set BUILD_VS_LIBDIRPOST=vc14
if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc14
if "%BUILD_ARCH%"=="x64" (
set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%

View File

@@ -1,4 +1,4 @@
if NOT exist "%BLENDER_DIR%\source\tools\.git" (
if NOT exist "%BLENDER_DIR%/source/tools" (
echo Checking out sub-modules
if not "%GIT%" == "" (
"%GIT%" submodule update --init --recursive --progress

View File

@@ -1,5 +1,3 @@
set BUILD_GENERATOR_POST=
set BUILD_PLATFORM_SELECT=
if "%BUILD_ARCH%"=="x64" (
set MSBUILD_PLATFORM=x64
) else if "%BUILD_ARCH%"=="x86" (
@@ -11,7 +9,7 @@ if "%BUILD_ARCH%"=="x64" (
)
if "%WITH_CLANG%"=="1" (
set CLANG_CMAKE_ARGS=-T"llvm"
set CLANG_CMAKE_ARGS=-T"LLVM-vs2017"
if "%WITH_ASAN%"=="1" (
set ASAN_CMAKE_ARGS=-DWITH_COMPILER_ASAN=On
)
@@ -21,18 +19,7 @@ if "%WITH_CLANG%"=="1" (
exit /b 1
)
)
if "%WITH_PYDEBUG%"=="1" (
set PYDEBUG_CMAKE_ARGS=-DWINDOWS_PYTHON_DEBUG=On
)
if "%BUILD_VS_YEAR%"=="2019" (
set BUILD_PLATFORM_SELECT=-A %MSBUILD_PLATFORM%
) else (
set BUILD_GENERATOR_POST=%WINDOWS_ARCH%
)
set BUILD_CMAKE_ARGS=%BUILD_CMAKE_ARGS% -G "Visual Studio %BUILD_VS_VER% %BUILD_VS_YEAR%%BUILD_GENERATOR_POST%" %BUILD_PLATFORM_SELECT% %TESTS_CMAKE_ARGS% %CLANG_CMAKE_ARGS% %ASAN_CMAKE_ARGS% %PYDEBUG_CMAKE_ARGS%
set BUILD_CMAKE_ARGS=%BUILD_CMAKE_ARGS% -G "Visual Studio %BUILD_VS_VER% %BUILD_VS_YEAR%%WINDOWS_ARCH%" %TESTS_CMAKE_ARGS% %CLANG_CMAKE_ARGS% %ASAN_CMAKE_ARGS%
if NOT EXIST %BUILD_DIR%\nul (
mkdir %BUILD_DIR%
@@ -54,15 +41,15 @@ if "%NOBUILD%"=="1" set MUST_CONFIGURE=1
if "%MUST_CONFIGURE%"=="1" (
if NOT "%verbose%" == "" (
echo "%CMAKE% %BUILD_CMAKE_ARGS% -H%BLENDER_DIR% -B%BUILD_DIR%"
echo %CMAKE% %BUILD_CMAKE_ARGS% -H%BLENDER_DIR% -B%BUILD_DIR%
)
cmake ^
%BUILD_CMAKE_ARGS% ^
-H%BLENDER_DIR% ^
-B%BUILD_DIR%
if errorlevel 1 (
if %ERRORLEVEL% NEQ 0 (
echo "Configuration Failed"
exit /b 1
)

View File

@@ -1,3 +1,76 @@
if NOT "%verbose%" == "" (
echo Detecting msvc 2017
)
set BUILD_VS_VER=15
set BUILD_VS_YEAR=2017
call "%~dp0\detect_msvc_vswhere.cmd"
set ProgramFilesX86=%ProgramFiles(x86)%
if not exist "%ProgramFilesX86%" set ProgramFilesX86=%ProgramFiles%
set vs_where=%ProgramFilesX86%\Microsoft Visual Studio\Installer\vswhere.exe
if not exist "%vs_where%" (
if NOT "%verbose%" == "" (
echo Visual Studio 2017 ^(15.2 or newer^) is not detected
)
goto FAIL
)
if NOT "%verbose%" == "" (
echo "%vs_where%" -latest %VSWHERE_ARGS% -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64`
)
for /f "usebackq tokens=1* delims=: " %%i in (`"%vs_where%" -latest %VSWHERE_ARGS% -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64`) do (
if /i "%%i"=="installationPath" set VS_InstallDir=%%j
)
if "%VS_InstallDir%"=="" (
if NOT "%verbose%" == "" (
echo Visual Studio is detected but the "Desktop development with C++" workload has not been installed
goto FAIL
)
)
set VCVARS=%VS_InstallDir%\VC\Auxiliary\Build\vcvarsall.bat
if exist "%VCVARS%" (
call "%VCVARS%" %BUILD_ARCH%
) else (
if NOT "%verbose%" == "" (
echo "%VCVARS%" not found
)
goto FAIL
)
rem try msbuild
msbuild /version > NUL
if errorlevel 1 (
if NOT "%verbose%" == "" (
echo Visual Studio %BUILD_VS_YEAR% msbuild not found
)
goto FAIL
)
if NOT "%verbose%" == "" (
echo Visual Studio %BUILD_VS_YEAR% msbuild found
)
REM try the c++ compiler
cl 2> NUL 1>&2
if errorlevel 1 (
if NOT "%verbose%" == "" (
echo Visual Studio %BUILD_VS_YEAR% C/C++ Compiler not found
)
goto FAIL
)
if NOT "%verbose%" == "" (
echo Visual Studio %BUILD_VS_YEAR% C/C++ Compiler found
)
if NOT "%verbose%" == "" (
echo Visual Studio 2017 is detected successfully
)
goto EOF
:FAIL
exit /b 1
:EOF

View File

@@ -1,3 +0,0 @@
set BUILD_VS_VER=16
set BUILD_VS_YEAR=2019
call "%~dp0\detect_msvc_vswhere.cmd"

View File

@@ -1,79 +0,0 @@
if NOT "%verbose%" == "" (
echo Detecting msvc %BUILD_VS_YEAR%
)
set ProgramFilesX86=%ProgramFiles(x86)%
if not exist "%ProgramFilesX86%" set ProgramFilesX86=%ProgramFiles%
set vs_where=%ProgramFilesX86%\Microsoft Visual Studio\Installer\vswhere.exe
if not exist "%vs_where%" (
if NOT "%verbose%" == "" (
echo Visual Studio %BUILD_VS_YEAR% is not detected
)
goto FAIL
)
if NOT "%verbose%" == "" (
echo "%vs_where%" -latest %VSWHERE_ARGS% -version ^[%BUILD_VS_VER%.0^,%BUILD_VS_VER%.99^) -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64
)
for /f "usebackq tokens=1* delims=: " %%i in (`"%vs_where%" -latest -version ^[%BUILD_VS_VER%.0^,%BUILD_VS_VER%.99^) %VSWHERE_ARGS% -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64`) do (
if /i "%%i"=="installationPath" set VS_InstallDir=%%j
)
if NOT "%verbose%" == "" (
echo VS_Installdir="%VS_InstallDir%"
)
if "%VS_InstallDir%"=="" (
if NOT "%verbose%" == "" (
echo Visual Studio is detected but the "Desktop development with C++" workload has not been installed
goto FAIL
)
)
set VCVARS=%VS_InstallDir%\VC\Auxiliary\Build\vcvarsall.bat
if exist "%VCVARS%" (
call "%VCVARS%" %BUILD_ARCH%
) else (
if NOT "%verbose%" == "" (
echo "%VCVARS%" not found
)
goto FAIL
)
rem try msbuild
msbuild /version > NUL
if errorlevel 1 (
if NOT "%verbose%" == "" (
echo Visual Studio %BUILD_VS_YEAR% msbuild not found
)
goto FAIL
)
if NOT "%verbose%" == "" (
echo Visual Studio %BUILD_VS_YEAR% msbuild found
)
REM try the c++ compiler
cl 2> NUL 1>&2
if errorlevel 1 (
if NOT "%verbose%" == "" (
echo Visual Studio %BUILD_VS_YEAR% C/C++ Compiler not found
)
goto FAIL
)
if NOT "%verbose%" == "" (
echo Visual Studio %BUILD_VS_YEAR% C/C++ Compiler found
)
if NOT "%verbose%" == "" (
echo Visual Studio %BUILD_VS_YEAR% is detected successfully
)
goto EOF
:FAIL
exit /b 1
:EOF

View File

@@ -50,17 +50,10 @@ if NOT "%1" == "" (
) else if "%1" == "2017pre" (
set BUILD_VS_YEAR=2017
set VSWHERE_ARGS=-prerelease
set BUILD_VS_YEAR=2017
) else if "%1" == "2017b" (
set BUILD_VS_YEAR=2017
set VSWHERE_ARGS=-products Microsoft.VisualStudio.Product.BuildTools
) else if "%1" == "2019" (
set BUILD_VS_YEAR=2019
) else if "%1" == "2019pre" (
set BUILD_VS_YEAR=2019
set VSWHERE_ARGS=-prerelease
) else if "%1" == "2019b" (
set BUILD_VS_YEAR=2019
set VSWHERE_ARGS=-products Microsoft.VisualStudio.Product.BuildTools
) else if "%1" == "2015" (
set BUILD_VS_YEAR=2015
) else if "%1" == "packagename" (
@@ -68,8 +61,6 @@ if NOT "%1" == "" (
shift /1
) else if "%1" == "nobuild" (
set NOBUILD=1
) else if "%1" == "pydebug" (
set WITH_PYDEBUG=1
) else if "%1" == "showhash" (
SET BUILD_SHOW_HASHES=1
REM Non-Build Commands

View File

@@ -25,5 +25,3 @@ set WITH_CLANG=
set WITH_ASAN=
set CLANG_CMAKE_ARGS=
set ASAN_CMAKE_ARGS=
set WITH_PYDEBUG=
set PYDEBUG_CMAKE_ARGS=

View File

@@ -13,4 +13,4 @@ if "%GIT%" == "" (
"%GIT%" pull --rebase
"%GIT%" submodule foreach git pull --rebase origin master
:EOF
:EOF

View File

@@ -1,4 +1,4 @@
# Doxyfile 1.8.15
# Doxyfile 1.8.11
# This file describes the settings to be used by the documentation system
# doxygen (www.doxygen.org) for a project.
@@ -17,11 +17,11 @@
# Project related configuration options
#---------------------------------------------------------------------------
# This tag specifies the encoding used for all characters in the configuration
# file that follow. The default is UTF-8 which is also the encoding used for all
# text before the first occurrence of this tag. Doxygen uses libiconv (or the
# iconv built into libc) for the transcoding. See
# https://www.gnu.org/software/libiconv/ for the list of possible encodings.
# This tag specifies the encoding used for all characters in the config file
# that follow. The default is UTF-8 which is also the encoding used for all text
# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
# for the list of possible encodings.
# The default value is: UTF-8.
DOXYFILE_ENCODING = UTF-8
@@ -93,14 +93,6 @@ ALLOW_UNICODE_NAMES = NO
OUTPUT_LANGUAGE = English
# The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all
# documentation generated by doxygen is written. Doxygen will use this
# information to generate all generated output in the proper direction.
# Possible values are: None, LTR, RTL and Context.
# The default value is: None.
OUTPUT_TEXT_DIRECTION = None
# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
# descriptions after the members that are listed in the file and class
# documentation (similar to Javadoc). Set to NO to disable this.
@@ -244,12 +236,7 @@ TAB_SIZE = 4
# will allow you to put the command \sideeffect (or @sideeffect) in the
# documentation, which will result in a user-defined paragraph with heading
# "Side Effects:". You can put \n's in the value part of an alias to insert
# newlines (in the resulting output). You can put ^^ in the value part of an
# alias to insert a newline as if a physical newline was in the original file.
# When you need a literal { or } or , in the value part of an alias you have to
# escape them by means of a backslash (\), this can lead to conflicts with the
# commands \{ and \} for these it is advised to use the version @{ and @} or use
# a double escape (\\{ and \\})
# newlines.
ALIASES =
@@ -287,26 +274,17 @@ OPTIMIZE_FOR_FORTRAN = NO
OPTIMIZE_OUTPUT_VHDL = NO
# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice
# sources only. Doxygen will then generate output that is more tailored for that
# language. For instance, namespaces will be presented as modules, types will be
# separated into more groups, etc.
# The default value is: NO.
OPTIMIZE_OUTPUT_SLICE = NO
# Doxygen selects the parser to use depending on the extension of the files it
# parses. With this tag you can assign which parser to use for a given
# extension. Doxygen has a built-in mapping, but you can override or extend it
# using this tag. The format is ext=language, where ext is a file extension, and
# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice,
# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser
# tries to guess whether the code is fixed or free formatted code, this is the
# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat
# .inc files as Fortran files (default is PHP), and .f files as C (default is
# Fortran), use: inc=Fortran f=C.
# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:
# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:
# Fortran. In the later case the parser tries to guess whether the code is fixed
# or free formatted code, this is the default for Fortran type files), VHDL. For
# instance to make doxygen treat .inc files as Fortran files (default is PHP),
# and .f files as C (default is Fortran), use: inc=Fortran f=C.
#
# Note: For files without extension you can use no_extension as a placeholder.
#
@@ -317,7 +295,7 @@ EXTENSION_MAPPING =
# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
# according to the Markdown format, which allows for more readable
# documentation. See https://daringfireball.net/projects/markdown/ for details.
# documentation. See http://daringfireball.net/projects/markdown/ for details.
# The output of markdown processing is further processed by doxygen, so you can
# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
# case of backward compatibilities issues.
@@ -325,15 +303,6 @@ EXTENSION_MAPPING =
MARKDOWN_SUPPORT = YES
# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up
# to that level are automatically included in the table of contents, even if
# they do not have an id attribute.
# Note: This feature currently applies only to Markdown headings.
# Minimum value: 0, maximum value: 99, default value: 0.
# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
TOC_INCLUDE_HEADINGS = 0
# When enabled doxygen tries to link words that correspond to documented
# classes, or namespaces to their corresponding documentation. Such a link can
# be prevented in individual cases by putting a % sign in front of the word or
@@ -359,7 +328,7 @@ BUILTIN_STL_SUPPORT = NO
CPP_CLI_SUPPORT = NO
# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen
# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
# will parse them like normal C++ but will assume all classes use public instead
# of private inheritance when no explicit protection keyword is present.
# The default value is: NO.
@@ -762,7 +731,7 @@ WARNINGS = YES
# will automatically be disabled.
# The default value is: YES.
WARN_IF_UNDOCUMENTED = NO
WARN_IF_UNDOCUMENTED = YES
# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
# potential errors in the documentation, such as not documenting some parameters
@@ -775,8 +744,7 @@ WARN_IF_DOC_ERROR = YES
# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
# are documented, but have no documentation for their parameters or return
# value. If set to NO, doxygen will only warn about wrong or incomplete
# parameter documentation, but not about the absence of documentation. If
# EXTRACT_ALL is set to YES then this flag will automatically be disabled.
# parameter documentation, but not about the absence of documentation.
# The default value is: NO.
WARN_NO_PARAMDOC = NO
@@ -824,7 +792,7 @@ INPUT = doxygen.main.h \
# This tag can be used to specify the character encoding of the source files
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
# documentation (see: https://www.gnu.org/software/libiconv/) for the list of
# documentation (see: http://www.gnu.org/software/libiconv) for the list of
# possible encodings.
# The default value is: UTF-8.
@@ -841,8 +809,8 @@ INPUT_ENCODING = UTF-8
# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice.
# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f, *.for, *.tcl,
# *.vhd, *.vhdl, *.ucf, *.qsf, *.as and *.js.
FILE_PATTERNS =
@@ -859,7 +827,7 @@ RECURSIVE = YES
# Note that relative paths are relative to the directory from which doxygen is
# run.
EXCLUDE = ../../build_files \
EXCLUDE = ../../build_files, \
../../release
# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
@@ -1000,7 +968,7 @@ INLINE_SOURCES = NO
STRIP_CODE_COMMENTS = YES
# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
# entity all documented functions referencing it will be listed.
# function all documented functions referencing it will be listed.
# The default value is: NO.
REFERENCED_BY_RELATION = YES
@@ -1032,12 +1000,12 @@ SOURCE_TOOLTIPS = YES
# If the USE_HTAGS tag is set to YES then the references to source code will
# point to the HTML generated by the htags(1) tool instead of doxygen built-in
# source browser. The htags tool is part of GNU's global source tagging system
# (see https://www.gnu.org/software/global/global.html). You will need version
# (see http://www.gnu.org/software/global/global.html). You will need version
# 4.8.6 or higher.
#
# To use it do the following:
# - Install the latest version of global
# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file
# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
# - Make sure the INPUT points to the root of the source tree
# - Run doxygen as normal
#
@@ -1213,17 +1181,6 @@ HTML_COLORSTYLE_GAMMA = 79
HTML_TIMESTAMP = YES
# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
# documentation will contain a main index with vertical navigation menus that
# are dynamically created via Javascript. If disabled, the navigation index will
# consists of multiple levels of tabs that are statically embedded in every HTML
# page. Disable this option to support browsers that do not have Javascript,
# like the Qt help browser.
# The default value is: YES.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_DYNAMIC_MENUS = YES
# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
# documentation will contain sections that can be hidden and shown after the
# page has loaded.
@@ -1247,13 +1204,13 @@ HTML_INDEX_NUM_ENTRIES = 100
# If the GENERATE_DOCSET tag is set to YES, additional index files will be
# generated that can be used as input for Apple's Xcode 3 integrated development
# environment (see: https://developer.apple.com/xcode/), introduced with OSX
# 10.5 (Leopard). To create a documentation set, doxygen will generate a
# environment (see: http://developer.apple.com/tools/xcode/), introduced with
# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
# Makefile in the HTML output directory. Running make will produce the docset in
# that directory and running make install will install the docset in
# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy
# genXcode/_index.html for more information.
# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
# for more information.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
@@ -1292,7 +1249,7 @@ DOCSET_PUBLISHER_NAME = Publisher
# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on
# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
# Windows.
#
# The HTML Help Workshop contains a compiler that can convert all HTML output
@@ -1368,7 +1325,7 @@ QCH_FILE =
# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
# Project output. For more information please see Qt Help Project / Namespace
# (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1376,7 +1333,7 @@ QHP_NAMESPACE = org.doxygen.Project
# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
# Help Project output. For more information please see Qt Help Project / Virtual
# Folders (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-
# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
# folders).
# The default value is: doc.
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1385,7 +1342,7 @@ QHP_VIRTUAL_FOLDER = doc
# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
# filter to add. For more information please see Qt Help Project / Custom
# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1393,7 +1350,7 @@ QHP_CUST_FILTER_NAME =
# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
# custom filter to add. For more information please see Qt Help Project / Custom
# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1401,7 +1358,7 @@ QHP_CUST_FILTER_ATTRS =
# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
# project's filter section matches. Qt Help Project / Filter Attributes (see:
# http://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes).
# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_SECT_FILTER_ATTRS =
@@ -1494,7 +1451,7 @@ EXT_LINKS_IN_WINDOW = NO
FORMULA_FONTSIZE = 10
# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
# Use the FORMULA_TRANPARENT tag to determine whether or not the images
# generated for formulas are transparent PNGs. Transparent PNGs are not
# supported properly for IE 6.0, but are supported on all modern browsers.
#
@@ -1506,7 +1463,7 @@ FORMULA_FONTSIZE = 10
FORMULA_TRANSPARENT = YES
# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
# https://www.mathjax.org) which uses client side Javascript for the rendering
# http://www.mathjax.org) which uses client side Javascript for the rendering
# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
# installed or if you want to formulas look prettier in the HTML output. When
# enabled you may also need to install MathJax separately and configure the path
@@ -1533,8 +1490,8 @@ MATHJAX_FORMAT = HTML-CSS
# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
# Content Delivery Network so you can quickly see the result without installing
# MathJax. However, it is strongly recommended to install a local copy of
# MathJax from https://www.mathjax.org before deployment.
# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/.
# MathJax from http://www.mathjax.org before deployment.
# The default value is: http://cdn.mathjax.org/mathjax/latest.
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_RELPATH = http://www.mathjax.org/mathjax
@@ -1595,7 +1552,7 @@ SERVER_BASED_SEARCH = NO
#
# Doxygen ships with an example indexer (doxyindexer) and search engine
# (doxysearch.cgi) which are based on the open source search engine library
# Xapian (see: https://xapian.org/).
# Xapian (see: http://xapian.org/).
#
# See the section "External Indexing and Searching" for details.
# The default value is: NO.
@@ -1608,7 +1565,7 @@ EXTERNAL_SEARCH = NO
#
# Doxygen ships with an example indexer (doxyindexer) and search engine
# (doxysearch.cgi) which are based on the open source search engine library
# Xapian (see: https://xapian.org/). See the section "External Indexing and
# Xapian (see: http://xapian.org/). See the section "External Indexing and
# Searching" for details.
# This tag requires that the tag SEARCHENGINE is set to YES.
@@ -1660,34 +1617,21 @@ LATEX_OUTPUT = latex
# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
# invoked.
#
# Note that when not enabling USE_PDFLATEX the default is latex when enabling
# USE_PDFLATEX the default is pdflatex and when in the later case latex is
# chosen this is overwritten by pdflatex. For specific output languages the
# default can have been set differently, this depends on the implementation of
# the output language.
# Note that when enabling USE_PDFLATEX this option is only used for generating
# bitmaps for formulas in the HTML output, but not in the Makefile that is
# written to the output directory.
# The default file is: latex.
# This tag requires that the tag GENERATE_LATEX is set to YES.
LATEX_CMD_NAME = latex
# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
# index for LaTeX.
# Note: This tag is used in the Makefile / make.bat.
# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file
# (.tex).
# The default file is: makeindex.
# This tag requires that the tag GENERATE_LATEX is set to YES.
MAKEINDEX_CMD_NAME = makeindex
# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to
# generate index for LaTeX.
# Note: This tag is used in the generated output file (.tex).
# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat.
# The default value is: \makeindex.
# This tag requires that the tag GENERATE_LATEX is set to YES.
LATEX_MAKEINDEX_CMD = \makeindex
# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
# documents. This may be useful for small projects and may help to save some
# trees in general.
@@ -1822,14 +1766,6 @@ LATEX_BIB_STYLE = plain
LATEX_TIMESTAMP = NO
# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute)
# path from which the emoji images will be read. If a relative path is entered,
# it will be relative to the LATEX_OUTPUT directory. If left blank the
# LATEX_OUTPUT directory will be used.
# This tag requires that the tag GENERATE_LATEX is set to YES.
LATEX_EMOJI_DIRECTORY =
#---------------------------------------------------------------------------
# Configuration options related to the RTF output
#---------------------------------------------------------------------------
@@ -1869,9 +1805,9 @@ COMPACT_RTF = NO
RTF_HYPERLINKS = NO
# Load stylesheet definitions from file. Syntax is similar to doxygen's
# configuration file, i.e. a series of assignments. You only have to provide
# replacements, missing definitions are set to their default value.
# Load stylesheet definitions from file. Syntax is similar to doxygen's config
# file, i.e. a series of assignments. You only have to provide replacements,
# missing definitions are set to their default value.
#
# See also section "Doxygen usage" for information on how to generate the
# default style sheet that doxygen normally uses.
@@ -1880,8 +1816,8 @@ RTF_HYPERLINKS = NO
RTF_STYLESHEET_FILE =
# Set optional variables used in the generation of an RTF document. Syntax is
# similar to doxygen's configuration file. A template extensions file can be
# generated using doxygen -e rtf extensionFile.
# similar to doxygen's config file. A template extensions file can be generated
# using doxygen -e rtf extensionFile.
# This tag requires that the tag GENERATE_RTF is set to YES.
RTF_EXTENSIONS_FILE =
@@ -1967,13 +1903,6 @@ XML_OUTPUT = xml
XML_PROGRAMLISTING = YES
# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include
# namespace members in file scope as well, matching the HTML output.
# The default value is: NO.
# This tag requires that the tag GENERATE_XML is set to YES.
XML_NS_MEMB_FILE_SCOPE = NO
#---------------------------------------------------------------------------
# Configuration options related to the DOCBOOK output
#---------------------------------------------------------------------------
@@ -2006,9 +1935,9 @@ DOCBOOK_PROGRAMLISTING = NO
#---------------------------------------------------------------------------
# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
# AutoGen Definitions (see http://autogen.sourceforge.net/) file that captures
# the structure of the code including all documentation. Note that this feature
# is still experimental and incomplete at the moment.
# AutoGen Definitions (see http://autogen.sf.net) file that captures the
# structure of the code including all documentation. Note that this feature is
# still experimental and incomplete at the moment.
# The default value is: NO.
GENERATE_AUTOGEN_DEF = NO
@@ -2431,11 +2360,6 @@ DIAFILE_DIRS =
PLANTUML_JAR_PATH =
# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a
# configuration file for plantuml.
PLANTUML_CFG_FILE =
# When using plantuml, the specified paths are searched for files specified by
# the !include statement in a plantuml block.

View File

@@ -1,4 +1,10 @@
/** \defgroup blenderplayer Blender Player */
/** \defgroup blc bad level calls
* \ingroup blenderplayer
*/
/** \defgroup render Rendering
* \ingroup blender
*/
@@ -23,14 +29,15 @@
* \ingroup python
*/
/** \defgroup blpluginapi Blender pluginapi
* \ingroup blender
* \attention not in use currently
*/
/* ================================ */
/** \defgroup blender Blender */
/** \defgroup balembic BlenderAlembic
* \ingroup blender
*/
/** \defgroup blt BlenTranslation
* \ingroup blender
*/
@@ -74,10 +81,6 @@
* \ingroup blender
*/
/** \defgroup shader_fx Shader Effects
* \ingroup blender
*/
/** \defgroup data DNA, RNA and .blend access*/
/** \defgroup gpu GPU

View File

@@ -65,7 +65,7 @@ fw('.TH "BLENDER" "1" "%s" "Blender %s"\n' % (date_string, blender_version.repla
fw('''
.SH NAME
blender \- a full-featured 3D application''')
blender \- a 3D modelling and rendering package''')
fw('''
.SH SYNOPSIS
@@ -76,9 +76,9 @@ fw('''
.SH DESCRIPTION
.PP
.B blender
is a full-featured 3D application. It supports the entirety of the 3D pipeline - modeling, rigging, animation, simulation, rendering, compositing, motion tracking, and video editing.
is a 3D modelling and rendering package. Originating as the in-house software of a high quality animation studio, Blender has proven to be an extremely fast and versatile design instrument. The software has a personal touch, offering a unique approach to the world of Three Dimensions.
Use Blender to create 3D images and animations, films and commercials, content for games, architectural and industrial visualizations, and scientific visualizations.
Use Blender to create TV commercials, to make technical visualizations, business graphics, to create content for games, or design user interfaces. You can easily build and manage complex environments. The renderer is versatile and extremely fast. All basic animation principles (curves & keys) are well implemented.
http://www.blender.org''')

View File

@@ -1,10 +0,0 @@
"""
Run a Function in x Seconds
---------------------------
"""
import bpy
def in_5_seconds():
print("Hello World")
bpy.app.timers.register(in_5_seconds, first_interval=5)

View File

@@ -1,11 +0,0 @@
"""
Run a Function every x Seconds
------------------------------
"""
import bpy
def every_2_seconds():
print("Hello World")
return 2.0
bpy.app.timers.register(every_2_seconds)

View File

@@ -1,17 +0,0 @@
"""
Run a Function n times every x seconds
--------------------------------------
"""
import bpy
counter = 0
def run_10_times():
global counter
counter += 1
print(counter)
if counter == 10:
return None
return 0.1
bpy.app.timers.register(run_10_times)

View File

@@ -1,12 +0,0 @@
"""
Assign parameters to functions
------------------------------
"""
import bpy
import functools
def print_message(message):
print("Message:", message)
bpy.app.timers.register(functools.partial(print_message, "Hello"), first_interval=2.0)
bpy.app.timers.register(functools.partial(print_message, "World"), first_interval=3.0)

View File

@@ -1,25 +0,0 @@
"""
Use a Timer to react to events in another thread
------------------------------------------------
You should never modify Blender data at arbitrary points in time in separate threads.
However, you can use a queue to collect all the actions that should be executed once Blender is in the right state again.
Python's `queue.Queue` can be used here, because it implements the required locking semantics.
"""
import bpy
import queue
execution_queue = queue.Queue()
# This function can safely be called in another thread.
# The function will be executed when the timer runs the next time.
def run_in_main_thread(function):
execution_queue.put(function)
def execute_queued_functions():
while not execution_queue.empty():
function = execution_queue.get()
function()
return 1.0
bpy.app.timers.register(execute_queued_functions)

View File

@@ -50,8 +50,8 @@ class OBJECT_OT_addon_prefs_example(Operator):
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
preferences = context.preferences
addon_prefs = preferences.addons[__name__].preferences
user_preferences = context.user_preferences
addon_prefs = user_preferences.addons[__name__].preferences
info = ("Path: %s, Number: %d, Boolean %r" %
(addon_prefs.filepath, addon_prefs.number, addon_prefs.boolean))

View File

@@ -3,27 +3,26 @@ Basic Object Operations Example
+++++++++++++++++++++++++++++++
This script demonstrates basic operations on object like creating new
object, placing it into a view layer, selecting it and making it active.
object, placing it into scene, selecting it and making it active.
"""
import bpy
from mathutils import Matrix
view_layer = bpy.context.view_layer
scene = bpy.context.scene
# Create new light datablock.
light_data = bpy.data.lights.new(name="New Light", type='POINT')
# Create new lamp datablock
lamp_data = bpy.data.lamps.new(name="New Lamp", type='POINT')
# Create new object with our light datablock.
light_object = bpy.data.objects.new(name="New Light", object_data=light_data)
# Create new object with our lamp datablock
lamp_object = bpy.data.objects.new(name="New Lamp", object_data=lamp_data)
# Link light object to the active collection of current view layer,
# so that it'll appear in the current scene.
view_layer.active_layer_collection.collection.objects.link(light_object)
# Link lamp object to the scene so it'll appear in this scene
scene.objects.link(lamp_object)
# Place light to a specified location.
light_object.location = (5.0, 5.0, 5.0)
# Place lamp to a specified location
lamp_object.location = (5.0, 5.0, 5.0)
# And finally select it and make it active.
light_object.select_set(True)
view_layer.objects.active = light_object
# And finally select it make active
lamp_object.select = True
scene.objects.active = lamp_object

View File

@@ -1,122 +0,0 @@
"""
Geometry Batches
++++++++++++++++
Geometry is drawn in batches.
A batch contains the necessary data to perform the drawing.
That includes an obligatory *Vertex Buffer* and an optional *Index Buffer*,
each of which is described in more detail in the following sections.
A batch also defines a draw type.
Typical draw types are `POINTS`, `LINES` and `TRIS`.
The draw type determines how the data will be interpreted and drawn.
Vertex Buffers
++++++++++++++
A *Vertex Buffer Object* (VBO) (:class:`gpu.types.GPUVertBuf`)
is an array that contains the vertex attributes needed for drawing using a specific shader.
Typical vertex attributes are *location*, *normal*, *color*, and *uv*.
Every vertex buffer has a *Vertex Format* (:class:`gpu.types.GPUVertFormat`)
and a length corresponding to the number of vertices in the buffer.
A vertex format describes the attributes stored per vertex and their types.
The following code demonstrates the creation of a vertex buffer that contains 6 vertices.
For each vertex 2 attributes will be stored: The position and the normal.
.. code-block:: python
import gpu
vertex_positions = [(0, 0, 0), ...]
vertex_normals = [(0, 0, 1), ...]
fmt = gpu.types.GPUVertFormat()
fmt.attr_add(id="pos", comp_type='F32', len=3, fetch_mode='FLOAT')
fmt.attr_add(id="normal", comp_type='F32', len=3, fetch_mode='FLOAT')
vbo = gpu.types.GPUVertBuf(len=6, format=fmt)
vbo.attr_fill(id="pos", data=vertex_positions)
vbo.attr_fill(id="normal", data=vertex_normals)
This vertex buffer could be used to draw 6 points, 3 separate lines, 5 consecutive lines, 2 separate triangles, ...
For example, in the case of separate lines, every two consecutive vertices define one line.
The type that is actually drawn is determined later, when the batch is created.
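As a minimal sketch (assuming the Blender 2.80 API), the vertex buffer above can be wrapped into batches with different draw types using :class:`gpu.types.GPUBatch`:
.. code-block:: python
import gpu
# The same six vertices, interpreted differently depending on the draw type.
point_batch = gpu.types.GPUBatch(type='POINTS', buf=vbo)  # 6 points
line_batch = gpu.types.GPUBatch(type='LINES', buf=vbo)    # 3 separate lines
tri_batch = gpu.types.GPUBatch(type='TRIS', buf=vbo)      # 2 separate triangles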
Index Buffers
+++++++++++++
Often triangles and lines share one or more vertices.
With only a vertex buffer, one would have to store all attributes for these vertices multiple times.
This is very inefficient because in a connected triangle mesh every vertex is used 6 times on average.
A more efficient approach would be to use an *Index Buffer* (IBO) (:class:`gpu.types.GPUIndexBuf`),
sometimes referred to as *Element Buffer*.
An *Index Buffer* is an array that references vertices based on their index in the vertex buffer.
For instance, to draw a rectangle composed of two triangles, one could use an index buffer.
.. code-block:: python
positions = (
(-1, 1), (1, 1),
(-1, -1), (1, -1))
indices = ((0, 1, 2), (2, 1, 3))
ibo = gpu.types.GPUIndexBuf(type='TRIS', seq=indices)
Here the first tuple in `indices` describes which vertices should be used for the first triangle
(same for the second tuple).
Note how the diagonal vertices 1 and 2 are shared between both triangles.
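A hedged sketch of combining such an index buffer with a matching vertex buffer (the names follow the snippets above; the format and fill calls mirror the earlier vertex buffer example):
.. code-block:: python
import gpu
fmt = gpu.types.GPUVertFormat()
fmt.attr_add(id="pos", comp_type='F32', len=2, fetch_mode='FLOAT')
vbo = gpu.types.GPUVertBuf(len=4, format=fmt)
vbo.attr_fill(id="pos", data=positions)
# The index buffer reuses the four vertices to form the two triangles.
batch = gpu.types.GPUBatch(type='TRIS', buf=vbo, elem=ibo)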
Shaders
+++++++
A shader is a program that runs on the GPU (written in GLSL in our case).
There are multiple types of shaders.
The most important ones are *Vertex Shaders* and *Fragment Shaders*.
Typically multiple shaders are linked together into a *Program*.
However, in the Blender Python API the term *Shader* refers to an OpenGL Program.
Every :class:`gpu.types.GPUShader` consists of a vertex shader, a fragment shader and an optional geometry shader.
For common drawing tasks there are some built-in shaders accessible from :class:`gpu.shader.from_builtin`
with an identifier such as `2D_UNIFORM_COLOR` or `3D_FLAT_COLOR`.
Every shader defines a set of attributes and uniforms that have to be set in order to use the shader.
Attributes are properties that are set using a vertex buffer and can be different for individual vertices.
Uniforms are properties that are constant per draw call.
They can be set using the `shader.uniform_*` functions after the shader has been bound.
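For illustration, a small sketch of binding a built-in shader and setting one of its uniforms (the color value is an arbitrary example):
.. code-block:: python
import gpu
shader = gpu.shader.from_builtin('2D_UNIFORM_COLOR')
shader.bind()
# "color" is a uniform of this built-in shader; it stays constant per draw call.
shader.uniform_float("color", (1.0, 0.5, 0.0, 1.0))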
Batch Creation
++++++++++++++
Batches can be created by first manually creating VBOs and IBOs.
However, it is recommended to use the :class:`gpu_extras.batch.batch_for_shader` function.
It makes sure that all the vertex attributes necessary for a specific shader are provided.
Consequently, the shader has to be passed to the function as well.
When using this function one rarely has to care about the vertex format, VBOs and IBOs created in the background.
This is still useful background to have when writing drawing code, though.
Since batches can be drawn multiple times, they should be cached and reused whenever possible.
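A minimal sketch of the recommended path (the coordinates are placeholder values):
.. code-block:: python
import gpu
from gpu_extras.batch import batch_for_shader
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
coords = [(0, 0, 0), (1, 0, 0), (1, 1, 0)]
# batch_for_shader builds the vertex format, the VBO and the batch for this shader.
batch = batch_for_shader(shader, 'TRIS', {"pos": coords})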
Offscreen Rendering
+++++++++++++++++++
What one can see on the screen after rendering is called the *Front Buffer*.
When draw calls are issued, batches are drawn on a *Back Buffer* that is only displayed
once all drawing is done, at which point the back buffer becomes the new front buffer.
Sometimes one might want to draw the batches into a separate buffer that can be used as
a texture on another object or saved as an image on disk.
This is called Offscreen Rendering.
In Blender Offscreen Rendering is done using the :class:`gpu.types.GPUOffScreen` type.
.. warning::
`GPUOffScreen` objects are bound to the OpenGL context they have been created in.
This means that once Blender discards this context (i.e. the window is closed),
the offscreen instance will be freed.
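A hedged sketch of the basic pattern (the bind/unbind calls are assumptions based on the 2.80 API; see the "Rendering the 3D View into a Texture" example for drawing the whole 3D view):
.. code-block:: python
import gpu
from gpu_extras.batch import batch_for_shader
offscreen = gpu.types.GPUOffScreen(512, 512)
shader = gpu.shader.from_builtin('2D_UNIFORM_COLOR')
batch = batch_for_shader(shader, 'TRIS', {"pos": [(-1, -1), (1, -1), (0, 1)]})
offscreen.bind()
try:
    shader.bind()
    shader.uniform_float("color", (0.0, 1.0, 0.0, 1.0))
    batch.draw(shader)
finally:
    offscreen.unbind()
# offscreen.color_texture now holds the result, e.g. for use with draw_texture_2d().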
Examples
++++++++
To try these examples, just copy them into Blender's text editor and execute them.
To keep the examples relatively small, they just register a draw function that can't easily be removed anymore.
Blender has to be restarted in order to delete the draw handlers.
"""

View File

@@ -1,41 +0,0 @@
"""
Rendering the 3D View into a Texture
------------------------------------
The scene has to have a camera for this example to work.
You could also make this independent of a specific camera,
but Blender does not expose good functions to create view and projection matrices yet.
"""
import bpy
import bgl
import gpu
from gpu_extras.presets import draw_texture_2d
WIDTH = 512
HEIGHT = 256
offscreen = gpu.types.GPUOffScreen(WIDTH, HEIGHT)
def draw():
context = bpy.context
scene = context.scene
view_matrix = scene.camera.matrix_world.inverted()
projection_matrix = scene.camera.calc_matrix_camera(
context.depsgraph, x=WIDTH, y=HEIGHT)
offscreen.draw_view3d(
scene,
context.view_layer,
context.space_data,
context.region,
view_matrix,
projection_matrix)
bgl.glDisable(bgl.GL_DEPTH_TEST)
draw_texture_2d(offscreen.color_texture, (10, 10), WIDTH, HEIGHT)
bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_PIXEL')

View File

@@ -1,65 +0,0 @@
"""
Custom Shader for dotted 3D Line
--------------------------------
In this example the arc length (distance to the first point on the line) is calculated in every vertex.
Between the vertex and fragment shader that value is automatically interpolated
for all points that will be visible on the screen.
In the fragment shader the ``sin`` of the arc length is calculated.
Based on the result a decision is made on whether the fragment should be drawn or not.
"""
import bpy
import gpu
from random import random
from mathutils import Vector
from gpu_extras.batch import batch_for_shader
vertex_shader = '''
uniform mat4 u_ViewProjectionMatrix;
in vec3 position;
in float arcLength;
out float v_ArcLength;
void main()
{
v_ArcLength = arcLength;
gl_Position = u_ViewProjectionMatrix * vec4(position, 1.0f);
}
'''
fragment_shader = '''
uniform float u_Scale;
in float v_ArcLength;
void main()
{
if (step(sin(v_ArcLength * u_Scale), 0.5) == 1) discard;
gl_FragColor = vec4(1.0);
}
'''
coords = [Vector((random(), random(), random())) * 5 for _ in range(5)]
arc_lengths = [0]
for a, b in zip(coords[:-1], coords[1:]):
arc_lengths.append(arc_lengths[-1] + (a - b).length)
shader = gpu.types.GPUShader(vertex_shader, fragment_shader)
batch = batch_for_shader(
shader, 'LINE_STRIP',
{"position": coords, "arcLength": arc_lengths},
)
def draw():
shader.bind()
matrix = bpy.context.region_data.perspective_matrix
shader.uniform_float("u_ViewProjectionMatrix", matrix)
shader.uniform_float("u_Scale", 10)
batch.draw(shader)
bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_VIEW')

View File

@@ -1,20 +0,0 @@
"""
3D Lines with Single Color
--------------------------
"""
import bpy
import gpu
from gpu_extras.batch import batch_for_shader
coords = [(1, 1, 1), (-2, 0, 0), (-2, -1, 3), (0, 1, 1)]
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
batch = batch_for_shader(shader, 'LINES', {"pos": coords})
def draw():
shader.bind()
shader.uniform_float("color", (1, 1, 0, 1))
batch.draw(shader)
bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_VIEW')

View File

@@ -1,46 +0,0 @@
"""
Triangle with Custom Shader
---------------------------
"""
import bpy
import gpu
from gpu_extras.batch import batch_for_shader
vertex_shader = '''
uniform mat4 viewProjectionMatrix;
in vec3 position;
out vec3 pos;
void main()
{
pos = position;
gl_Position = viewProjectionMatrix * vec4(position, 1.0f);
}
'''
fragment_shader = '''
uniform float brightness;
in vec3 pos;
void main()
{
gl_FragColor = vec4(pos * brightness, 1.0);
}
'''
coords = [(1, 1, 1), (2, 0, 0), (-2, -1, 3)]
shader = gpu.types.GPUShader(vertex_shader, fragment_shader)
batch = batch_for_shader(shader, 'TRIS', {"position": coords})
def draw():
shader.bind()
matrix = bpy.context.region_data.perspective_matrix
shader.uniform_float("viewProjectionMatrix", matrix)
shader.uniform_float("brightness", 0.5)
batch.draw(shader)
bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_VIEW')

View File

@@ -1,30 +0,0 @@
"""
Wireframe Cube using Index Buffer
---------------------------------
"""
import bpy
import gpu
from gpu_extras.batch import batch_for_shader
coords = (
(-1, -1, -1), (+1, -1, -1),
(-1, +1, -1), (+1, +1, -1),
(-1, -1, +1), (+1, -1, +1),
(-1, +1, +1), (+1, +1, +1))
indices = (
(0, 1), (0, 2), (1, 3), (2, 3),
(4, 5), (4, 6), (5, 7), (6, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
batch = batch_for_shader(shader, 'LINES', {"pos": coords}, indices=indices)
def draw():
shader.bind()
shader.uniform_float("color", (1, 0, 0, 1))
batch.draw(shader)
bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_VIEW')

View File

@@ -1,36 +0,0 @@
"""
Mesh with Random Vertex Colors
------------------------------
"""
import bpy
import gpu
import numpy as np
from random import random
from gpu_extras.batch import batch_for_shader
mesh = bpy.context.active_object.data
mesh.calc_loop_triangles()
vertices = np.empty((len(mesh.vertices), 3), 'f')
indices = np.empty((len(mesh.loop_triangles), 3), 'i')
mesh.vertices.foreach_get(
"co", np.reshape(vertices, len(mesh.vertices) * 3))
mesh.loop_triangles.foreach_get(
"vertices", np.reshape(indices, len(mesh.loop_triangles) * 3))
vertex_colors = [(random(), random(), random(), 1) for _ in range(len(mesh.vertices))]
shader = gpu.shader.from_builtin('3D_SMOOTH_COLOR')
batch = batch_for_shader(
shader, 'TRIS',
{"pos": vertices, "color": vertex_colors},
indices=indices,
)
def draw():
batch.draw(shader)
bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_VIEW')

View File

@@ -1,26 +0,0 @@
"""
2D Rectangle
------------
"""
import bpy
import gpu
from gpu_extras.batch import batch_for_shader
vertices = (
(100, 100), (300, 100),
(100, 200), (300, 200))
indices = (
(0, 1, 2), (2, 1, 3))
shader = gpu.shader.from_builtin('2D_UNIFORM_COLOR')
batch = batch_for_shader(shader, 'TRIS', {"pos": vertices}, indices=indices)
def draw():
shader.bind()
shader.uniform_float("color", (0, 0.5, 0.5, 1.0))
batch.draw(shader)
bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_PIXEL')

View File

@@ -1,37 +0,0 @@
"""
2D Image
--------
To use this example you have to provide an image that should be displayed.
"""
import bpy
import gpu
import bgl
from gpu_extras.batch import batch_for_shader
IMAGE_NAME = "Untitled"
image = bpy.data.images[IMAGE_NAME]
shader = gpu.shader.from_builtin('2D_IMAGE')
batch = batch_for_shader(
shader, 'TRI_FAN',
{
"pos": ((100, 100), (200, 100), (200, 200), (100, 200)),
"texCoord": ((0, 0), (1, 0), (1, 1), (0, 1)),
},
)
if image.gl_load():
raise Exception()
def draw():
bgl.glActiveTexture(bgl.GL_TEXTURE0)
bgl.glBindTexture(bgl.GL_TEXTURE_2D, image.bindcode)
shader.bind()
shader.uniform_int("image", 0)
batch.draw(shader)
bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_PIXEL')

View File

@@ -1,86 +0,0 @@
"""
Generate a texture using Offscreen Rendering
--------------------------------------------
#. Create an :class:`gpu.types.GPUOffScreen` object.
#. Draw some circles into it.
#. Make a new shader for drawing a planar texture in 3D.
#. Draw the generated texture using the new shader.
"""
import bpy
import gpu
import bgl
from mathutils import Matrix
from gpu_extras.batch import batch_for_shader
from gpu_extras.presets import draw_circle_2d
# Create and fill offscreen
##########################################
offscreen = gpu.types.GPUOffScreen(512, 512)
with offscreen.bind():
bgl.glClear(bgl.GL_COLOR_BUFFER_BIT)
with gpu.matrix.push_pop():
# reset matrices -> use normalized device coordinates [-1, 1]
gpu.matrix.load_matrix(Matrix.Identity(4))
gpu.matrix.load_projection_matrix(Matrix.Identity(4))
amount = 10
for i in range(-amount, amount + 1):
x_pos = i / amount
draw_circle_2d((x_pos, 0.0), (1, 1, 1, 1), 0.5, 200)
# Drawing the generated texture in 3D space
#############################################
vertex_shader = '''
uniform mat4 modelMatrix;
uniform mat4 viewProjectionMatrix;
in vec2 position;
in vec2 uv;
out vec2 uvInterp;
void main()
{
uvInterp = uv;
gl_Position = viewProjectionMatrix * modelMatrix * vec4(position, 0.0, 1.0);
}
'''
fragment_shader = '''
uniform sampler2D image;
in vec2 uvInterp;
void main()
{
gl_FragColor = texture(image, uvInterp);
}
'''
shader = gpu.types.GPUShader(vertex_shader, fragment_shader)
batch = batch_for_shader(
shader, 'TRI_FAN',
{
"position": ((-1, -1), (1, -1), (1, 1), (-1, 1)),
"uv": ((0, 0), (1, 0), (1, 1), (0, 1)),
},
)
def draw():
bgl.glActiveTexture(bgl.GL_TEXTURE0)
bgl.glBindTexture(bgl.GL_TEXTURE_2D, offscreen.color_texture)
shader.bind()
shader.uniform_float("modelMatrix", Matrix.Translation((1, 2, 3)) @ Matrix.Scale(3, 4))
shader.uniform_float("viewProjectionMatrix", bpy.context.region_data.perspective_matrix)
shader.uniform_float("image", 0)
batch.draw(shader)
bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_VIEW')

View File

@@ -1,50 +0,0 @@
"""
Copy Offscreen Rendering result back to RAM
-------------------------------------------
This will create a new image with the given name.
If it already exists, it will overwrite the existing one.
Currently almost all of the execution time is spent in the last line.
In the future this will hopefully be solved by implementing the Python buffer protocol
for :class:`bgl.Buffer` and :class:`bpy.types.Image.pixels` (aka ``bpy_prop_array``).
"""
import bpy
import gpu
import bgl
import random
from mathutils import Matrix
from gpu_extras.presets import draw_circle_2d
IMAGE_NAME = "Generated Image"
WIDTH = 512
HEIGHT = 512
RING_AMOUNT = 10
offscreen = gpu.types.GPUOffScreen(WIDTH, HEIGHT)
with offscreen.bind():
bgl.glClear(bgl.GL_COLOR_BUFFER_BIT)
with gpu.matrix.push_pop():
# reset matrices -> use normalized device coordinates [-1, 1]
gpu.matrix.load_matrix(Matrix.Identity(4))
gpu.matrix.load_projection_matrix(Matrix.Identity(4))
for i in range(RING_AMOUNT):
draw_circle_2d(
(random.uniform(-1, 1), random.uniform(-1, 1)),
(1, 1, 1, 1), random.uniform(0.1, 1), 20)
buffer = bgl.Buffer(bgl.GL_BYTE, WIDTH * HEIGHT * 4)
bgl.glReadBuffer(bgl.GL_BACK)
bgl.glReadPixels(0, 0, WIDTH, HEIGHT, bgl.GL_RGBA, bgl.GL_UNSIGNED_BYTE, buffer)
offscreen.free()
if not IMAGE_NAME in bpy.data.images:
bpy.data.images.new(IMAGE_NAME, WIDTH, HEIGHT)
image = bpy.data.images[IMAGE_NAME]
image.scale(WIDTH, HEIGHT)
image.pixels = [v / 255 for v in buffer]

View File

@@ -1,36 +0,0 @@
"""
Built-in shaders
++++++++++++++++
All built-in shaders have the ``mat4 ModelViewProjectionMatrix`` uniform.
The value of it can only be modified using the :class:`gpu.matrix` module.
2D_UNIFORM_COLOR:
attributes: vec3 pos
uniforms: vec4 color
2D_FLAT_COLOR:
attributes: vec3 pos, vec4 color
uniforms: -
2D_SMOOTH_COLOR:
attributes: vec3 pos, vec4 color
uniforms: -
2D_IMAGE:
attributes: vec3 pos, vec2 texCoord
uniforms: sampler2D image
3D_UNIFORM_COLOR:
attributes: vec3 pos
uniforms: vec4 color
3D_FLAT_COLOR:
attributes: vec3 pos, vec4 color
uniforms: -
3D_SMOOTH_COLOR:
attributes: vec3 pos, vec4 color
uniforms: -
"""

View File

@@ -0,0 +1,264 @@
# Draws an off-screen buffer and display it in the corner of the view.
import bpy
import bgl
import gpu
import numpy as np
g_imageVertSrc = '''
in vec2 texCoord;
in vec2 pos;
out vec2 texCoord_interp;
void main()
{
gl_Position = vec4(pos.xy, 0.0f, 1.0);
gl_Position.z = 1.0f;
texCoord_interp = texCoord;
}
'''
g_imageFragSrc = '''
in vec2 texCoord_interp;
out vec4 fragColor;
uniform sampler2D image;
void main()
{
fragColor = texture(image, texCoord_interp);
}
'''
g_plane_vertices = np.array([
([-1.0, -1.0], [0.0, 0.0]),
([1.0, -1.0], [1.0, 0.0]),
([1.0, 1.0], [1.0, 1.0]),
([1.0, 1.0], [1.0, 1.0]),
([-1.0, 1.0], [0.0, 1.0]),
([-1.0, -1.0], [0.0, 0.0]),
], [('pos', 'f4', 2), ('uv', 'f4', 2)])
class VIEW3D_OT_draw_offscreen(bpy.types.Operator):
bl_idname = "view3d.offscreen_draw"
bl_label = "Viewport Offscreen Draw"
_handle_calc = None
_handle_draw = None
is_enabled = False
global_shader = None
batch_plane = None
uniform_image = -1
shader = None
# manage draw handler
@staticmethod
def draw_callback_px(self, context):
scene = context.scene
aspect_ratio = scene.render.resolution_x / scene.render.resolution_y
self._update_offscreen(context, self._offscreen)
self._opengl_draw(context, self._texture, aspect_ratio, 0.2)
@staticmethod
def handle_add(self, context):
VIEW3D_OT_draw_offscreen._handle_draw = bpy.types.SpaceView3D.draw_handler_add(
self.draw_callback_px, (self, context),
'WINDOW', 'POST_PIXEL',
)
@staticmethod
def handle_remove():
if VIEW3D_OT_draw_offscreen._handle_draw is not None:
bpy.types.SpaceView3D.draw_handler_remove(VIEW3D_OT_draw_offscreen._handle_draw, 'WINDOW')
VIEW3D_OT_draw_offscreen._handle_draw = None
# off-screen buffer
@staticmethod
def _setup_offscreen(context):
scene = context.scene
aspect_ratio = scene.render.resolution_x / scene.render.resolution_y
try:
offscreen = gpu.types.GPUOffScreen(512, int(512 / aspect_ratio))
except Exception as e:
print(e)
offscreen = None
return offscreen
@staticmethod
def _update_offscreen(context, offscreen):
scene = context.scene
view_layer = context.view_layer
render = scene.render
camera = scene.camera
modelview_matrix = camera.matrix_world.inverted()
projection_matrix = camera.calc_matrix_camera(
context.depsgraph,
x=render.resolution_x,
y=render.resolution_y,
scale_x=render.pixel_aspect_x,
scale_y=render.pixel_aspect_y,
)
offscreen.draw_view3d(
scene,
view_layer,
context.space_data,
context.region,
projection_matrix,
modelview_matrix,
)
def _opengl_draw(self, context, texture, aspect_ratio, scale):
"""
OpenGL code to draw a rectangle in the viewport
"""
# view setup
bgl.glDisable(bgl.GL_DEPTH_TEST)
viewport = bgl.Buffer(bgl.GL_INT, 4)
bgl.glGetIntegerv(bgl.GL_VIEWPORT, viewport)
active_texture = bgl.Buffer(bgl.GL_INT, 1)
bgl.glGetIntegerv(bgl.GL_TEXTURE_2D, active_texture)
width = int(scale * viewport[2])
height = int(width / aspect_ratio)
bgl.glViewport(viewport[0], viewport[1], width, height)
bgl.glScissor(viewport[0], viewport[1], width, height)
# draw routine
batch_plane = self.get_batch_plane()
shader = VIEW3D_OT_draw_offscreen.shader
# bind it so we can pass the new uniform values
shader.bind()
bgl.glEnable(bgl.GL_TEXTURE_2D)
bgl.glActiveTexture(bgl.GL_TEXTURE0)
bgl.glBindTexture(bgl.GL_TEXTURE_2D, texture)
shader.uniform_int(VIEW3D_OT_draw_offscreen.uniform_image, 0)
batch_plane.draw()
# restoring settings
bgl.glBindTexture(bgl.GL_TEXTURE_2D, active_texture[0])
bgl.glDisable(bgl.GL_TEXTURE_2D)
# reset view
bgl.glViewport(viewport[0], viewport[1], viewport[2], viewport[3])
bgl.glScissor(viewport[0], viewport[1], viewport[2], viewport[3])
def get_batch_plane(self):
if self.batch_plane is None:
global g_plane_vertices
format = gpu.types.GPUVertFormat()
pos_id = format.attr_add(
id="pos",
comp_type="F32",
len=2,
fetch_mode="FLOAT")
uv_id = format.attr_add(
id="texCoord",
comp_type="F32",
len=2,
fetch_mode="FLOAT")
vbo = gpu.types.GPUVertBuf(
len=len(g_plane_vertices),
format=format)
vbo.fill(id=pos_id, data=g_plane_vertices["pos"])
vbo.fill(id=uv_id, data=g_plane_vertices["uv"])
batch_plane = gpu.types.GPUBatch(type="TRIS", buf=vbo)
shader = self.global_shader
VIEW3D_OT_draw_offscreen.shader = shader
VIEW3D_OT_draw_offscreen.uniform_image = shader.uniform_from_name("image")
batch_plane.program_set(shader)
VIEW3D_OT_draw_offscreen.batch_plane = batch_plane
return VIEW3D_OT_draw_offscreen.batch_plane
# operator functions
@classmethod
def poll(cls, context):
return context.area.type == 'VIEW_3D'
def modal(self, context, event):
if context.area:
context.area.tag_redraw()
if event.type in {'RIGHTMOUSE', 'ESC'}:
self.cancel(context)
return {'CANCELLED'}
return {'PASS_THROUGH'}
def invoke(self, context, event):
if VIEW3D_OT_draw_offscreen.is_enabled:
self.cancel(context)
return {'FINISHED'}
else:
self._offscreen = VIEW3D_OT_draw_offscreen._setup_offscreen(context)
if self._offscreen:
self._texture = self._offscreen.color_texture
else:
self.report({'ERROR'}, "Error initializing offscreen buffer. More details in the console")
return {'CANCELLED'}
VIEW3D_OT_draw_offscreen.handle_add(self, context)
VIEW3D_OT_draw_offscreen.is_enabled = True
if context.area:
context.area.tag_redraw()
context.window_manager.modal_handler_add(self)
return {'RUNNING_MODAL'}
def cancel(self, context):
VIEW3D_OT_draw_offscreen.handle_remove()
VIEW3D_OT_draw_offscreen.is_enabled = False
if VIEW3D_OT_draw_offscreen.batch_plane is not None:
del VIEW3D_OT_draw_offscreen.batch_plane
VIEW3D_OT_draw_offscreen.batch_plane = None
VIEW3D_OT_draw_offscreen.shader = None
if context.area:
context.area.tag_redraw()
def register():
shader = gpu.types.GPUShader(g_imageVertSrc, g_imageFragSrc)
VIEW3D_OT_draw_offscreen.global_shader = shader
bpy.utils.register_class(VIEW3D_OT_draw_offscreen)
def unregister():
bpy.utils.unregister_class(VIEW3D_OT_draw_offscreen)
VIEW3D_OT_draw_offscreen.global_shader = None
if __name__ == "__main__":
try:
unregister()
except RuntimeError:
pass
else:
if hasattr(bpy.types, "VIEW3D_OT_draw_offscreen"):
del bpy.types.VIEW3D_OT_draw_offscreen.global_shader
register()

View File

@@ -83,7 +83,7 @@ When writing scripts that operate on editmode data you will normally want to re-
running the script, this needs to be called explicitly.
The BMesh itself does not store the triangulated faces, they are stored in the :class:`bpy.types.Mesh`,
to refresh tessellation triangles call :class:`bpy.types.Mesh.calc_loop_triangles`.
to refresh tessellation faces call :class:`bpy.types.Mesh.calc_tessface`.
CustomData Access

View File

@@ -298,3 +298,4 @@ then copied :kbd:`Control-C`, usually to paste in the text editor or python cons
zooming the view for example isn't so useful to repeat so it's excluded from the output.
To display *every* operator that runs see :ref:`Show All Operators <info_show_all_operators>`

View File

@@ -164,26 +164,26 @@ for list removal, but these are slower.
Sometimes it's faster (but more memory hungry) to just rebuild the list.
Say you want to remove all triangular polygons in a list.
Say you want to remove all triangular faces in a list.
Rather than...
.. code-block:: python
polygons = mesh.polygons[:] # make a list copy of the meshes polygons
p_idx = len(polygons) # Loop backwards
while p_idx: # while the value is not 0
p_idx -= 1
faces = mesh.tessfaces[:] # make a list copy of the meshes faces
f_idx = len(faces) # Loop backwards
while f_idx: # while the value is not 0
f_idx -= 1
if len(polygons[p_idx].vertices) == 3:
polygons.pop(p_idx) # remove the triangle
if len(faces[f_idx].vertices) == 3:
faces.pop(f_idx) # remove the triangle
It's faster to build a new list with list comprehension.
.. code-block:: python
polygons = [p for p in mesh.polygons if len(p.vertices) != 3]
faces = [f for f in mesh.tessfaces if len(f.vertices) != 3]
Adding List Items
@@ -297,13 +297,13 @@ Here are 3 ways of joining multiple strings into one string for writing.
This also applies to any area of your code that involves a lot of string joining.
``String addition`` -
``String addition`` -
this is the slowest option, *don't use if you can help it, especially when writing data in a loop*.
>>> file.write(str1 + " " + str2 + " " + str3 + "\n")
``String formatting`` -
``String formatting`` -
use this when you are writing string data from floats and ints.
>>> file.write("%s %s %s\n" % (str1, str2, str3))
@@ -392,3 +392,4 @@ While developing a script it is good to time it to be aware of any changes in pe
# do something...
print("My Script Finished: %.4f sec" % (time.time() - time_start))

View File

@@ -173,25 +173,25 @@ In this situation you can...
.. _info_gotcha_mesh_faces:
N-Gons and Tessellation
=======================
N-Gons and Tessellation Faces
=============================
Since 2.63 NGons are supported, this adds some complexity
since in some cases you need to access triangles still (some exporters for example).
since in some cases you need to access triangles/quads still (some exporters for example).
There are now 3 ways to access faces:
- :class:`bpy.types.MeshPolygon` -
this is the data structure which now stores faces in object mode
(access as ``mesh.polygons`` rather than ``mesh.faces``).
- :class:`bpy.types.MeshLoopTriangle` -
the result of tessellating polygons into triangles
(access as ``mesh.loop_triangles``).
- :class:`bpy.types.MeshTessFace` -
the result of triangulating (tessellated) polygons,
the main method of face access in 2.62 or older (access as ``mesh.tessfaces``).
- :class:`bmesh.types.BMFace` -
the polygons as used in editmode.
For the purpose of the following documentation,
these will be referred to as polygons, loop triangles and bmesh-faces respectively.
these will be referred to as polygons, tessfaces and bmesh-faces respectively.
5+ sided faces will be referred to as ``ngons``.
@@ -205,8 +205,8 @@ Support Overview
* - Usage
- :class:`bpy.types.MeshPolygon`
- :class:`bpy.types.MeshTessFace`
- :class:`bmesh.types.BMFace`
- :class:`bpy.types.MeshTessFace`
- :class:`bmesh.types.BMFace`
* - Import/Create
- Poor *(inflexible)*
- Good *(supported as upgrade path)*
@@ -222,7 +222,7 @@ Support Overview
.. note::
Using the :mod:`bmesh` API is completely separate API from :mod:`bpy`,
Using the :mod:`bmesh` api is completely separate api from :mod:`bpy`,
typically you would use one or the other based on the level of editing needed,
not simply for a different way to access faces.
@@ -233,9 +233,14 @@ Creating
All 3 datatypes can be used for face creation.
- polygons are the most efficient way to create faces but the data structure is _very_ rigid and inflexible,
you must have all your vertices and faces ready and create them all at once.
This is further complicated by the fact that each polygon does not store its own verts,
you must have all your vertes and faces ready and create them all at once.
This is further complicated by the fact that each polygon does not store its own verts (as with tessfaces),
rather they reference an index and size in :class:`bpy.types.Mesh.loops` which are a fixed array too.
- tessfaces ideally should not be used for creating faces since they are really only tessellation cache of polygons,
however for scripts upgrading from 2.62 this is by far the most straightforward option.
This works by creating tessfaces and when finished -
they can be converted into polygons by calling :class:`bpy.types.Mesh.update`.
The obvious limitation is ngons can't be created this way.
- bmesh-faces are most likely the easiest way for new scripts to create faces,
since faces can be added one by one and the api has features intended for mesh manipulation.
While :class:`bmesh.types.BMesh` uses more memory it can be managed by only operating on one mesh at a time.
@@ -266,6 +271,30 @@ the choice mostly depends on whether the target format supports ngons or not.
since using bmesh gives some overhead because it's not the native storage format in object mode.
Upgrading Importers from 2.62
-----------------------------
Importers can be upgraded to work with only minor changes.
The main change to be made is to use the tessellation versions of each attribute.
- mesh.faces --> :class:`bpy.types.Mesh.tessfaces`
- mesh.uv_textures --> :class:`bpy.types.Mesh.tessface_uv_textures`
- mesh.vertex_colors --> :class:`bpy.types.Mesh.tessface_vertex_colors`
Once the data is created call :class:`bpy.types.Mesh.update` to convert the tessfaces into polygons.
Upgrading Exporters from 2.62
-----------------------------
For exporters the most direct way to upgrade is to use tessfaces as with importing,
however it's important to know that tessfaces may **not** exist for a mesh,
the array will be empty as if there are no faces.
So before accessing tessface data call: :class:`bpy.types.Mesh.update` ``(calc_tessface=True)``.
EditBones, PoseBones, Bone... Bones
===================================
@@ -288,7 +317,7 @@ Example using :class:`bpy.types.EditBone` in armature editmode:
This is only possible in edit mode.
>>> bpy.context.object.data.edit_bones["Bone"].head = Vector((1.0, 2.0, 3.0))
>>> bpy.context.object.data.edit_bones["Bone"].head = Vector((1.0, 2.0, 3.0))
This will be empty outside of editmode.
@@ -333,7 +362,7 @@ Examples using :class:`bpy.types.PoseBone` in object or pose mode:
.. code-block:: python
# Gets the name of the first constraint (if it exists)
bpy.context.object.pose.bones["Bone"].constraints[0].name
bpy.context.object.pose.bones["Bone"].constraints[0].name
# Gets the last selected pose bone (pose mode only)
bpy.context.active_pose_bone
@@ -378,7 +407,7 @@ This can cause bugs when you add some data (normally imported) then reference it
.. code-block:: python
bpy.data.meshes.new(name=meshid)
# normally some code, function calls...
bpy.data.meshes[meshid]
@@ -388,7 +417,7 @@ Or with name assignment...
.. code-block:: python
obj.name = objname
# normally some code, function calls...
obj = bpy.data.meshes[objname]
@@ -408,12 +437,12 @@ this way you don't run this risk of referencing existing data from the blend fil
# typically declared in the main body of the function.
mesh_name_mapping = {}
mesh = bpy.data.meshes.new(name=meshid)
mesh_name_mapping[meshid] = mesh
# normally some code, or function calls...
# use own dictionary rather than bpy.data
mesh = mesh_name_mapping[meshid]
@@ -534,7 +563,7 @@ Strange errors using 'threading' module
Python threading with Blender only works properly when the threads finish up before the script does.
By using ``threading.join()`` for example.
Here is an example of threading supported by Blender:
Heres an example of threading supported by Blender:
.. code-block:: python
@@ -604,17 +633,12 @@ so until its properly supported, best not make use of this.
Help! My script crashes Blender
===============================
**TL;DR:** Do not keep direct references to Blender data (of any kind) when modifying the container
of that data, and/or when some undo/redo may happen (e.g. during modal operators execution...).
Instead, use indices (or other data always stored by value in Python, like string keys...),
that allow you to get access to the desired data.
Ideally it would be impossible to crash Blender from Python
however there are some problems with the API where it can be made to crash.
Strictly speaking this is a bug in the API but fixing it would mean adding memory verification
on every access since most crashes are caused by the Python objects referencing Blender's memory directly,
whenever the memory is freed or re-allocated, further Python access to it can crash the script.
whenever the memory is freed, further Python access to it can crash the script.
But fixing this would make the scripts run very slowly,
or would require writing a very different kind of API which doesn't reference the memory directly.
@@ -624,15 +648,10 @@ Here are some general hints to avoid running into these problems.
especially when working with large lists since Blender can crash simply by running out of memory.
- Many hard to fix crashes end up being because of referencing freed data,
when removing data be sure not to hold any references to it.
- Re-allocation can lead to the same issues
(e.g. if you add a lot of items to some Collection,
this can lead to re-allocating the underlying container's memory,
invalidating all previous references to existing items).
- Modules or classes that remain active while Blender is used,
should not hold references to data the user may remove, instead,
fetch data from the context each time the script is activated.
- Crashes may not happen every time, they may happen more on some configurations/operating-systems.
- Be wary of recursive patterns, those are very efficient at hiding the issues described here.
.. note::
@@ -642,55 +661,6 @@ Here are some general hints to avoid running into these problems.
While the crash may be in Blender's C/C++ code,
this can help a lot to track down the area of the script that causes the crash.
.. note::
Some container modifications are actually safe, because they will never re-allocate existing data
(e.g. linked lists containers will never re-allocate existing items when adding or removing others).
But knowing which cases are safe and which aren't implies a deep understanding of Blender's internals.
That's why, unless you are willing to dive into the RNA C implementation, it's simpler to
always assume that data references will become invalid when modifying their containers,
in any possible way.
**Don't:**
.. code-block:: python
class TestItems(bpy.types.PropertyGroup):
name: bpy.props.StringProperty()
bpy.utils.register_class(TestItems)
bpy.types.Scene.test_items = bpy.props.CollectionProperty(type=TestItems)
first_item = bpy.context.scene.test_items.add()
for i in range(100):
bpy.context.scene.test_items.add()
# This is likely to crash, as internal code may re-allocate
# the whole container (the collection) memory at some point.
first_item.name = "foobar"
**Do:**
.. code-block:: python
class TestItems(bpy.types.PropertyGroup):
name: bpy.props.StringProperty()
bpy.utils.register_class(TestItems)
bpy.types.Scene.test_items = bpy.props.CollectionProperty(type=TestItems)
first_item = bpy.context.scene.test_items.add()
for i in range(100):
bpy.context.scene.test_items.add()
# This is safe, we are getting the desired data again *after*
# all modifications to its container are done.
first_item = bpy.context.scene.test_items[0]
first_item.name = "foobar"
Undo/Redo
---------
@@ -775,7 +745,7 @@ the object data but are most common when switching edit-mode.
Array Re-Allocation
-------------------
When adding new points to a curve or vertices/edges/polygons to a mesh,
When adding new points to a curve or vertices's/edges/polygons to a mesh,
internally the array which stores this data is re-allocated.
.. code-block:: python
@@ -788,7 +758,7 @@ internally the array which stores this data is re-allocated.
point.co = 1.0, 2.0, 3.0
This can be avoided by re-assigning the point variables after adding the new one or by storing
indices to the points rather than the points themselves.
indices's to the points rather than the points themselves.
The best way is to sidestep the problem altogether: add all the points to the curve at once.
This means you don't have to worry about array re-allocation and it's faster too.
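For example, here is a minimal sketch (assuming a poly spline, where each point stores a
four component coordinate) that adds all points in a single call:

.. code-block:: python

   import bpy

   coords = [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (1.0, 1.0, 0.0)]

   curve = bpy.data.curves.new("example_curve", type='CURVE')
   spline = curve.splines.new('POLY')
   # A new spline already contains one point, so only add the remaining ones.
   spline.points.add(len(coords) - 1)
   # Poly spline points store (x, y, z, w), hence the trailing 1.0.
   spline.points.foreach_set(
       "co", [value for co in coords for value in (*co, 1.0)])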
@@ -800,11 +770,11 @@ Removing Data
**Any** data that you remove shouldn't be modified or accessed afterwards,
this includes f-curves, drivers, render layers, timeline markers, modifiers, constraints
along with objects, scenes, collections, bones.. etc.
along with objects, scenes, groups, bones.. etc.
The ``remove()`` api calls will invalidate the data they free to prevent common mistakes.
The following example shows how this precaution works.
The following example shows how this precortion works.
.. code-block:: python

View File

@@ -120,12 +120,12 @@ however, the index of a member may change while running Blender.
Accessing Attributes
^^^^^^^^^^^^^^^^^^^^
Once you have a data block, such as a material, object, collections etc.,
Once you have a data block, such as a material, object, groups etc.,
its attributes can be accessed much like you would change a setting using the graphical interface.
In fact, the tooltip for each button also displays the Python attribute
which can help in finding what settings to change in a script.
>>> bpy.data.objects[0].name
>>> bpy.data.objects[0].name
'Camera'
>>> bpy.data.scenes["Scene"]
@@ -195,10 +195,10 @@ Example:
value = bpy.data.scenes["Scene"].get("test_prop", "fallback value")
# dictionaries can be assigned as long as they only use basic types.
collection = bpy.data.collections.new("MyTestCollection")
collection["MySettings"] = {"foo": 10, "bar": "spam", "baz": {}}
group = bpy.data.groups.new("MyTestGroup")
group["MySettings"] = {"foo": 10, "bar": "spam", "baz": {}}
del collection["MySettings"]
del group["MySettings"]
Note that these properties can only be assigned basic Python types.
@@ -267,7 +267,7 @@ Operator Poll()
^^^^^^^^^^^^^^^
Many operators have a "poll" function which may check that the cursor
is in a valid area or that the object is in the correct mode (Edit Mode, Weight Paint etc).
is in a valid area or that the object is in the correct mode (Edit Mode, Weight Paint etc).
When an operator's poll function fails within Python, an exception is raised.
For example, calling ``bpy.ops.view3d.render_border()`` from the console raises the following error:
@@ -339,11 +339,11 @@ Notice the extra ``bl_`` variables used to set the context they display in.
To run the script:
#. Highlight the above code then press :kbd:`Ctrl-C` to copy it.
#. Start Blender.
#. Click on the tab for the *Scripting* workspace.
#. Click the button labeled ``New`` to create a new text block.
#. Press :kbd:`Ctrl-V` to paste the code into the text panel (the upper left frame).
#. Highlight the above code then press :kbd:`Ctrl-C` to copy it
#. Start Blender
#. Press :kbd:`Ctrl-Right` twice to change to the Scripting layout
#. Click the button labeled ``New`` and the confirmation pop up in order to create a new text block.
#. Press :kbd:`Ctrl-V` to paste the code into the text panel (the upper left frame)
#. Click on the button **Run Script**.
@@ -394,7 +394,7 @@ Internal Types
Used for Blender data-blocks and collections: :class:`bpy.types.bpy_struct`
For data that contains its own attributes collections/meshes/bones/scenes... etc.
For data that contains its own attributes groups/meshes/bones/scenes... etc.
There are 2 main types that wrap Blenders data, one for data-blocks
(known internally as ``bpy_struct``), another for properties.
@@ -477,3 +477,4 @@ Using Low-Level Functions:
fcu_z.keyframe_points.add(2)
fcu_z.keyframe_points[0].co = 10.0, 0.0
fcu_z.keyframe_points[1].co = 20.0, 1.0

View File

@@ -315,3 +315,4 @@ enable the CMake build option ``WITH_PYTHON_SAFETY``.
This enables data tracking which makes data access about 2x slower
which is why the option isn't enabled in release builds.

View File

@@ -14,6 +14,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
@@ -222,7 +224,6 @@ else:
"bmesh.geometry",
"bpy.app",
"bpy.app.handlers",
"bpy.app.timers",
"bpy.app.translations",
"bpy.context",
"bpy.data",
@@ -237,7 +238,6 @@ else:
"gpu.types",
"gpu.matrix",
"gpu.select",
"gpu_extras",
"idprop.types",
"mathutils",
"mathutils.bvhtree",
@@ -340,11 +340,11 @@ RST_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "rst"))
# stored in ./rst/info_*
INFO_DOCS = (
("info_quickstart.rst",
"Quickstart: new to Blender or scripting and want to get your feet wet?"),
"Blender/Python Quickstart: new to Blender/scripting and want to get your feet wet?"),
("info_overview.rst",
"API Overview: a more complete explanation of Python integration"),
"Blender/Python API Overview: a more complete explanation of Python integration"),
("info_api_reference.rst",
"API Reference Usage: examples of how to use the API reference docs"),
"Blender/Python API Reference Usage: examples of how to use the API reference docs"),
("info_best_practice.rst",
"Best Practice: Conventions to follow for writing good scripts"),
("info_tips_and_tricks.rst",
@@ -357,7 +357,7 @@ INFO_DOCS = (
# only support for properties atm.
RNA_BLACKLIST = {
# XXX messes up PDF!, really a bug but for now just workaround.
"PreferencesSystem": {"language", }
"UserPreferencesSystem": {"language", }
}
MODULE_GROUPING = {
@@ -979,9 +979,9 @@ def pymodule2sphinx(basepath, module_name, module, title):
# Changes in Blender will force errors here
context_type_map = {
# context_member: (RNA type, is_collection)
"active_base": ("ObjectBase", False),
"active_bone": ("EditBone", False),
"active_gpencil_brush": ("GPencilSculptBrush", False),
"active_gpencil_frame": ("GreasePencilLayer", True),
"active_gpencil_layer": ("GPencilLayer", True),
"active_node": ("Node", False),
@@ -1012,8 +1012,8 @@ context_type_map = {
"gpencil_data": ("GreasePencel", False),
"gpencil_data_owner": ("ID", False),
"image_paint_object": ("Object", False),
"lattice": ("Lattice", False),
"light": ("Light", False),
"lattice": ("Lattice", False),
"lightprobe": ("LightProbe", False),
"line_style": ("FreestyleLineStyle", False),
"material": ("Material", False),
@@ -1021,8 +1021,6 @@ context_type_map = {
"mesh": ("Mesh", False),
"meta_ball": ("MetaBall", False),
"object": ("Object", False),
"objects_in_mode": ("Object", True),
"objects_in_mode_unique_data": ("Object", True),
"particle_edit_object": ("Object", False),
"particle_settings": ("ParticleSettings", False),
"particle_system": ("ParticleSystem", False),
@@ -1036,13 +1034,12 @@ context_type_map = {
"selected_bones": ("EditBone", True),
"selected_editable_bases": ("ObjectBase", True),
"selected_editable_bones": ("EditBone", True),
"selected_editable_fcurves": ("FCurve", True),
"selected_editable_fcurves": ("FCurce", True),
"selected_editable_objects": ("Object", True),
"selected_editable_sequences": ("Sequence", True),
"selected_nodes": ("Node", True),
"selected_objects": ("Object", True),
"selected_pose_bones": ("PoseBone", True),
"selected_pose_bones_from_active_object": ("PoseBone", True),
"selected_sequences": ("Sequence", True),
"sequences": ("Sequence", True),
"smoke": ("SmokeModifier", False),
@@ -1052,9 +1049,7 @@ context_type_map = {
"texture_slot": ("MaterialTextureSlot", False),
"texture_user": ("ID", False),
"texture_user_property": ("Property", False),
"uv_sculpt_object": ("Object", False),
"vertex_paint_object": ("Object", False),
"view_layer": ("ViewLayer", False),
"visible_bases": ("ObjectBase", True),
"visible_bones": ("EditBone", True),
"visible_gpencil_layers": ("GPencilLayer", True),
@@ -1062,6 +1057,7 @@ context_type_map = {
"visible_pose_bones": ("PoseBone", True),
"weight_paint_object": ("Object", False),
"world": ("World", False),
"view_layer": ("ViewLayer", False),
}
@@ -1621,7 +1617,6 @@ def write_sphinx_conf_py(basepath):
fw(" 'include__bmesh.rst',\n")
fw("]\n\n")
fw("html_title = 'Blender %s Python API'\n" % BLENDER_VERSION_DOTS)
fw("html_theme = 'sphinx_rtd_theme'\n")
# not helpful since the source is generated, adds to upload size.
fw("html_copy_source = False\n")
@@ -1673,21 +1668,21 @@ def write_rst_contents(basepath):
file = open(filepath, "w", encoding="utf-8")
fw = file.write
fw(title_string("Blender Python API Documentation", "%", double=True))
fw(title_string("Blender Documentation Contents", "%", double=True))
fw("\n")
fw("Welcome to the API reference for Blender %s, built %s.\n" %
fw("Welcome, this document is an API reference for Blender %s, built %s.\n" %
(BLENDER_VERSION_DOTS, BLENDER_DATE))
fw("\n")
# fw("`A PDF version of this document is also available <%s>`_\n" % BLENDER_PDF_FILENAME)
fw("This site can be downloaded for offline use: `Download the full Documentation (zipped HTML files) <%s>`_\n" %
fw("This site can be downloaded for offline use `Download the full Documentation (zipped HTML files) <%s>`_\n" %
BLENDER_ZIP_FILENAME)
fw("\n")
if not EXCLUDE_INFO_DOCS:
fw(".. toctree::\n")
fw(" :maxdepth: 1\n")
fw(" :caption: Documentation\n\n")
fw(" :caption: Blender/Python Documentation\n\n")
for info, info_desc in INFO_DOCS:
fw(" %s <%s>\n" % (info_desc, info))
fw("\n")
@@ -1723,7 +1718,7 @@ def write_rst_contents(basepath):
standalone_modules = (
# submodules are added in parent page
"mathutils", "freestyle", "bgl", "blf", "gpu", "gpu_extras",
"mathutils", "freestyle", "bgl", "blf", "gpu",
"aud", "bpy_extras", "idprop.types", "bmesh",
)
@@ -1825,7 +1820,6 @@ def write_rst_importable_modules(basepath):
"bpy.path": "Path Utilities",
"bpy.utils": "Utilities",
"bpy_extras": "Extra Utilities",
"gpu_extras": "GPU Utilities",
# C_modules
"aud": "Audio System",
@@ -1843,7 +1837,6 @@ def write_rst_importable_modules(basepath):
"bpy.app.handlers": "Application Handlers",
"bpy.app.translations": "Application Translations",
"bpy.app.icons": "Application Icons",
"bpy.app.timers": "Application Timers",
"bpy.props": "Property Definitions",
"idprop.types": "ID Property Access",
"mathutils": "Math Types & Utilities",

View File

@@ -14,6 +14,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>

View File

@@ -16,6 +16,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Bastien Montagne
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>

View File

@@ -16,6 +16,11 @@
#
# The Original Code is Copyright (C) 2006, Blender Foundation
# All rights reserved.
#
# The Original Code is: all of this file.
#
# Contributor(s): Jacques Beaurain.
#
# ***** END GPL LICENSE BLOCK *****
# Libs that adhere to strict flags

View File

@@ -212,7 +212,7 @@ AUD_API const char* AUD_Sound_write(AUD_Sound* sound, const char* filename, AUD_
std::shared_ptr<IWriter> writer = FileWriter::createWriter(filename, specs, static_cast<Container>(container), static_cast<Codec>(codec), bitrate);
FileWriter::writeReader(reader, writer, 0, buffersize);
}
catch(Exception&)
catch(Exception& e)
{
return "An exception occured while writing.";
}

View File

@@ -140,6 +140,8 @@ Sound_data(Sound* self)
std::memcpy(data, buffer->getBuffer(), buffer->getSize());
Py_INCREF(array);
return reinterpret_cast<PyObject*>(array);
}

View File

@@ -261,8 +261,6 @@ FFMPEGWriter::FFMPEGWriter(std::string filename, DeviceSpecs specs, Container fo
case CHANNELS_SURROUND71:
channel_layout = AV_CH_LAYOUT_7POINT1;
break;
default:
AUD_THROW(FileException, "File couldn't be written, channel layout not supported.");
}
try

View File

@@ -61,58 +61,10 @@ bool OpenALDevice::OpenALHandle::pause(bool keep)
return false;
}
bool OpenALDevice::OpenALHandle::reinitialize()
{
DeviceSpecs specs = m_device->m_specs;
specs.specs = m_reader->getSpecs();
ALenum format;
if(!m_device->getFormat(format, specs.specs))
return true;
m_format = format;
// OpenAL playback code
alGenBuffers(CYCLE_BUFFERS, m_buffers);
if(alGetError() != AL_NO_ERROR)
return true;
m_device->m_buffer.assureSize(m_device->m_buffersize * AUD_DEVICE_SAMPLE_SIZE(specs));
int length;
bool eos;
for(m_current = 0; m_current < CYCLE_BUFFERS; m_current++)
{
length = m_device->m_buffersize;
m_reader->read(length, eos, m_device->m_buffer.getBuffer());
if(length == 0)
break;
alBufferData(m_buffers[m_current], m_format, m_device->m_buffer.getBuffer(), length * AUD_DEVICE_SAMPLE_SIZE(specs), specs.rate);
if(alGetError() != AL_NO_ERROR)
return true;
}
alGenSources(1, &m_source);
if(alGetError() != AL_NO_ERROR)
return true;
alSourceQueueBuffers(m_source, m_current, m_buffers);
if(alGetError() != AL_NO_ERROR)
return true;
alSourcei(m_source, AL_SOURCE_RELATIVE, m_relative);
return false;
}
OpenALDevice::OpenALHandle::OpenALHandle(OpenALDevice* device, ALenum format, std::shared_ptr<IReader> reader, bool keep) :
m_isBuffered(false), m_reader(reader), m_keep(keep), m_format(format),
m_eos(false), m_loopcount(0), m_stop(nullptr), m_stop_data(nullptr), m_status(STATUS_PLAYING),
m_relative(1), m_device(device)
m_device(device)
{
DeviceSpecs specs = m_device->m_specs;
specs.specs = m_reader->getSpecs();
@@ -210,9 +162,6 @@ bool OpenALDevice::OpenALHandle::stop()
if(!m_status)
return false;
if(m_stop)
m_stop(m_stop_data);
m_status = STATUS_INVALID;
alDeleteSources(1, &m_source);
@@ -576,6 +525,8 @@ bool OpenALDevice::OpenALHandle::setOrientation(const Quaternion& orientation)
bool OpenALDevice::OpenALHandle::isRelative()
{
int result;
if(!m_status)
return false;
@@ -584,9 +535,9 @@ bool OpenALDevice::OpenALHandle::isRelative()
if(!m_status)
return false;
alGetSourcei(m_source, AL_SOURCE_RELATIVE, &m_relative);
alGetSourcei(m_source, AL_SOURCE_RELATIVE, &result);
return m_relative;
return result;
}
bool OpenALDevice::OpenALHandle::setRelative(bool relative)
@@ -599,9 +550,7 @@ bool OpenALDevice::OpenALHandle::setRelative(bool relative)
if(!m_status)
return false;
m_relative = relative;
alSourcei(m_source, AL_SOURCE_RELATIVE, m_relative);
alSourcei(m_source, AL_SOURCE_RELATIVE, relative);
return true;
}
@@ -903,80 +852,6 @@ void OpenALDevice::updateStreams()
{
lock();
if(m_checkDisconnect)
{
ALCint connected;
alcGetIntegerv(m_device, alcGetEnumValue(m_device, "ALC_CONNECTED"), 1, &connected);
if(!connected)
{
// quit OpenAL
alcMakeContextCurrent(nullptr);
alcDestroyContext(m_context);
alcCloseDevice(m_device);
// restart
if(m_name.empty())
m_device = alcOpenDevice(nullptr);
else
m_device = alcOpenDevice(m_name.c_str());
// if device opening failed, there's really nothing we can do
if(m_device)
{
// at least try to set the frequency
ALCint attribs[] = { ALC_FREQUENCY, (ALCint)specs.rate, 0 };
ALCint* attributes = attribs;
if(specs.rate == RATE_INVALID)
attributes = nullptr;
m_context = alcCreateContext(m_device, attributes);
alcMakeContextCurrent(m_context);
m_checkDisconnect = alcIsExtensionPresent(m_device, "ALC_EXT_disconnect");
alcGetIntegerv(m_device, ALC_FREQUENCY, 1, (ALCint*)&specs.rate);
// check for specific formats and channel counts to be played back
if(alIsExtensionPresent("AL_EXT_FLOAT32") == AL_TRUE)
specs.format = FORMAT_FLOAT32;
else
specs.format = FORMAT_S16;
// if the format of the device changed, all handles are invalidated
// this is unlikely to happen though
if(specs.format != m_specs.format)
stopAll();
m_useMC = alIsExtensionPresent("AL_EXT_MCFORMATS") == AL_TRUE;
if((!m_useMC && specs.channels > CHANNELS_STEREO) ||
specs.channels == CHANNELS_STEREO_LFE ||
specs.channels == CHANNELS_SURROUND5)
specs.channels = CHANNELS_STEREO;
alGetError();
alcGetError(m_device);
m_specs = specs;
std::list<std::shared_ptr<OpenALHandle> > stopSounds;
for(auto& handle : m_playingSounds)
if(handle->reinitialize())
stopSounds.push_back(handle);
for(auto& handle : m_pausedSounds)
if(handle->reinitialize())
stopSounds.push_back(handle);
for(auto& sound : stopSounds)
sound->stop();
}
}
}
alcSuspendContext(m_context);
cerr = alcGetError(m_device);
if(cerr == ALC_NO_ERROR)
@@ -1082,14 +957,12 @@ void OpenALDevice::updateStreams()
// if it really stopped
if(sound->m_eos && info != AL_INITIAL)
{
if(sound->m_stop)
sound->m_stop(sound->m_stop_data);
// pause or
if(sound->m_keep)
{
if(sound->m_stop)
sound->m_stop(sound->m_stop_data);
pauseSounds.push_back(sound);
}
// stop
else
stopSounds.push_back(sound);
@@ -1132,16 +1005,16 @@ void OpenALDevice::updateStreams()
/******************************************************************************/
OpenALDevice::OpenALDevice(DeviceSpecs specs, int buffersize, std::string name) :
m_name(name), m_playing(false), m_buffersize(buffersize)
m_playing(false), m_buffersize(buffersize)
{
// cannot determine how many channels or which format OpenAL uses, but
// it at least is able to play 16 bit stereo audio
specs.format = FORMAT_S16;
if(m_name.empty())
if(name.empty())
m_device = alcOpenDevice(nullptr);
else
m_device = alcOpenDevice(m_name.c_str());
m_device = alcOpenDevice(name.c_str());
if(!m_device)
AUD_THROW(DeviceException, "The audio device couldn't be opened with OpenAL.");
@@ -1155,8 +1028,6 @@ OpenALDevice::OpenALDevice(DeviceSpecs specs, int buffersize, std::string name)
m_context = alcCreateContext(m_device, attributes);
alcMakeContextCurrent(m_context);
m_checkDisconnect = alcIsExtensionPresent(m_device, "ALC_EXT_disconnect");
alcGetIntegerv(m_device, ALC_FREQUENCY, 1, (ALCint*)&specs.rate);
// check for specific formats and channel counts to be played back

View File

@@ -95,16 +95,11 @@ private:
/// Current status of the handle
Status m_status;
/// Whether the source is relative or not.
ALint m_relative;
/// Own device.
OpenALDevice* m_device;
AUD_LOCAL bool pause(bool keep);
AUD_LOCAL bool reinitialize();
// delete copy constructor and operator=
OpenALHandle(const OpenALHandle&) = delete;
OpenALHandle& operator=(const OpenALHandle&) = delete;
@@ -178,21 +173,11 @@ private:
*/
DeviceSpecs m_specs;
/**
* The device name.
*/
std::string m_name;
/**
* Whether the device has the AL_EXT_MCFORMATS extension.
*/
bool m_useMC;
/**
* Whether the ALC_EXT_disconnect extension is present and device disconnect should be checked repeatedly.
*/
bool m_checkDisconnect;
/**
* The list of sounds that are currently playing.
*/

View File

@@ -296,77 +296,72 @@ const Channel* ChannelMapperReader::CHANNEL_MAPS[] =
ChannelMapperReader::SURROUND71_MAP
};
constexpr float deg2rad(double angle)
{
return float(angle * M_PI / 180.0);
}
const float ChannelMapperReader::MONO_ANGLES[] =
{
deg2rad(0.0)
0.0f * M_PI / 180.0f
};
const float ChannelMapperReader::STEREO_ANGLES[] =
{
deg2rad(-90.0),
deg2rad( 90.0)
-90.0f * M_PI / 180.0f,
90.0f * M_PI / 180.0f
};
const float ChannelMapperReader::STEREO_LFE_ANGLES[] =
{
deg2rad(-90.0),
deg2rad( 90.0),
deg2rad( 0.0)
-90.0f * M_PI / 180.0f,
90.0f * M_PI / 180.0f,
0.0f * M_PI / 180.0f
};
const float ChannelMapperReader::SURROUND4_ANGLES[] =
{
deg2rad( -45.0),
deg2rad( 45.0),
deg2rad(-135.0),
deg2rad( 135.0)
-45.0f * M_PI / 180.0f,
45.0f * M_PI / 180.0f,
-135.0f * M_PI / 180.0f,
135.0f * M_PI / 180.0f
};
const float ChannelMapperReader::SURROUND5_ANGLES[] =
{
deg2rad( -30.0),
deg2rad( 30.0),
deg2rad( 0.0),
deg2rad(-110.0),
deg2rad( 110.0)
-30.0f * M_PI / 180.0f,
30.0f * M_PI / 180.0f,
0.0f * M_PI / 180.0f,
-110.0f * M_PI / 180.0f,
110.0f * M_PI / 180.0f
};
const float ChannelMapperReader::SURROUND51_ANGLES[] =
{
deg2rad( -30.0),
deg2rad( 30.0),
deg2rad( 0.0),
deg2rad( 0.0),
deg2rad(-110.0),
deg2rad( 110.0)
-30.0f * M_PI / 180.0f,
30.0f * M_PI / 180.0f,
0.0f * M_PI / 180.0f,
0.0f * M_PI / 180.0f,
-110.0f * M_PI / 180.0f,
110.0f * M_PI / 180.0f
};
const float ChannelMapperReader::SURROUND61_ANGLES[] =
{
deg2rad( -30.0),
deg2rad( 30.0),
deg2rad( 0.0),
deg2rad( 0.0),
deg2rad( 180.0),
deg2rad(-110.0),
deg2rad( 110.0)
-30.0f * M_PI / 180.0f,
30.0f * M_PI / 180.0f,
0.0f * M_PI / 180.0f,
0.0f * M_PI / 180.0f,
180.0f * M_PI / 180.0f,
-110.0f * M_PI / 180.0f,
110.0f * M_PI / 180.0f
};
const float ChannelMapperReader::SURROUND71_ANGLES[] =
{
deg2rad( -30.0),
deg2rad( 30.0),
deg2rad( 0.0),
deg2rad( 0.0),
deg2rad(-110.0),
deg2rad( 110.0),
deg2rad(-150.0),
deg2rad( 150.0)
-30.0f * M_PI / 180.0f,
30.0f * M_PI / 180.0f,
0.0f * M_PI / 180.0f,
0.0f * M_PI / 180.0f,
-110.0f * M_PI / 180.0f,
110.0f * M_PI / 180.0f,
-150.0f * M_PI / 180.0f,
150.0f * M_PI / 180.0f
};
const float* ChannelMapperReader::CHANNEL_ANGLES[] =

View File

@@ -16,6 +16,11 @@
#
# The Original Code is Copyright (C) 2006, Blender Foundation
# All rights reserved.
#
# The Original Code is: all of this file.
#
# Contributor(s): Jacques Beaurai, Erwin Coumans
#
# ***** END GPL LICENSE BLOCK *****
set(INC

View File

@@ -16,6 +16,10 @@
#
# The Original Code is Copyright (C) 2012, Blender Foundation
# All rights reserved.
#
# Contributor(s): Blender Foundation,
# Sergey Sharybin
#
# ***** END GPL LICENSE BLOCK *****
# NOTE: This file is automatically generated by bundle.sh script

Some files were not shown because too many files have changed in this diff.