Merge with trunk/2.5 at r25563
Most likely this will not compile for others; I'd appreciate reports of any build errors and missing files (I can never seem to get everything committed and all the build systems working without help). Porting over the sculpt/multires tools was a breeze; thanks go to Brecht for a design that didn't exclude ngons and was easy to port. Note that I've not yet tested externally-backed multires file support, and I still need to write version patch code for some cases.

Some notes:

* Like trunk, topological changes don't update multires correctly, so e.g. subdivide will duplicate multires data onto the new faces instead of subdividing it.
* Setting the debug value (Ctrl-Alt-D) to 1 turns on my experiments in speeding up sculpting on higher-res multires meshes (note that this makes partial redraw not completely accurate).
* There's a bug where you have to go through editmode to get out of sculpt mode; I'm not sure whether I inherited this or created it myself.
@@ -1,4 +1,4 @@
# $Id$
# $Id$
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
@@ -216,8 +216,8 @@ IF(UNIX AND NOT APPLE)

IF (WITH_OPENCOLLADA)
SET(OPENCOLLADA /usr/local/opencollada CACHE FILEPATH "OpenCollada Directory")
SET(OPENCOLLADA_LIBPATH ${OPENCOLLADA})
SET(OPENCOLLADA_LIB OpenCollada)
SET(OPENCOLLADA_LIBPATH ${OPENCOLLADA}/lib)
SET(OPENCOLLADA_LIB OpenCOLLADAStreamWriter OpenCOLLADASaxFrameworkLoader OpenCOLLADAFramework OpenCOLLADABaseUtils GeneratedSaxParser UTF MathMLSolver pcre ftoa Buffer)
SET(OPENCOLLADA_INC ${OPENCOLLADA})
SET(PCRE /usr CACHE FILEPATH "PCRE Directory")
SET(PCRE_LIBPATH ${PCRE}/lib)
@@ -437,7 +437,7 @@ IF(APPLE)
IF(CMAKE_OSX_ARCHITECTURES MATCHES i386)
SET(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin-8.x.i386)
ELSE(CMAKE_OSX_ARCHITECTURES MATCHES i386)
SET(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin-6.1-powerpc)
SET(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin-8.0.0-powerpc)
ENDIF(CMAKE_OSX_ARCHITECTURES MATCHES i386)
ENDIF(WITH_LIBS10.5)
@@ -531,19 +531,21 @@ IF(APPLE)
|
||||
SET(LLIBS stdc++ SystemStubs)
|
||||
|
||||
IF (WITH_COCOA)
|
||||
SET(PLATFORM_CFLAGS "-pipe -fPIC -funsigned-char -fno-strict-aliasing -DGHOST_COCOA")
|
||||
SET(PLATFORM_LINKFLAGS "-fexceptions -framework CoreServices -framework Foundation -framework IOKit -framework AppKit -framework Cocoa -framework Carbon -framework AudioUnit -framework AudioToolbox -framework CoreAudio")
|
||||
IF(USE_QTKIT)
|
||||
SET(PLATFORM_CFLAGS "${PLATFORM_CFLAGS} -DUSE_QTKIT")
|
||||
SET(PLATFORM_LINKFLAGS "${PLATFORM_LINKFLAGS} -framework QTKit")
|
||||
ELSE(USE_QTKIT)
|
||||
IF(WITH_QUICKTIME)
|
||||
SET(PLATFORM_LINKFLAGS "${PLATFORM_LINKFLAGS} -framework QuickTime")
|
||||
ENDIF(WITH_QUICKTIME)
|
||||
ENDIF(USE_QTKIT)
|
||||
SET(PLATFORM_CFLAGS "-pipe -funsigned-char -DGHOST_COCOA")
|
||||
SET(PLATFORM_LINKFLAGS "-fexceptions -framework CoreServices -framework Foundation -framework IOKit -framework AppKit -framework Cocoa -framework Carbon -framework AudioUnit -framework AudioToolbox -framework CoreAudio")
|
||||
IF(USE_QTKIT)
|
||||
SET(PLATFORM_CFLAGS "${PLATFORM_CFLAGS} -DUSE_QTKIT")
|
||||
SET(PLATFORM_LINKFLAGS "${PLATFORM_LINKFLAGS} -framework QTKit")
|
||||
IF(CMAKE_OSX_ARCHITECTURES MATCHES i386)
|
||||
SET(PLATFORM_LINKFLAGS "${PLATFORM_LINKFLAGS} -framework QuickTime")
|
||||
#libSDL still needs 32bit carbon quicktime
|
||||
ENDIF(CMAKE_OSX_ARCHITECTURES MATCHES i386)
|
||||
ELSEIF(WITH_QUICKTIME)
|
||||
SET(PLATFORM_LINKFLAGS "${PLATFORM_LINKFLAGS} -framework QuickTime")
|
||||
ENDIF(USE_QTKIT)
|
||||
ELSE (WITH_COCOA)
|
||||
SET(PLATFORM_CFLAGS "-pipe -fPIC -funsigned-char -fno-strict-aliasing")
|
||||
SET(PLATFORM_LINKFLAGS "-fexceptions -framework CoreServices -framework Foundation -framework IOKit -framework AppKit -framework Carbon -framework AGL -framework AudioUnit -framework AudioToolbox -framework CoreAudio -framework QuickTime")
|
||||
SET(PLATFORM_CFLAGS "-pipe -funsigned-char")
|
||||
SET(PLATFORM_LINKFLAGS "-fexceptions -framework CoreServices -framework Foundation -framework IOKit -framework AppKit -framework Carbon -framework AGL -framework AudioUnit -framework AudioToolbox -framework CoreAudio -framework QuickTime")
|
||||
ENDIF (WITH_COCOA)
|
||||
|
||||
IF(WITH_OPENMP)
|
||||
@@ -585,13 +587,17 @@ IF(APPLE)
|
||||
|
||||
SET(EXETYPE MACOSX_BUNDLE)
|
||||
|
||||
|
||||
SET(CMAKE_C_FLAGS_DEBUG "-fno-strict-aliasing -g")
|
||||
SET(CMAKE_CXX_FLAGS_DEBUG "-fno-strict-aliasing -g")
|
||||
IF(CMAKE_OSX_ARCHITECTURES MATCHES "i386")
|
||||
SET(CMAKE_CXX_FLAGS_RELEASE "-O3 -ftree-vectorize -msse -msse2 -fvariable-expansion-in-unroller")
|
||||
SET(CMAKE_C_FLAGS_RELEASE "-O3 -ftree-vectorize -msse -msse2 -fvariable-expansion-in-unroller")
|
||||
SET(CMAKE_CXX_FLAGS_RELEASE "-O3 -mdynamic-no-pic -ftree-vectorize -msse -msse2 -fvariable-expansion-in-unroller")
|
||||
SET(CMAKE_C_FLAGS_RELEASE "-O3 -mdynamic-no-pic -ftree-vectorize -msse -msse2 -fvariable-expansion-in-unroller")
|
||||
ELSEIF(CMAKE_OSX_ARCHITECTURES MATCHES "x86_64")
|
||||
SET(CMAKE_CXX_FLAGS_RELEASE "-O3 -ftree-vectorize -msse -msse2 -msse3 -mssse3 -fvariable-expansion-in-unroller")
|
||||
SET(CMAKE_C_FLAGS_RELEASE "-O3 -ftree-vectorize -msse -msse2 -msse3 -mssse3 -fvariable-expansion-in-unroller")
|
||||
SET(CMAKE_CXX_FLAGS_RELEASE "-O3 -mdynamic-no-pic -ftree-vectorize -msse -msse2 -msse3 -mssse3 -fvariable-expansion-in-unroller")
|
||||
SET(CMAKE_C_FLAGS_RELEASE "-O3 -mdynamic-no-pic -ftree-vectorize -msse -msse2 -msse3 -mssse3 -fvariable-expansion-in-unroller")
|
||||
ELSE(CMAKE_OSX_ARCHITECTURES MATCHES "x86_64")
|
||||
SET(CMAKE_C_FLAGS_RELEASE "-mdynamic-no-pic -fno-strict-aliasing")
|
||||
SET(CMAKE_CXX_FLAGS_RELEASE "-mdynamic-no-pic -fno-strict-aliasing")
|
||||
ENDIF(CMAKE_OSX_ARCHITECTURES MATCHES "i386")
|
||||
|
||||
# Better warnings
|
||||
|
||||
SConstruct (23 changed lines)
@@ -378,7 +378,7 @@ if not os.path.isdir ( B.root_build_dir):
os.makedirs ( B.root_build_dir + 'extern' )
os.makedirs ( B.root_build_dir + 'lib' )
os.makedirs ( B.root_build_dir + 'bin' )
if not os.path.isdir(B.doc_build_dir):
if not os.path.isdir(B.doc_build_dir) and env['WITH_BF_DOCS']:
os.makedirs ( B.doc_build_dir )

Help(opts.GenerateHelpText(env))
@@ -533,9 +533,10 @@ plugtargetlist = []
for tp, tn, tf in os.walk('release/plugins'):
if '.svn' in tn:
tn.remove('.svn')
df = tp[8:] # remove 'release/'
for f in tf:
pluglist.append(os.path.join(tp, f))
plugtargetlist.append( os.path.join(*([BLENDERPATH] + tp.split(os.sep)[1:] + [f])) )
plugtargetlist.append( os.path.join(BLENDERPATH, df, f) )

# header files for plugins
@@ -648,12 +649,14 @@ if not env['WITHOUT_BF_INSTALL']:

#------------ EPYDOC
if env['WITH_BF_DOCS']:
try: import epydoc
except: epydoc = None

if epydoc:
SConscript('source/gameengine/PyDoc/SConscript')
else:
print "No epydoc install detected, Python API and Gameengine API Docs will not be generated "

try:
import epydoc
except ImportError:
epydoc = None

if epydoc:
SConscript('source/gameengine/PyDoc/SConscript')
else:
print "No epydoc install detected, Python API and Gameengine API Docs will not be generated "
@@ -40,19 +40,28 @@ if MACOSX_ARCHITECTURE == 'x86_64' or MACOSX_ARCHITECTURE == 'ppc64':
|
||||
# Can be customized
|
||||
|
||||
if MACOSX_ARCHITECTURE == 'ppc':
|
||||
MAC_MIN_VERS = '10.3'
|
||||
MACOSX_SDK='/Developer/SDKs/MacOSX10.3.9.sdk'
|
||||
LCGDIR = '#../lib/darwin-6.1-powerpc'
|
||||
CC = 'gcc-3.3'
|
||||
CXX = 'g++-3.3'
|
||||
# ppc release are now made for 10.4
|
||||
# MAC_MIN_VERS = '10.3'
|
||||
# MACOSX_SDK='/Developer/SDKs/MacOSX10.3.9.sdk'
|
||||
# LCGDIR = '#../lib/darwin-6.1-powerpc'
|
||||
# CC = 'gcc-3.3'
|
||||
# CXX = 'g++-3.3'
|
||||
MAC_MIN_VERS = '10.4'
|
||||
MACOSX_DEPLOYMENT_TARGET = '10.4'
|
||||
MACOSX_SDK='/Developer/SDKs/MacOSX10.4u.sdk'
|
||||
LCGDIR = '#../lib/darwin-8.0.0-powerpc'
|
||||
CC = 'gcc-4.0'
|
||||
CXX = 'g++-4.0'
|
||||
elif MACOSX_ARCHITECTURE == 'i386':
|
||||
MAC_MIN_VERS = '10.4'
|
||||
MACOSX_DEPLOYMENT_TARGET = '10.4'
|
||||
MACOSX_SDK='/Developer/SDKs/MacOSX10.4u.sdk'
|
||||
LCGDIR = '#../lib/darwin-8.x.i386'
|
||||
CC = 'gcc-4.0'
|
||||
CXX = 'g++-4.0'
|
||||
else :
|
||||
MAC_MIN_VERS = '10.5'
|
||||
MACOSX_DEPLOYMENT_TARGET = '10.5'
|
||||
MACOSX_SDK='/Developer/SDKs/MacOSX10.5.sdk'
|
||||
LCGDIR = '#../lib/darwin-9.x.universal'
|
||||
CC = 'gcc-4.2'
|
||||
@@ -147,7 +156,7 @@ BF_JACK_INC = '${BF_JACK}/include/jack'
BF_JACK_LIB = 'jack'
BF_JACK_LIBPATH = '${BF_JACK}/lib'

WITH_BF_SNDFILE = False
WITH_BF_SNDFILE = True
BF_SNDFILE = LIBDIR + '/sndfile'
BF_SNDFILE_INC = '${BF_SNDFILE}/include'
BF_SNDFILE_LIB = 'sndfile'
@@ -205,7 +214,7 @@ BF_BULLET = '#extern/bullet2/src'
BF_BULLET_INC = '${BF_BULLET}'
BF_BULLET_LIB = 'extern_bullet'

WITH_BF_FFTW3 = False
WITH_BF_FFTW3 = True
BF_FFTW3 = LIBDIR + '/fftw3'
BF_FFTW3_INC = '${BF_FFTW3}/include'
BF_FFTW3_LIB = 'libfftw3'
@@ -249,7 +258,7 @@ BF_OPENGL_LIBPATH = '/System/Library/Frameworks/OpenGL.framework/Libraries'
BF_OPENGL_LINKFLAGS = ['-framework', 'OpenGL']

#OpenCollada flags
WITH_BF_COLLADA = True
WITH_BF_COLLADA = False
BF_COLLADA = '#source/blender/collada'
BF_COLLADA_INC = '${BF_COLLADA}'
BF_COLLADA_LIB = 'bf_collada'
@@ -264,6 +273,14 @@ BF_PCRE_LIBPATH = '${BF_PCRE}/lib'
#BF_EXPAT_LIB = 'expat'
#BF_EXPAT_LIBPATH = '/usr/lib'

#Ray trace optimization
WITH_BF_RAYOPTIMIZATION = False
if MACOSX_ARCHITECTURE == 'i386':
BF_RAYOPTIMIZATION_SSE_FLAGS = ['-msse']
elif MACOSX_ARCHITECTURE == 'x86_64':
BF_RAYOPTIMIZATION_SSE_FLAGS = ['-msse','-msse2']

#############################################################################
################### various compile settings and flags ##################
#############################################################################
@@ -303,8 +320,8 @@ if MAC_MIN_VERS == '10.3':
|
||||
LLIBS.append('crt3.o')
|
||||
|
||||
if USE_SDK==True:
|
||||
SDK_FLAGS=['-isysroot', MACOSX_SDK,'-mmacosx-version-min='+MAC_MIN_VERS]
|
||||
PLATFORM_LINKFLAGS = ['-mmacosx-version-min='+MAC_MIN_VERS,'-Wl','-syslibroot '+MACOSX_SDK]+PLATFORM_LINKFLAGS
|
||||
SDK_FLAGS=['-isysroot', MACOSX_SDK,'-mmacosx-version-min='+MAC_MIN_VERS,'-arch',MACOSX_ARCHITECTURE]
|
||||
PLATFORM_LINKFLAGS = ['-mmacosx-version-min='+MAC_MIN_VERS,'-Wl','-syslibroot '+MACOSX_SDK,'-arch',MACOSX_ARCHITECTURE]+PLATFORM_LINKFLAGS
|
||||
CCFLAGS=SDK_FLAGS+CCFLAGS
|
||||
CXXFLAGS=SDK_FLAGS+CXXFLAGS
|
||||
|
||||
|
||||
@@ -155,9 +155,9 @@ WITH_BF_COLLADA = False
|
||||
BF_COLLADA = '#source/blender/collada'
|
||||
BF_COLLADA_INC = '${BF_COLLADA}'
|
||||
BF_COLLADA_LIB = 'bf_collada'
|
||||
BF_OPENCOLLADA = ''
|
||||
BF_OPENCOLLADA_LIB = 'OpenCollada'
|
||||
BF_OPENCOLLADA_LIBPATH = '/usr/lib'
|
||||
BF_OPENCOLLADA = '/usr'
|
||||
BF_OPENCOLLADA_LIB = 'OpenCOLLADAStreamWriter OpenCOLLADASaxFrameworkLoader OpenCOLLADAFramework OpenCOLLADABaseUtils GeneratedSaxParser UTF MathMLSolver pcre Buffer ftoa'
|
||||
BF_OPENCOLLADA_LIBPATH = '${BF_OPENCOLLADA}/lib'
|
||||
BF_PCRE = ''
|
||||
BF_PCRE_LIB = 'pcre'
|
||||
BF_PCRE_LIBPATH = '/usr/lib'
|
||||
@@ -167,6 +167,10 @@ BF_EXPAT_LIBPATH = '/usr/lib'
|
||||
|
||||
WITH_BF_OPENMP = True
|
||||
|
||||
#Ray trace optimization
|
||||
WITH_BF_RAYOPTIMIZATION = False
|
||||
BF_RAYOPTIMIZATION_SSE_FLAGS = ['-msse','-pthread']
|
||||
|
||||
##
|
||||
CC = 'gcc'
|
||||
CXX = 'g++'
|
||||
|
||||
@@ -127,8 +127,19 @@ BF_OPENGL_LIB_STATIC = [ '${BF_OPENGL}/lib/libGL.a', '${BF_OPENGL}/lib/libGLU.a'
|
||||
'${BF_OPENGL}/lib/libXmu.a', '${BF_OPENGL}/lib/libXext.a',
|
||||
'${BF_OPENGL}/lib/libX11.a', '${BF_OPENGL}/lib/libXi.a' ]
|
||||
|
||||
# Disable Collada by default
|
||||
WITH_BF_COLLADA = False
|
||||
WITH_BF_COLLADA = True
|
||||
BF_COLLADA = '#source/blender/collada'
|
||||
BF_COLLADA_INC = '${BF_COLLADA}'
|
||||
BF_COLLADA_LIB = 'bf_collada'
|
||||
|
||||
BF_OPENCOLLADA = LIBDIR + '/opencollada'
|
||||
BF_OPENCOLLADA_INC = '${BF_OPENCOLLADA}/include'
|
||||
BF_OPENCOLLADA_LIB = 'OpenCOLLADAStreamWriter OpenCOLLADASaxFrameworkLoader OpenCOLLADAFramework OpenCOLLADABaseUtils GeneratedSaxParser UTF MathMLSolver xml2 pcre'
|
||||
BF_OPENCOLLADA_LIBPATH = '${BF_OPENCOLLADA}/lib'
|
||||
|
||||
#Ray trace optimization
|
||||
WITH_BF_RAYOPTIMIZATION = False
|
||||
BF_RAYOPTIMIZATION_SSE_FLAGS = ['-msse']
|
||||
|
||||
##
|
||||
CC = 'gcc'
|
||||
@@ -145,7 +156,7 @@ C_WARN = [ '-Wno-char-subscripts', '-Wdeclaration-after-statement' ]
|
||||
|
||||
CC_WARN = [ '-Wall' ]
|
||||
|
||||
LLIBS = ['-lshell32', '-lshfolder', '-lgdi32', '-lmsvcrt', '-lwinmm', '-lmingw32', '-lm', '-lws2_32', '-lz', '-lstdc++']
|
||||
LLIBS = ['-lshell32', '-lshfolder', '-lgdi32', '-lmsvcrt', '-lwinmm', '-lmingw32', '-lm', '-lws2_32', '-lz', '-lstdc++','-lole32','-luuid']
|
||||
|
||||
BF_DEBUG = False
|
||||
BF_DEBUG_CCFLAGS= ['-g']
|
||||
|
||||
@@ -148,6 +148,10 @@ BF_OPENCOLLADA_INC = '${BF_OPENCOLLADA}/include'
|
||||
BF_OPENCOLLADA_LIB = 'OpenCOLLADAStreamWriter OpenCOLLADASaxFrameworkLoader OpenCOLLADAFramework OpenCOLLADABaseUtils GeneratedSaxParser UTF MathMLSolver xml2 pcre'
|
||||
BF_OPENCOLLADA_LIBPATH = '${BF_OPENCOLLADA}/lib'
|
||||
|
||||
#Ray trace optimization
|
||||
WITH_BF_RAYOPTIMIZATION = False
|
||||
BF_RAYOPTIMIZATION_SSE_FLAGS = ['/arch:SSE']
|
||||
|
||||
WITH_BF_STATICOPENGL = False
|
||||
BF_OPENGL_INC = '${BF_OPENGL}/include'
|
||||
BF_OPENGL_LIBINC = '${BF_OPENGL}/lib'
|
||||
@@ -155,6 +159,7 @@ BF_OPENGL_LIB = 'opengl32 glu32'
|
||||
BF_OPENGL_LIB_STATIC = [ '${BF_OPENGL}/lib/libGL.a', '${BF_OPENGL}/lib/libGLU.a',
|
||||
'${BF_OPENGL}/lib/libXmu.a', '${BF_OPENGL}/lib/libXext.a',
|
||||
'${BF_OPENGL}/lib/libX11.a', '${BF_OPENGL}/lib/libXi.a' ]
|
||||
|
||||
CC = 'cl.exe'
|
||||
CXX = 'cl.exe'
|
||||
|
||||
|
||||
@@ -161,6 +161,10 @@ BF_OPENCOLLADA_INC = '${BF_OPENCOLLADA}/include'
|
||||
BF_OPENCOLLADA_LIB = 'OpenCOLLADAStreamWriter OpenCOLLADASaxFrameworkLoader OpenCOLLADAFramework OpenCOLLADABaseUtils GeneratedSaxParser UTF MathMLSolver xml2 pcre'
|
||||
BF_OPENCOLLADA_LIBPATH = '${BF_OPENCOLLADA}/lib'
|
||||
|
||||
#Ray trace optimization
|
||||
WITH_BF_RAYOPTIMIZATION = False
|
||||
BF_RAYOPTIMIZATION_SSE_FLAGS = ['/arch:SSE','/arch:SSE2']
|
||||
|
||||
WITH_BF_STATICOPENGL = False
|
||||
BF_OPENGL_INC = '${BF_OPENGL}/include'
|
||||
BF_OPENGL_LIBINC = '${BF_OPENGL}/lib'
|
||||
|
||||
extern/Eigen2/Eigen/src/Core/util/Macros.h (vendored, 2 changed lines)
@@ -39,7 +39,7 @@
// 16 byte alignment is only useful for vectorization. Since it affects the ABI, we need to enable 16 byte alignment on all
// platforms where vectorization might be enabled. In theory we could always enable alignment, but it can be a cause of problems
// on some platforms, so we just disable it in certain common platform (compiler+architecture combinations) to avoid these problems.
#if defined(__GNUC__) && !(defined(__i386__) || defined(__x86_64__) || defined(__powerpc__) || defined(__ia64__))
#if defined(__GNUC__) && !(defined(__i386__) || defined(__x86_64__) || defined(__powerpc__) || defined(__ia64__) || defined(__ppc__))
#define EIGEN_GCC_AND_ARCH_DOESNT_WANT_ALIGNMENT 1
#else
#define EIGEN_GCC_AND_ARCH_DOESNT_WANT_ALIGNMENT 0
@@ -152,6 +152,10 @@ BT_DECLARE_ALIGNED_ALLOCATOR();
|
||||
{
|
||||
return (proxyType == STATIC_PLANE_PROXYTYPE);
|
||||
}
|
||||
static SIMD_FORCE_INLINE bool isSoftBody(int proxyType)
|
||||
{
|
||||
return (proxyType == SOFTBODY_SHAPE_PROXYTYPE);
|
||||
}
|
||||
|
||||
}
|
||||
;
|
||||
|
||||
@@ -31,6 +31,7 @@ subject to the following restrictions:
|
||||
#include "LinearMath/btAabbUtil2.h"
|
||||
#include "LinearMath/btQuickprof.h"
|
||||
#include "LinearMath/btStackAlloc.h"
|
||||
#include "BulletSoftBody/btSoftBody.h"
|
||||
|
||||
//#define USE_BRUTEFORCE_RAYBROADPHASE 1
|
||||
//RECALCULATE_AABB is slower, but benefit is that you don't need to call 'stepSimulation' or 'updateAabbs' before using a rayTest
|
||||
@@ -411,6 +412,31 @@ void btCollisionWorld::rayTestSingle(const btTransform& rayFromTrans,const btTra
|
||||
// restore
|
||||
collisionObject->internalSetTemporaryCollisionShape(saveCollisionShape);
|
||||
}
|
||||
} else {
|
||||
if (collisionShape->isSoftBody()) {
|
||||
btSoftBody* softBody = static_cast<btSoftBody*>(collisionObject);
|
||||
btSoftBody::sRayCast softResult;
|
||||
if (softBody->rayTest(rayFromTrans.getOrigin(), rayToTrans.getOrigin(), softResult))
|
||||
{
|
||||
btCollisionWorld::LocalShapeInfo shapeInfo;
|
||||
shapeInfo.m_shapePart = 0;
|
||||
shapeInfo.m_triangleIndex = softResult.index;
|
||||
// get the normal
|
||||
btVector3 normal = softBody->m_faces[softResult.index].m_normal;
|
||||
btVector3 rayDir = rayToTrans.getOrigin() - rayFromTrans.getOrigin();
|
||||
if (normal.dot(rayDir) > 0) {
|
||||
// normal always point toward origin of the ray
|
||||
normal = -normal;
|
||||
}
|
||||
btCollisionWorld::LocalRayResult rayResult
|
||||
(collisionObject,
|
||||
&shapeInfo,
|
||||
normal,
|
||||
softResult.fraction);
|
||||
bool normalInWorldSpace = true;
|
||||
resultCallback.addSingleResult(rayResult,normalInWorldSpace);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -72,6 +72,10 @@ public:
|
||||
{
|
||||
return btBroadphaseProxy::isCompound(getShapeType());
|
||||
}
|
||||
SIMD_FORCE_INLINE bool isSoftBody() const
|
||||
{
|
||||
return btBroadphaseProxy::isSoftBody(getShapeType());
|
||||
}
|
||||
|
||||
///isInfinite is used to catch simulation error (aabb check)
|
||||
SIMD_FORCE_INLINE bool isInfinite() const
|
||||
|
||||
@@ -793,7 +793,7 @@ btSoftBody* btSoftBodyHelpers::CreateEllipsoid(btSoftBodyWorldInfo& worldInfo,c
|
||||
//
|
||||
btSoftBody* btSoftBodyHelpers::CreateFromTriMesh(btSoftBodyWorldInfo& worldInfo,const btScalar* vertices,
|
||||
const int* triangles,
|
||||
int ntriangles)
|
||||
int ntriangles, bool randomizeConstraints)
|
||||
{
|
||||
int maxidx=0;
|
||||
int i,j,ni;
|
||||
@@ -828,13 +828,16 @@ btSoftBody* btSoftBodyHelpers::CreateFromTriMesh(btSoftBodyWorldInfo& worldInfo
|
||||
#undef IDX
|
||||
psb->appendFace(idx[0],idx[1],idx[2]);
|
||||
}
|
||||
psb->randomizeConstraints();
|
||||
if (randomizeConstraints)
|
||||
{
|
||||
psb->randomizeConstraints();
|
||||
}
|
||||
return(psb);
|
||||
}
|
||||
|
||||
//
|
||||
btSoftBody* btSoftBodyHelpers::CreateFromConvexHull(btSoftBodyWorldInfo& worldInfo, const btVector3* vertices,
|
||||
int nvertices)
|
||||
int nvertices, bool randomizeConstraints)
|
||||
{
|
||||
HullDesc hdsc(QF_TRIANGLES,nvertices,vertices);
|
||||
HullResult hres;
|
||||
@@ -854,6 +857,9 @@ btSoftBody* btSoftBodyHelpers::CreateFromConvexHull(btSoftBodyWorldInfo& worldI
|
||||
psb->appendFace(idx[0],idx[1],idx[2]);
|
||||
}
|
||||
hlib.ReleaseResult(hres);
|
||||
psb->randomizeConstraints();
|
||||
if (randomizeConstraints)
|
||||
{
|
||||
psb->randomizeConstraints();
|
||||
}
|
||||
return(psb);
|
||||
}
|
||||
|
||||
@@ -109,11 +109,13 @@ struct btSoftBodyHelpers
|
||||
static btSoftBody* CreateFromTriMesh( btSoftBodyWorldInfo& worldInfo,
|
||||
const btScalar* vertices,
|
||||
const int* triangles,
|
||||
int ntriangles);
|
||||
int ntriangles,
|
||||
bool randomizeConstraints = true);
|
||||
/* Create from convex-hull */
|
||||
static btSoftBody* CreateFromConvexHull( btSoftBodyWorldInfo& worldInfo,
|
||||
const btVector3* vertices,
|
||||
int nvertices);
|
||||
int nvertices,
|
||||
bool randomizeConstraints = true);
|
||||
};
|
||||
|
||||
#endif //SOFT_BODY_HELPERS_H
|
||||
|
||||
extern/bullet2/src/SConscript (vendored, 2 changed lines)
@@ -42,4 +42,4 @@ env.BlenderLib ( libname = 'extern_bullet2collision_dispatch', sources=collision
|
||||
env.BlenderLib ( libname = 'extern_bullet2collision_gimpact', sources=collision_gimpact_src, includes=Split(incs), defines=Split(defs), libtype=['extern','player'], priority=[20,138], compileflags=cflags )
|
||||
env.BlenderLib ( libname = 'extern_bullet2collision_shapes', sources=collision_shapes_src, includes=Split(incs), defines=Split(defs), libtype=['extern','player'], priority=[20,138], compileflags=cflags )
|
||||
env.BlenderLib ( libname = 'extern_bullet2collision_narrowphase', sources=collision_narrowphase_src, includes=Split(incs), defines=Split(defs), libtype=['extern','player'], priority=[20,138], compileflags=cflags )
|
||||
env.BlenderLib ( libname = 'extern_bullet2softbody', sources=softbody_src, includes=Split(incs), defines=Split(defs), libtype=['extern','player'], priority=[18,135], compileflags=cflags )
|
||||
env.BlenderLib ( libname = 'extern_bullet2softbody', sources=softbody_src, includes=Split(incs), defines=Split(defs), libtype=['extern','player'], priority=[30,135], compileflags=cflags )
|
||||
|
||||
intern/audaspace/FX/AUD_RectifyFactory.cpp (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
/*
|
||||
* $Id$
|
||||
*
|
||||
* ***** BEGIN LGPL LICENSE BLOCK *****
|
||||
*
|
||||
* Copyright 2009 Jörg Hermann Müller
|
||||
*
|
||||
* This file is part of AudaSpace.
|
||||
*
|
||||
* AudaSpace is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* AudaSpace is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with AudaSpace. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* ***** END LGPL LICENSE BLOCK *****
|
||||
*/
|
||||
|
||||
#include "AUD_RectifyFactory.h"
|
||||
#include "AUD_RectifyReader.h"
|
||||
|
||||
AUD_RectifyFactory::AUD_RectifyFactory(AUD_IFactory* factory) :
|
||||
AUD_EffectFactory(factory) {}
|
||||
|
||||
AUD_RectifyFactory::AUD_RectifyFactory() :
|
||||
AUD_EffectFactory(0) {}
|
||||
|
||||
AUD_IReader* AUD_RectifyFactory::createReader()
|
||||
{
|
||||
AUD_IReader* reader = getReader();
|
||||
|
||||
if(reader != 0)
|
||||
{
|
||||
reader = new AUD_RectifyReader(reader); AUD_NEW("reader")
|
||||
}
|
||||
|
||||
return reader;
|
||||
}
|
||||
intern/audaspace/FX/AUD_RectifyFactory.h (new file, 51 lines)
@@ -0,0 +1,51 @@
|
||||
/*
|
||||
* $Id$
|
||||
*
|
||||
* ***** BEGIN LGPL LICENSE BLOCK *****
|
||||
*
|
||||
* Copyright 2009 Jörg Hermann Müller
|
||||
*
|
||||
* This file is part of AudaSpace.
|
||||
*
|
||||
* AudaSpace is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* AudaSpace is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with AudaSpace. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* ***** END LGPL LICENSE BLOCK *****
|
||||
*/
|
||||
|
||||
#ifndef AUD_RECTIFYFACTORY
|
||||
#define AUD_RECTIFYFACTORY
|
||||
|
||||
#include "AUD_EffectFactory.h"
|
||||
|
||||
/**
|
||||
* This factory rectifies another factory.
|
||||
*/
|
||||
class AUD_RectifyFactory : public AUD_EffectFactory
|
||||
{
|
||||
public:
|
||||
/**
|
||||
* Creates a new rectify factory.
|
||||
* \param factory The input factory.
|
||||
*/
|
||||
AUD_RectifyFactory(AUD_IFactory* factory = 0);
|
||||
|
||||
/**
|
||||
* Creates a new rectify factory.
|
||||
*/
|
||||
AUD_RectifyFactory();
|
||||
|
||||
virtual AUD_IReader* createReader();
|
||||
};
|
||||
|
||||
#endif //AUD_RECTIFYFACTORY
|
||||
intern/audaspace/FX/AUD_RectifyReader.cpp (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
/*
|
||||
* $Id$
|
||||
*
|
||||
* ***** BEGIN LGPL LICENSE BLOCK *****
|
||||
*
|
||||
* Copyright 2009 Jörg Hermann Müller
|
||||
*
|
||||
* This file is part of AudaSpace.
|
||||
*
|
||||
* AudaSpace is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* AudaSpace is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with AudaSpace. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* ***** END LGPL LICENSE BLOCK *****
|
||||
*/
|
||||
|
||||
#include "AUD_RectifyReader.h"
|
||||
#include "AUD_Buffer.h"
|
||||
|
||||
#include <cstring>
|
||||
|
||||
AUD_RectifyReader::AUD_RectifyReader(AUD_IReader* reader) :
|
||||
AUD_EffectReader(reader)
|
||||
{
|
||||
int bigendian = 1;
|
||||
bigendian = (((char*)&bigendian)[0]) ? 0: 1; // 1 if Big Endian
|
||||
|
||||
switch(m_reader->getSpecs().format)
|
||||
{
|
||||
case AUD_FORMAT_S16:
|
||||
m_rectify = AUD_rectify<int16_t>;
|
||||
break;
|
||||
case AUD_FORMAT_S32:
|
||||
m_rectify = AUD_rectify<int32_t>;
|
||||
break;
|
||||
case AUD_FORMAT_FLOAT32:
|
||||
m_rectify = AUD_rectify<float>;
|
||||
break;
|
||||
case AUD_FORMAT_FLOAT64:
|
||||
m_rectify = AUD_rectify<double>;
|
||||
break;
|
||||
case AUD_FORMAT_U8:
|
||||
m_rectify = AUD_rectify_u8;
|
||||
break;
|
||||
case AUD_FORMAT_S24:
|
||||
m_rectify = bigendian ? AUD_rectify_s24_be : AUD_rectify_s24_le;
|
||||
break;
|
||||
default:
|
||||
delete m_reader;
|
||||
AUD_THROW(AUD_ERROR_READER);
|
||||
}
|
||||
|
||||
m_buffer = new AUD_Buffer(); AUD_NEW("buffer")
|
||||
}
|
||||
|
||||
AUD_RectifyReader::~AUD_RectifyReader()
|
||||
{
|
||||
delete m_buffer; AUD_DELETE("buffer")
|
||||
}
|
||||
|
||||
void AUD_RectifyReader::read(int & length, sample_t* & buffer)
|
||||
{
|
||||
sample_t* buf;
|
||||
AUD_Specs specs = m_reader->getSpecs();
|
||||
|
||||
m_reader->read(length, buf);
|
||||
if(m_buffer->getSize() < length*AUD_SAMPLE_SIZE(specs))
|
||||
m_buffer->resize(length*AUD_SAMPLE_SIZE(specs));
|
||||
|
||||
buffer = m_buffer->getBuffer();
|
||||
|
||||
m_rectify(buffer, buf, length * specs.channels);
|
||||
}
|
||||
intern/audaspace/FX/AUD_RectifyReader.h (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
/*
|
||||
* $Id$
|
||||
*
|
||||
* ***** BEGIN LGPL LICENSE BLOCK *****
|
||||
*
|
||||
* Copyright 2009 Jörg Hermann Müller
|
||||
*
|
||||
* This file is part of AudaSpace.
|
||||
*
|
||||
* AudaSpace is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* AudaSpace is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with AudaSpace. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* ***** END LGPL LICENSE BLOCK *****
|
||||
*/
|
||||
|
||||
#ifndef AUD_RECTIFYREADER
|
||||
#define AUD_RECTIFYREADER
|
||||
|
||||
#include "AUD_EffectReader.h"
|
||||
#include "AUD_ConverterFunctions.h"
|
||||
class AUD_Buffer;
|
||||
|
||||
/**
|
||||
* This class reads another reader and rectifies it.
|
||||
*/
|
||||
class AUD_RectifyReader : public AUD_EffectReader
|
||||
{
|
||||
private:
|
||||
/**
|
||||
* The playback buffer.
|
||||
*/
|
||||
AUD_Buffer *m_buffer;
|
||||
|
||||
/**
|
||||
* Rectifying function.
|
||||
*/
|
||||
AUD_rectify_f m_rectify;
|
||||
|
||||
public:
|
||||
/**
|
||||
* Creates a new rectify reader.
|
||||
* \param reader The reader to read from.
|
||||
* \exception AUD_Exception Thrown if the reader specified is NULL.
|
||||
*/
|
||||
AUD_RectifyReader(AUD_IReader* reader);
|
||||
|
||||
/**
|
||||
* Destroys the reader.
|
||||
*/
|
||||
virtual ~AUD_RectifyReader();
|
||||
|
||||
virtual void read(int & length, sample_t* & buffer);
|
||||
};
|
||||
|
||||
#endif //AUD_RECTIFYREADER
|
||||
@@ -35,7 +35,7 @@ include nan_compile.mk

CCFLAGS += $(LEVEL_1_CPP_WARNINGS)

CPPFLAGS += -I$(LCGDIR)/samplerate/include/
CPPFLAGS += -I$(NAN_SAMPLERATE)/include
CPPFLAGS += -I../ffmpeg
CPPFLAGS += -I../FX
CPPFLAGS += -I../SDL
@@ -31,6 +31,7 @@
#include "AUD_LimiterFactory.h"
#include "AUD_PingPongFactory.h"
#include "AUD_LoopFactory.h"
#include "AUD_RectifyFactory.h"
#include "AUD_ReadDevice.h"
#include "AUD_SourceCaps.h"
#include "AUD_IReader.h"
@@ -285,6 +286,20 @@ int AUD_stopLoop(AUD_Handle* handle)
|
||||
return false;
|
||||
}
|
||||
|
||||
AUD_Sound* AUD_rectifySound(AUD_Sound* sound)
|
||||
{
|
||||
assert(sound);
|
||||
|
||||
try
|
||||
{
|
||||
return new AUD_RectifyFactory(sound);
|
||||
}
|
||||
catch(AUD_Exception)
|
||||
{
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
void AUD_unload(AUD_Sound* sound)
|
||||
{
|
||||
assert(sound);
|
||||
|
||||
@@ -149,6 +149,13 @@ extern AUD_Sound* AUD_loopSound(AUD_Sound* sound);
|
||||
*/
|
||||
extern int AUD_stopLoop(AUD_Handle* handle);
|
||||
|
||||
/**
|
||||
* Rectifies a sound.
|
||||
* \param sound The sound to rectify.
|
||||
* \return A handle of the rectified sound.
|
||||
*/
|
||||
extern AUD_Sound* AUD_rectifySound(AUD_Sound* sound);
|
||||
|
||||
/**
|
||||
* Unloads a sound of any type.
|
||||
* \param sound The handle of the sound.
|
||||
|
||||
@@ -500,3 +500,43 @@ void AUD_volume_adjust_s24_be(sample_t* target, sample_t* source,
|
||||
}
|
||||
}
|
||||
|
||||
void AUD_rectify_u8(sample_t* target, sample_t* source, int count)
|
||||
{
|
||||
for(int i=0; i<count; i++)
|
||||
target[i] = source[i] < 0x80 ? 0x0100 - source[i] : source[i];
|
||||
}
|
||||
|
||||
void AUD_rectify_s24_le(sample_t* target, sample_t* source, int count)
|
||||
{
|
||||
count *= 3;
|
||||
int value;
|
||||
|
||||
for(int i=0; i<count; i+=3)
|
||||
{
|
||||
value = source[i+2] << 16 | source[i+1] << 8 | source[i];
|
||||
value |= (((value & 0x800000) >> 23) * 255) << 24;
|
||||
if(value < 0)
|
||||
value = -value;
|
||||
target[i+2] = value >> 16;
|
||||
target[i+1] = value >> 8;
|
||||
target[i] = value;
|
||||
}
|
||||
}
|
||||
|
||||
void AUD_rectify_s24_be(sample_t* target, sample_t* source, int count)
|
||||
{
|
||||
count *= 3;
|
||||
int value;
|
||||
|
||||
for(int i=0; i < count; i+=3)
|
||||
{
|
||||
value = source[i] << 16 | source[i+1] << 8 | source[i+2];
|
||||
value |= (((value & 0x800000) >> 23) * 255) << 24;
|
||||
if(value < 0)
|
||||
value = -value;
|
||||
target[i] = value >> 16;
|
||||
target[i+1] = value >> 8;
|
||||
target[i+2] = value;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -46,6 +46,8 @@ typedef void (*AUD_convert_f)(sample_t* target, sample_t* source, int length);
|
||||
typedef void (*AUD_volume_adjust_f)(sample_t* target, sample_t* source,
|
||||
int count, float volume);
|
||||
|
||||
typedef void (*AUD_rectify_f)(sample_t* target, sample_t* source, int count);
|
||||
|
||||
template <class T>
|
||||
void AUD_convert_copy(sample_t* target, sample_t* source, int length)
|
||||
{
|
||||
@@ -153,4 +155,19 @@ void AUD_volume_adjust_s24_le(sample_t* target, sample_t* source,
|
||||
void AUD_volume_adjust_s24_be(sample_t* target, sample_t* source,
|
||||
int count, float volume);
|
||||
|
||||
template <class T>
|
||||
void AUD_rectify(sample_t* target, sample_t* source, int count)
|
||||
{
|
||||
T* t = (T*)target;
|
||||
T* s = (T*)source;
|
||||
for(int i=0; i < count; i++)
|
||||
t[i] = s[i] < 0 ? -s[i] : s[i];
|
||||
}
|
||||
|
||||
void AUD_rectify_u8(sample_t* target, sample_t* source, int count);
|
||||
|
||||
void AUD_rectify_s24_le(sample_t* target, sample_t* source, int count);
|
||||
|
||||
void AUD_rectify_s24_be(sample_t* target, sample_t* source, int count);
|
||||
|
||||
#endif //AUD_CONVERTERFUNCTIONS
|
||||
|
||||
@@ -310,7 +310,7 @@ void BOP_intersectCoplanarFaces(BOP_Mesh* mesh,
|
||||
}
|
||||
|
||||
MT_Vector3 p3p1 = p1-p3;
|
||||
MT_Plane3 plane3((p3p1.cross(normal).normalized()),p3);
|
||||
MT_Plane3 plane3((p3p1.cross(normal).safe_normalized()),p3);
|
||||
|
||||
sA.m_cfg1 = BOP_Segment::createVertexCfg(3);
|
||||
sA.m_v1 = faceA->getVertex(2);
|
||||
@@ -528,7 +528,7 @@ void BOP_mergeSort(MT_Point3 *points, unsigned int *face, unsigned int &size, bo
|
||||
invertB = false;
|
||||
if (face[1] == 1) {
|
||||
|
||||
// invertA<EFBFBD>?
|
||||
// invertAø?
|
||||
for(i=0;i<size;i++) {
|
||||
if (position[i] == 1) {
|
||||
invertA = true;
|
||||
@@ -537,7 +537,7 @@ void BOP_mergeSort(MT_Point3 *points, unsigned int *face, unsigned int &size, bo
|
||||
else if (position[i] == 0) break;
|
||||
}
|
||||
|
||||
// invertB<EFBFBD>?
|
||||
// invertBø?
|
||||
if (size == 4) {
|
||||
for(i=0;i<size;i++) {
|
||||
if (position[i] == 3) {
|
||||
@@ -549,7 +549,7 @@ void BOP_mergeSort(MT_Point3 *points, unsigned int *face, unsigned int &size, bo
|
||||
}
|
||||
}
|
||||
else if (face[1] == 2) {
|
||||
// invertB<EFBFBD>?
|
||||
// invertBø?
|
||||
for(i=0;i<size;i++) {
|
||||
if (position[i] == 2) {
|
||||
invertB = true;
|
||||
|
||||
@@ -41,9 +41,14 @@ IF(APPLE)
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_DisplayManagerWin32.cpp")
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_SystemWin32.cpp")
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_WindowWin32.cpp")
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_DropTargetWin32.cpp")
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_DisplayManagerX11.cpp")
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_SystemX11.cpp")
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_WindowX11.cpp")
|
||||
|
||||
IF(WITH_QUICKTIME)
|
||||
ADD_DEFINITIONS(-DWITH_QUICKTIME)
|
||||
ENDIF(WITH_QUICKTIME)
|
||||
ELSE(APPLE)
|
||||
IF(WIN32)
|
||||
SET(INC ${INC} ${WINTAB_INC})
|
||||
@@ -59,6 +64,7 @@ ELSE(APPLE)
|
||||
ELSE(WIN32)
|
||||
SET(INC ${INC} ${X11_X11_INCLUDE_PATH})
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_DisplayManagerWin32.cpp")
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_DropTargetWin32.cpp")
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_SystemWin32.cpp")
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_WindowWin32.cpp")
|
||||
LIST(REMOVE_ITEM SRC "${CMAKE_CURRENT_SOURCE_DIR}/intern/GHOST_DisplayManagerCarbon.cpp")
|
||||
|
||||
@@ -8,37 +8,57 @@ window_system = env['OURPLATFORM']
|
||||
|
||||
sources = env.Glob('intern/*.cpp')
|
||||
if window_system == 'darwin':
|
||||
sources += env.Glob('intern/*.mm')
|
||||
sources += env.Glob('intern/*.mm')
|
||||
|
||||
|
||||
pf = ['GHOST_DisplayManager', 'GHOST_System', 'GHOST_Window']
|
||||
pf = ['GHOST_DisplayManager', 'GHOST_System', 'GHOST_Window', 'GHOST_DropTarget']
|
||||
defs=['_USE_MATH_DEFINES']
|
||||
|
||||
if window_system in ('linux2', 'openbsd3', 'sunos5', 'freebsd6', 'irix6'):
|
||||
for f in pf:
|
||||
sources.remove('intern' + os.sep + f + 'Win32.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'Carbon.cpp')
|
||||
for f in pf:
|
||||
try:
|
||||
sources.remove('intern' + os.sep + f + 'Win32.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'Carbon.cpp')
|
||||
except ValueError:
|
||||
pass
|
||||
elif window_system in ('win32-vc', 'win32-mingw', 'cygwin', 'linuxcross', 'win64-vc'):
|
||||
for f in pf:
|
||||
sources.remove('intern' + os.sep + f + 'X11.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'Carbon.cpp')
|
||||
for f in pf:
|
||||
try:
|
||||
sources.remove('intern' + os.sep + f + 'X11.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'Carbon.cpp')
|
||||
except ValueError:
|
||||
pass
|
||||
elif window_system == 'darwin':
|
||||
if env['WITH_GHOST_COCOA']:
|
||||
for f in pf:
|
||||
sources.remove('intern' + os.sep + f + 'Win32.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'X11.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'Carbon.cpp')
|
||||
else:
|
||||
for f in pf:
|
||||
sources.remove('intern' + os.sep + f + 'Win32.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'X11.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'Cocoa.mm')
|
||||
if env['WITH_GHOST_COCOA']:
|
||||
if env['WITH_BF_QUICKTIME']:
|
||||
defs.append('WITH_QUICKTIME')
|
||||
if env['USE_QTKIT']:
|
||||
defs.append('USE_QTKIT')
|
||||
for f in pf:
|
||||
try:
|
||||
sources.remove('intern' + os.sep + f + 'Win32.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'X11.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'Carbon.cpp')
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
for f in pf:
|
||||
try:
|
||||
sources.remove('intern' + os.sep + f + 'Win32.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'X11.cpp')
|
||||
sources.remove('intern' + os.sep + f + 'Cocoa.mm')
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
else:
|
||||
print "Unknown window system specified."
|
||||
Exit()
|
||||
print "Unknown window system specified."
|
||||
Exit()
|
||||
|
||||
if env['BF_GHOST_DEBUG']:
|
||||
defs.append('BF_GHOST_DEBUG')
|
||||
|
||||
incs = '. ../string ' + env['BF_OPENGL_INC']
|
||||
if window_system in ('win32-vc', 'win32-mingw', 'cygwin', 'linuxcross', 'win64-vc'):
|
||||
incs = env['BF_WINTAB_INC'] + ' ' + incs
|
||||
env.BlenderLib ('bf_ghost', sources, Split(incs), defines=['_USE_MATH_DEFINES'], libtype=['intern','player'], priority = [40,15] )
|
||||
incs = env['BF_WINTAB_INC'] + ' ' + incs
|
||||
env.BlenderLib ('bf_ghost', sources, Split(incs), defines=defs, libtype=['intern','player'], priority = [40,15] )
|
||||
|
||||
|
||||
@@ -37,12 +37,17 @@
#ifdef WIN32
#ifdef _DEBUG
#pragma warning (disable:4786) // suppress stl-MSVC debug info warning
#define GHOST_DEBUG
// #define GHOST_DEBUG
#endif // _DEBUG
#endif // WIN32

#ifdef BF_GHOST_DEBUG
#define GHOST_DEBUG // spit ghost events to stdout
#endif // BF_GHOST_DEBUG

#ifdef GHOST_DEBUG
#include <iostream>
#include <stdio.h> //for printf()
#endif // GHOST_DEBUG
intern/ghost/intern/GHOST_DropTargetWin32.cpp (new file, 426 lines)
@@ -0,0 +1,426 @@
|
||||
/**
|
||||
* $Id$
|
||||
* ***** BEGIN GPL LICENSE BLOCK *****
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU General Public License
|
||||
* as published by the Free Software Foundation; either version 2
|
||||
* of the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software Foundation,
|
||||
* Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
*
|
||||
* The Original Code is Copyright (C) 2001-2002 by NaN Holding BV.
|
||||
* All rights reserved.
|
||||
*
|
||||
* The Original Code is: all of this file.
|
||||
*
|
||||
* Contributor(s): none yet.
|
||||
*
|
||||
* ***** END GPL LICENSE BLOCK *****
|
||||
*/
|
||||
|
||||
#include "GHOST_Debug.h"
|
||||
#include "GHOST_DropTargetWin32.h"
|
||||
|
||||
#ifdef GHOST_DEBUG
|
||||
// utility
|
||||
void printLastError(void);
|
||||
#endif // GHOST_DEBUG
|
||||
|
||||
|
||||
GHOST_DropTargetWin32::GHOST_DropTargetWin32(GHOST_WindowWin32 * window, GHOST_SystemWin32 * system)
|
||||
:
|
||||
m_window(window),
|
||||
m_system(system)
|
||||
{
|
||||
m_cRef = 1;
|
||||
m_hWnd = window->getHWND();
|
||||
m_draggedObjectType = GHOST_kDragnDropTypeUnknown;
|
||||
|
||||
// register our window as drop target
|
||||
::RegisterDragDrop(m_hWnd, this);
|
||||
}
|
||||
|
||||
GHOST_DropTargetWin32::~GHOST_DropTargetWin32()
|
||||
{
|
||||
::RevokeDragDrop(m_hWnd);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* IUnknown::QueryInterface
|
||||
*/
|
||||
HRESULT __stdcall GHOST_DropTargetWin32::QueryInterface (REFIID riid, void ** ppvObj)
|
||||
{
|
||||
|
||||
if (!ppvObj)
|
||||
return E_INVALIDARG;
|
||||
*ppvObj = NULL;
|
||||
|
||||
if(riid == IID_IUnknown || riid == IID_IDropTarget)
|
||||
{
|
||||
AddRef();
|
||||
*ppvObj = (void*)this;
|
||||
return S_OK;
|
||||
}
|
||||
else
|
||||
{
|
||||
*ppvObj = 0;
|
||||
return E_NOINTERFACE;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* IUnknown::AddRef
|
||||
*/
|
||||
|
||||
ULONG __stdcall GHOST_DropTargetWin32::AddRef(void)
|
||||
{
|
||||
return ::InterlockedIncrement(&m_cRef);
|
||||
}
|
||||
|
||||
/*
|
||||
* IUnknown::Release
|
||||
*/
|
||||
ULONG __stdcall GHOST_DropTargetWin32::Release(void)
|
||||
{
|
||||
ULONG refs = ::InterlockedDecrement(&m_cRef);
|
||||
|
||||
if(refs == 0)
|
||||
{
|
||||
delete this;
|
||||
return 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
return refs;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Implementation of IDropTarget::DragEnter
|
||||
*/
|
||||
HRESULT __stdcall GHOST_DropTargetWin32::DragEnter(IDataObject * pDataObject, DWORD grfKeyState, POINTL pt, DWORD * pdwEffect)
|
||||
{
|
||||
// we don't know yet if we accept the drop.
|
||||
m_window->setAcceptDragOperation(false);
|
||||
*pdwEffect = DROPEFFECT_NONE;
|
||||
|
||||
m_draggedObjectType = getGhostType(pDataObject);
|
||||
m_system->pushDragDropEvent(GHOST_kEventDraggingEntered, m_draggedObjectType, m_window, pt.x, pt.y, NULL);
|
||||
return S_OK;
|
||||
}
|
||||
|
||||
/*
|
||||
* Implementation of IDropTarget::DragOver
|
||||
*/
|
||||
HRESULT __stdcall GHOST_DropTargetWin32::DragOver(DWORD grfKeyState, POINTL pt, DWORD * pdwEffect)
|
||||
{
|
||||
if(m_window->canAcceptDragOperation())
|
||||
{
|
||||
*pdwEffect = allowedDropEffect(*pdwEffect);
|
||||
}
|
||||
else
|
||||
{
|
||||
*pdwEffect = DROPEFFECT_NONE;
|
||||
//*pdwEffect = DROPEFFECT_COPY; // XXX Uncomment to test drop. Drop will not be called if pdwEffect == DROPEFFECT_NONE.
|
||||
}
|
||||
m_system->pushDragDropEvent(GHOST_kEventDraggingUpdated, m_draggedObjectType, m_window, pt.x, pt.y, NULL);
|
||||
return S_OK;
|
||||
}
|
||||
|
||||
/*
|
||||
* Implementation of IDropTarget::DragLeave
|
||||
*/
|
||||
HRESULT __stdcall GHOST_DropTargetWin32::DragLeave(void)
|
||||
{
|
||||
m_system->pushDragDropEvent(GHOST_kEventDraggingExited, m_draggedObjectType, m_window, 0, 0, NULL);
|
||||
m_draggedObjectType = GHOST_kDragnDropTypeUnknown;
|
||||
return S_OK;
|
||||
}
|
||||
|
||||
/* Implementation of IDropTarget::Drop
|
||||
* This function will not be called if pdwEffect is set to DROPEFFECT_NONE in
|
||||
* the implementation of IDropTarget::DragOver
|
||||
*/
|
||||
HRESULT __stdcall GHOST_DropTargetWin32::Drop(IDataObject * pDataObject, DWORD grfKeyState, POINTL pt, DWORD * pdwEffect)
|
||||
{
|
||||
void * data = getGhostData(pDataObject);
|
||||
if(m_window->canAcceptDragOperation())
|
||||
{
|
||||
*pdwEffect = allowedDropEffect(*pdwEffect);
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
*pdwEffect = DROPEFFECT_NONE;
|
||||
}
|
||||
if (data)
|
||||
m_system->pushDragDropEvent(GHOST_kEventDraggingDropDone, m_draggedObjectType, m_window, pt.x, pt.y, data );
|
||||
|
||||
m_draggedObjectType = GHOST_kDragnDropTypeUnknown;
|
||||
return S_OK;
|
||||
}
|
||||
|
||||
/*
|
||||
* Helpers
|
||||
*/
|
||||
|
||||
DWORD GHOST_DropTargetWin32::allowedDropEffect(DWORD dwAllowed)
|
||||
{
|
||||
DWORD dwEffect = DROPEFFECT_NONE;
|
||||
if(dwAllowed & DROPEFFECT_COPY)
|
||||
dwEffect = DROPEFFECT_COPY;
|
||||
|
||||
return dwEffect;
|
||||
}
|
||||
|
||||
GHOST_TDragnDropTypes GHOST_DropTargetWin32::getGhostType(IDataObject * pDataObject)
|
||||
{
|
||||
/* Text
|
||||
* Note: Unicode text is aviable as CF_TEXT too, the system can do the
|
||||
* conversion, but we do the conversion ourself with WC_NO_BEST_FIT_CHARS.
|
||||
*/
|
||||
FORMATETC fmtetc = { CF_TEXT, 0, DVASPECT_CONTENT, -1, TYMED_HGLOBAL };
|
||||
if(pDataObject->QueryGetData(&fmtetc) == S_OK)
|
||||
{
|
||||
return GHOST_kDragnDropTypeString;
|
||||
}
|
||||
|
||||
// Filesnames
|
||||
fmtetc.cfFormat = CF_HDROP;
|
||||
if(pDataObject->QueryGetData(&fmtetc) == S_OK)
|
||||
{
|
||||
return GHOST_kDragnDropTypeFilenames;
|
||||
}
|
||||
|
||||
return GHOST_kDragnDropTypeUnknown;
|
||||
}
|
||||
|
||||
void * GHOST_DropTargetWin32::getGhostData(IDataObject * pDataObject)
|
||||
{
|
||||
GHOST_TDragnDropTypes type = getGhostType(pDataObject);
|
||||
switch(type)
|
||||
{
|
||||
case GHOST_kDragnDropTypeFilenames:
|
||||
return getDropDataAsFilenames(pDataObject);
|
||||
break;
|
||||
case GHOST_kDragnDropTypeString:
|
||||
return getDropDataAsString(pDataObject);
|
||||
break;
|
||||
case GHOST_kDragnDropTypeBitmap:
|
||||
//return getDropDataAsBitmap(pDataObject);
|
||||
break;
|
||||
default:
|
||||
#ifdef GHOST_DEBUG
|
||||
::printf("\nGHOST_kDragnDropTypeUnknown");
|
||||
#endif // GHOST_DEBUG
|
||||
return NULL;
|
||||
break;
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void * GHOST_DropTargetWin32::getDropDataAsFilenames(IDataObject * pDataObject)
|
||||
{
|
||||
UINT totfiles, nvalid=0;
|
||||
WCHAR fpath [MAX_PATH];
|
||||
char * temp_path;
|
||||
GHOST_TStringArray *strArray = NULL;
|
||||
FORMATETC fmtetc = { CF_HDROP, 0, DVASPECT_CONTENT, -1, TYMED_HGLOBAL };
|
||||
STGMEDIUM stgmed;
|
||||
HDROP hdrop;
|
||||
|
||||
// Check if dataobject supplies the format we want.
|
||||
// Double checking here, first in getGhostType.
|
||||
if(pDataObject->QueryGetData(&fmtetc) == S_OK)
|
||||
{
|
||||
if(pDataObject->GetData(&fmtetc, &stgmed) == S_OK)
|
||||
{
|
||||
hdrop = (HDROP)::GlobalLock(stgmed.hGlobal);
|
||||
|
||||
totfiles = ::DragQueryFileW ( hdrop, -1, NULL, 0 );
|
||||
if (!totfiles)
|
||||
{
|
||||
::GlobalUnlock(stgmed.hGlobal);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
strArray = (GHOST_TStringArray*) ::malloc(sizeof(GHOST_TStringArray));
|
||||
strArray->count = 0;
|
||||
strArray->strings = (GHOST_TUns8**) ::malloc(totfiles*sizeof(GHOST_TUns8*));
|
||||
|
||||
for ( UINT nfile = 0; nfile < totfiles; nfile++ )
|
||||
{
|
||||
if ( ::DragQueryFileW ( hdrop, nfile, fpath, MAX_PATH ) > 0 )
|
||||
{
|
||||
if ( !WideCharToANSI(fpath, temp_path) )
|
||||
{
|
||||
continue;
|
||||
}
|
||||
// Just ignore paths that could not be converted verbatim.
|
||||
if (strpbrk(temp_path, "?"))
|
||||
{
|
||||
#ifdef GHOST_DEBUG
|
||||
::printf("\ndiscarding path that contains illegal characters: %s", temp_path);
|
||||
#endif // GHOST_DEBUG
|
||||
::free(temp_path);
|
||||
temp_path = NULL;
|
||||
continue;
|
||||
}
|
||||
strArray->strings[nvalid] = (GHOST_TUns8*) temp_path;
|
||||
strArray->count = nvalid+1;
|
||||
nvalid++;
|
||||
}
|
||||
}
|
||||
// Free up memory.
|
||||
::GlobalUnlock(stgmed.hGlobal);
|
||||
::ReleaseStgMedium(&stgmed);
|
||||
|
||||
return strArray;
|
||||
}
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void * GHOST_DropTargetWin32::getDropDataAsString(IDataObject * pDataObject)
|
||||
{
|
||||
char* tmp_string;
|
||||
FORMATETC fmtetc = { CF_UNICODETEXT, 0, DVASPECT_CONTENT, -1, TYMED_HGLOBAL };
|
||||
STGMEDIUM stgmed;
|
||||
|
||||
// Try unicode first.
|
||||
// Check if dataobject supplies the format we want.
|
||||
if(pDataObject->QueryGetData(&fmtetc) == S_OK)
|
||||
{
|
||||
if(pDataObject->GetData(&fmtetc, &stgmed) == S_OK)
|
||||
{
|
||||
LPCWSTR wstr = (LPCWSTR)::GlobalLock(stgmed.hGlobal);
|
||||
if ( !WideCharToANSI(wstr, tmp_string) )
|
||||
{
|
||||
::GlobalUnlock(stgmed.hGlobal);
|
||||
return NULL;
|
||||
}
|
||||
// Free memory
|
||||
::GlobalUnlock(stgmed.hGlobal);
|
||||
::ReleaseStgMedium(&stgmed);
|
||||
#ifdef GHOST_DEBUG
|
||||
::printf("\n<converted droped unicode string>\n%s\n</droped converted unicode string>\n",tmp_string);
|
||||
#endif // GHOST_DEBUG
|
||||
return tmp_string;
|
||||
}
|
||||
}
|
||||
|
||||
fmtetc.cfFormat = CF_TEXT;
|
||||
|
||||
if(pDataObject->QueryGetData(&fmtetc) == S_OK)
|
||||
{
|
||||
if(pDataObject->GetData(&fmtetc, &stgmed) == S_OK)
|
||||
{
|
||||
char * str = (char*)::GlobalLock(stgmed.hGlobal);
|
||||
|
||||
tmp_string = (char*)::malloc(::strlen(str)+1);
|
||||
if ( !tmp_string )
|
||||
{
|
||||
::GlobalUnlock(stgmed.hGlobal);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if ( !::strcpy(tmp_string, str) )
|
||||
{
|
||||
::free(tmp_string);
|
||||
::GlobalUnlock(stgmed.hGlobal);
|
||||
return NULL;
|
||||
}
|
||||
// Free memory
|
||||
::GlobalUnlock(stgmed.hGlobal);
|
||||
::ReleaseStgMedium(&stgmed);
|
||||
|
||||
return tmp_string;
|
||||
}
|
||||
}
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
int GHOST_DropTargetWin32::WideCharToANSI(LPCWSTR in, char * &out)
|
||||
{
|
||||
int size;
|
||||
out = NULL; //caller should free if != NULL
|
||||
|
||||
// Get the required size.
|
||||
size = ::WideCharToMultiByte(CP_ACP, //System Default Codepage
|
||||
0x00000400, // WC_NO_BEST_FIT_CHARS
|
||||
in,
|
||||
-1, //-1 null terminated, makes output null terminated too.
|
||||
NULL,
|
||||
0,
|
||||
NULL,NULL
|
||||
);
|
||||
|
||||
if(!size)
|
||||
{
|
||||
#ifdef GHOST_DEBUG
|
||||
::printLastError();
|
||||
#endif // GHOST_DEBUG
|
||||
return 0;
|
||||
}
|
||||
|
||||
out = (char*)::malloc(size);
|
||||
if (!out)
|
||||
{
|
||||
::printf("\nmalloc failed!!!");
|
||||
return 0;
|
||||
}
|
||||
|
||||
size = ::WideCharToMultiByte(CP_ACP,
|
||||
0x00000400,
|
||||
in,
|
||||
-1,
|
||||
(LPSTR) out,
|
||||
size,
|
||||
NULL,NULL
|
||||
);
|
||||
|
||||
if(!size)
|
||||
{
|
||||
#ifdef GHOST_DEBUG
|
||||
::printLastError();
|
||||
#endif //GHOST_DEBUG
|
||||
::free(out);
|
||||
out = NULL;
|
||||
}
|
||||
return size;
|
||||
}
|
||||
|
||||
#ifdef GHOST_DEBUG
|
||||
void printLastError(void)
|
||||
{
|
||||
LPTSTR s;
|
||||
DWORD err;
|
||||
|
||||
err = GetLastError();
|
||||
if(FormatMessage(FORMAT_MESSAGE_ALLOCATE_BUFFER |
|
||||
FORMAT_MESSAGE_FROM_SYSTEM,
|
||||
NULL,
|
||||
err,
|
||||
0,
|
||||
(LPTSTR)&s,
|
||||
0,
|
||||
NULL)
|
||||
)
|
||||
{
|
||||
printf("\nLastError: (%d) %s\n", (int)err, s);
|
||||
LocalFree(s);
|
||||
}
|
||||
}
|
||||
#endif // GHOST_DEBUG
|
||||
|
||||
intern/ghost/intern/GHOST_DropTargetWin32.h (new file, 155 lines)
@@ -0,0 +1,155 @@
|
||||
/**
|
||||
* $Id$
|
||||
* ***** BEGIN GPL LICENSE BLOCK *****
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU General Public License
|
||||
* as published by the Free Software Foundation; either version 2
|
||||
* of the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software Foundation,
|
||||
* Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
*
|
||||
* The Original Code is Copyright (C) 2001-2002 by NaN Holding BV.
|
||||
* All rights reserved.
|
||||
*
|
||||
* The Original Code is: all of this file.
|
||||
*
|
||||
* Contributor(s): none yet.
|
||||
*
|
||||
* ***** END GPL LICENSE BLOCK *****
|
||||
*/
|
||||
#ifndef _GHOST_DROP_TARGET_WIN32_H_
|
||||
#define _GHOST_DROP_TARGET_WIN32_H_
|
||||
|
||||
#include <windows.h>
|
||||
#include <string.h>
|
||||
#include <GHOST_Types.h>
|
||||
#include "GHOST_WindowWin32.h"
|
||||
#include "GHOST_SystemWin32.h"
|
||||
|
||||
class GHOST_DropTargetWin32 : public IDropTarget
|
||||
{
|
||||
public:
|
||||
/* IUnknownd implementation.
|
||||
* Enables clients to get pointers to other interfaces on a given object
|
||||
* through the QueryInterface method, and manage the existence of the object
|
||||
* through the AddRef and Release methods. All other COM interfaces are
|
||||
* inherited, directly or indirectly, from IUnknown. Therefore, the three
|
||||
* methods in IUnknown are the first entries in the VTable for every interface.
|
||||
*/
|
||||
HRESULT __stdcall QueryInterface (REFIID riid, void ** ppvObj);
|
||||
ULONG __stdcall AddRef (void);
|
||||
ULONG __stdcall Release (void);
|
||||
|
||||
/* IDropTarget implementation
|
||||
+ The IDropTarget interface is one of the interfaces you implement to
|
||||
provide drag-and-drop operations in your application. It contains methods
|
||||
used in any application that can be a target for data during a
|
||||
drag-and-drop operation. A drop-target application is responsible for:
|
||||
*
|
||||
* - Determining the effect of the drop on the target application.
|
||||
* - Incorporating any valid dropped data when the drop occurs.
|
||||
* - Communicating target feedback to the source so the source application
|
||||
* can provide appropriate visual feedback such as setting the cursor.
|
||||
* - Implementing drag scrolling.
|
||||
* - Registering and revoking its application windows as drop targets.
|
||||
*
|
||||
* The IDropTarget interface contains methods that handle all these
|
||||
* responsibilities except registering and revoking the application window
|
||||
* as a drop target, for which you must call the RegisterDragDrop and the
|
||||
* RevokeDragDrop functions.
|
||||
*/
|
||||
|
||||
HRESULT __stdcall DragEnter (IDataObject * pDataObject, DWORD grfKeyState, POINTL pt, DWORD * pdwEffect);
|
||||
HRESULT __stdcall DragOver (DWORD grfKeyState, POINTL pt, DWORD * pdwEffect);
|
||||
HRESULT __stdcall DragLeave (void);
|
||||
HRESULT __stdcall Drop (IDataObject * pDataObject, DWORD grfKeyState, POINTL pt, DWORD * pdwEffect);
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* With the modifier keys, we want to distinguish left and right keys.
|
||||
* Sometimes this is not possible (Windows ME for instance). Then, we want
|
||||
* events generated for both keys.
|
||||
* @param window The window to register as drop target.
|
||||
* @param system The associated system.
|
||||
*/
|
||||
GHOST_DropTargetWin32(GHOST_WindowWin32 * window, GHOST_SystemWin32 * system);
|
||||
|
||||
/**
|
||||
* Destructor
|
||||
* Do NOT destroy directly. Use Release() instead to make COM happy.
|
||||
*/
|
||||
~GHOST_DropTargetWin32();
|
||||
|
||||
private:
|
||||
|
||||
/* Internal helper functions */
|
||||
|
||||
/**
|
||||
* Base the effect on those allowed by the dropsource.
|
||||
* @param dwAllowed Drop sources allowed drop effect.
|
||||
* @return The allowed drop effect.
|
||||
*/
|
||||
DWORD allowedDropEffect(DWORD dwAllowed);
|
||||
|
||||
/**
|
||||
* Query DataObject for the data types it supports.
|
||||
* @param pDataObject Pointer to the DataObject.
|
||||
* @return GHOST data type.
|
||||
*/
|
||||
GHOST_TDragnDropTypes getGhostType(IDataObject * pDataObject);
|
||||
|
||||
/**
|
||||
* Get data to pass in event.
|
||||
* It checks the type and calls specific functions for each type.
|
||||
* @param pDataObject Pointer to the DataObject.
|
||||
* @return Pointer to data.
|
||||
*/
|
||||
void * getGhostData(IDataObject * pDataObject);
|
||||
|
||||
/**
|
||||
* Allocate data as file array to pass in event.
|
||||
* @param pDataObject Pointer to the DataObject.
|
||||
* @return Pointer to data.
|
||||
*/
|
||||
void * getDropDataAsFilenames(IDataObject * pDataObject);
|
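For background, file drops arrive through the standard CF_HDROP clipboard format, which can be unpacked with the shell's DragQueryFile API (declared in shellapi.h, normally pulled in by windows.h). The hypothetical helper below only counts the dropped files; building the GHOST_TStringArray that this method actually returns would work along the same lines but is not reproduced here:

// Hypothetical helper sketch (assumed name countDroppedFiles; not from this diff).
static UINT countDroppedFiles(IDataObject *pDataObject)
{
	// CF_HDROP data is delivered as an HGLOBAL containing a DROPFILES block.
	FORMATETC fmtetc = { CF_HDROP, NULL, DVASPECT_CONTENT, -1, TYMED_HGLOBAL };
	STGMEDIUM stgmed;
	UINT totalFiles = 0;

	if (pDataObject->GetData(&fmtetc, &stgmed) == S_OK) {
		HDROP hdrop = (HDROP)::GlobalLock(stgmed.hGlobal);
		if (hdrop) {
			// An index of 0xFFFFFFFF asks DragQueryFile for the number of dropped files.
			totalFiles = ::DragQueryFile(hdrop, 0xFFFFFFFF, NULL, 0);
			::GlobalUnlock(stgmed.hGlobal);
		}
		::ReleaseStgMedium(&stgmed);
	}
	return totalFiles;
}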
||||
|
||||
/**
|
||||
* Allocate data as string to pass in event.
|
||||
* @param pDataObject Pointer to the DataObject.
|
||||
* @return Pointer to data.
|
||||
*/
|
||||
void * getDropDataAsString(IDataObject * pDataObject);
|
||||
|
||||
/**
|
||||
* Convert Unicode to ANSI, replacing unconvertible chars with '?'.
|
||||
* The ANSI codepage is the system default codepage,
|
||||
* and can change from system to system.
|
||||
* @param in LPCWSTR.
|
||||
* @param out char *. Is set to NULL on failure.
|
||||
* @return 0 on failure. Else the size of the string including '\0'.
|
||||
*/
|
||||
int WideCharToANSI(LPCWSTR in, char * &out);
|
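As a point of reference, such a conversion is usually a two-step call to the Win32 WideCharToMultiByte API against the CP_ACP (system ANSI) code page: first to measure the buffer, then to convert, with unmappable characters replaced by the code page's default character ('?'). A rough sketch under those assumptions follows (it also assumes stdlib.h for malloc/free; the real .cpp implementation is not shown in this diff):

// Illustrative only: typical Unicode -> ANSI conversion via WideCharToMultiByte.
int GHOST_DropTargetWin32::WideCharToANSI(LPCWSTR in, char * &out)
{
	// First call measures the required buffer size, including the terminating '\0'.
	int size = ::WideCharToMultiByte(CP_ACP, 0, in, -1, NULL, 0, NULL, NULL);

	out = NULL;
	if (size <= 0)
		return 0;

	// malloc/free are used purely for illustration; the real code may allocate differently.
	out = (char *)::malloc(size);
	if (!out)
		return 0;

	// Second call converts; characters with no ANSI mapping become the code page's
	// default character, which is '?' for the usual system code pages.
	size = ::WideCharToMultiByte(CP_ACP, 0, in, -1, out, size, NULL, NULL);
	if (size == 0) {
		::free(out);
		out = NULL;
	}
	return size;
}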
||||
|
||||
/* Private member variables */
|
||||
/* COM reference count. */
|
||||
LONG m_cRef;
|
||||
/* Handle of the associated window. */
|
||||
HWND m_hWnd;
|
||||
/* The associated GHOST_WindowWin32. */
|
||||
GHOST_WindowWin32 * m_window;
|
||||
/* The System. */
|
||||
GHOST_SystemWin32 * m_system;
|
||||
/* Data type of the dragged object */
|
||||
GHOST_TDragnDropTypes m_draggedObjectType;
|
||||
|
||||
};
|
||||
|
||||
#endif // _GHOST_DROP_TARGET_WIN32_H_
|
||||
@@ -49,7 +49,7 @@ bool GHOST_EventPrinter::processEvent(GHOST_IEvent* event)
|
||||
|
||||
if (event->getType() == GHOST_kEventWindowUpdate) return false;
|
||||
|
||||
std::cout << "GHOST_EventPrinter::processEvent, time: " << (GHOST_TInt32)event->getTime() << ", type: ";
|
||||
std::cout << "\nGHOST_EventPrinter::processEvent, time: " << (GHOST_TInt32)event->getTime() << ", type: ";
|
||||
switch (event->getType()) {
|
||||
case GHOST_kEventUnknown:
|
||||
std::cout << "GHOST_kEventUnknown"; handled = false;
|
||||
@@ -125,19 +125,21 @@ bool GHOST_EventPrinter::processEvent(GHOST_IEvent* event)
|
||||
case GHOST_kEventDraggingDropDone:
|
||||
{
|
||||
GHOST_TEventDragnDropData* dragnDropData = (GHOST_TEventDragnDropData*)((GHOST_IEvent*)event)->getData();
|
||||
std::cout << "GHOST_kEventDraggingDropDone, dragged object type : " << dragnDropData->dataType;
|
||||
std::cout << "GHOST_kEventDraggingDropDone,";
|
||||
std::cout << " mouse at x=" << dragnDropData->x << " y=" << dragnDropData->y;
|
||||
switch (dragnDropData->dataType) {
|
||||
case GHOST_kDragnDropTypeString:
|
||||
std::cout << " string received = " << (char*)dragnDropData->data;
|
||||
std::cout << " type : GHOST_kDragnDropTypeString,";
|
||||
std::cout << "\n String received = " << (char*)dragnDropData->data;
|
||||
break;
|
||||
case GHOST_kDragnDropTypeFilenames:
|
||||
{
|
||||
GHOST_TStringArray *strArray = (GHOST_TStringArray*)dragnDropData->data;
|
||||
int i;
|
||||
std::cout << "\nReceived " << strArray->count << " filenames";
|
||||
std::cout << " type : GHOST_kDragnDropTypeFilenames,";
|
||||
std::cout << "\n Received " << strArray->count << " filename" << (strArray->count > 1 ? "s:" : ":");
|
||||
for (i=0;i<strArray->count;i++)
|
||||
std::cout << " Filename #" << i << ": " << strArray->strings[i];
|
||||
std::cout << "\n File[" << i << "] : " << strArray->strings[i];
|
||||
}
|
||||
break;
|
||||
default:
|
||||
@@ -192,7 +194,6 @@ bool GHOST_EventPrinter::processEvent(GHOST_IEvent* event)
|
||||
std::cout << "not found"; handled = false;
|
||||
break;
|
||||
}
|
||||
std::cout << "\n";
|
||||
return handled;
|
||||
}
|
||||
|
||||
|
||||
@@ -140,6 +140,24 @@ public:
|
||||
*/
|
||||
GHOST_TUns8 handleQuitRequest();
|
||||
|
||||
/**
|
||||
 * Handles the Cocoa openFile event.
 * Displays a confirmation request panel if changes have been made since the last save.
|
||||
*/
|
||||
bool handleOpenDocumentRequest(void *filepathStr);
|
||||
|
||||
/**
|
||||
* Handles a drag'n'drop destination event. Called by GHOST_WindowCocoa window subclass
|
||||
* @param eventType The type of drag'n'drop event
|
||||
* @param draggedObjectType The type of object concerned (currently an array of file names, a string, or a TIFF image)
|
||||
* @param mouseX x mouse coordinate (in cocoa base window coordinates)
|
||||
* @param mouseY y mouse coordinate
|
||||
* @param window The window on which the event occurred
|
||||
* @return Indication whether the event was handled.
|
||||
*/
|
||||
GHOST_TSuccess handleDraggingEvent(GHOST_TEventType eventType, GHOST_TDragnDropTypes draggedObjectType,
|
||||
GHOST_WindowCocoa* window, int mouseX, int mouseY, void* data);
|
||||
|
||||
/***************************************************************************************
|
||||
** Cursor management functionality
|
||||
***************************************************************************************/
|
||||
@@ -200,18 +218,12 @@ public:
|
||||
*/
|
||||
GHOST_TSuccess handleWindowEvent(GHOST_TEventType eventType, GHOST_WindowCocoa* window);
|
||||
|
||||
|
||||
/**
|
||||
* Handles a drag'n'drop destination event. Called by GHOST_WindowCocoa window subclass
|
||||
* @param eventType The type of drag'n'drop event
|
||||
* @param draggedObjectType The type object concerned (currently array of file names, string, TIFF image)
|
||||
* @param mouseX x mouse coordinate (in cocoa base window coordinates)
|
||||
* @param mouseY y mouse coordinate
|
||||
* @param window The window on which the event occured
|
||||
* Handles the Cocoa event indicating that the application has become active (again)
|
||||
* @return Indication whether the event was handled.
|
||||
*/
|
||||
GHOST_TSuccess handleDraggingEvent(GHOST_TEventType eventType, GHOST_TDragnDropTypes draggedObjectType,
|
||||
GHOST_WindowCocoa* window, int mouseX, int mouseY, void* data);
|
||||
GHOST_TSuccess handleApplicationBecomeActiveEvent();
|
||||
|
||||
|
||||
protected:
|
||||
/**
|
||||
|
||||
@@ -389,7 +389,29 @@ extern "C" int GHOST_HACK_getFirstFile(char buf[FIRSTFILEBUFLG]) {
|
||||
}
|
||||
}
|
||||
|
||||
#if defined(WITH_QUICKTIME) && !defined(USE_QTKIT)
|
||||
//Need to place this quicktime function in an ObjC file
|
||||
//It is used to avoid a memory leak when raising the QuickTime "compression settings" standard dialog
|
||||
extern "C" {
|
||||
struct bContext;
|
||||
struct wmOperator;
|
||||
extern int fromcocoa_request_qtcodec_settings(bContext *C, wmOperator *op);
|
||||
|
||||
|
||||
int cocoa_request_qtcodec_settings(bContext *C, wmOperator *op)
|
||||
{
|
||||
int result;
|
||||
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
|
||||
|
||||
result = fromcocoa_request_qtcodec_settings(C, op);
|
||||
|
||||
[pool drain];
|
||||
return result;
|
||||
}
|
||||
};
|
||||
#endif
|
||||
|
||||
|
||||
#pragma mark Cocoa objects
|
||||
|
||||
/**
|
||||
@@ -403,6 +425,7 @@ extern "C" int GHOST_HACK_getFirstFile(char buf[FIRSTFILEBUFLG]) {
|
||||
- (BOOL)application:(NSApplication *)theApplication openFile:(NSString *)filename;
|
||||
- (NSApplicationTerminateReply)applicationShouldTerminate:(NSApplication *)sender;
|
||||
- (void)applicationWillTerminate:(NSNotification *)aNotification;
|
||||
- (void)applicationWillBecomeActive:(NSNotification *)aNotification;
|
||||
@end
|
||||
|
||||
@implementation CocoaAppDelegate : NSObject
|
||||
@@ -413,9 +436,7 @@ extern "C" int GHOST_HACK_getFirstFile(char buf[FIRSTFILEBUFLG]) {
|
||||
|
||||
- (BOOL)application:(NSApplication *)theApplication openFile:(NSString *)filename
|
||||
{
|
||||
NSLog(@"\nGet open file event from cocoa : %@",filename);
|
||||
systemCocoa->handleDraggingEvent(GHOST_kEventDraggingDropOnIcon, GHOST_kDragnDropTypeFilenames, nil, 0, 0, [NSArray arrayWithObject:filename]);
|
||||
return YES;
|
||||
return systemCocoa->handleOpenDocumentRequest(filename);
|
||||
}
|
||||
|
||||
- (NSApplicationTerminateReply)applicationShouldTerminate:(NSApplication *)sender
|
||||
@@ -436,6 +457,11 @@ extern "C" int GHOST_HACK_getFirstFile(char buf[FIRSTFILEBUFLG]) {
|
||||
/*G.afbreek = 0; //Let Cocoa perform the termination at the end
|
||||
WM_exit(C);*/
|
||||
}
|
||||
|
||||
- (void)applicationWillBecomeActive:(NSNotification *)aNotification
|
||||
{
|
||||
systemCocoa->handleApplicationBecomeActiveEvent();
|
||||
}
|
||||
@end
|
||||
|
||||
|
||||
@@ -530,6 +556,9 @@ GHOST_TSuccess GHOST_SystemCocoa::init()
|
||||
|
||||
[windowMenu addItemWithTitle:@"Zoom" action:@selector(performZoom:) keyEquivalent:@""];
|
||||
|
||||
menuItem = [windowMenu addItemWithTitle:@"Close" action:@selector(performClose:) keyEquivalent:@"w"];
|
||||
[menuItem setKeyEquivalentModifierMask:NSCommandKeyMask];
|
||||
|
||||
menuItem = [[NSMenuItem alloc] init];
|
||||
[menuItem setSubmenu:windowMenu];
|
||||
|
||||
@@ -706,13 +735,10 @@ GHOST_TSuccess GHOST_SystemCocoa::setCursorPosition(GHOST_TInt32 x, GHOST_TInt32
|
||||
|
||||
GHOST_TSuccess GHOST_SystemCocoa::getModifierKeys(GHOST_ModifierKeys& keys) const
|
||||
{
|
||||
unsigned int modifiers = [[NSApp currentEvent] modifierFlags];
|
||||
//Direct query to modifierFlags can be used in 10.6
|
||||
|
||||
keys.set(GHOST_kModifierKeyCommand, (modifiers & NSCommandKeyMask) ? true : false);
|
||||
keys.set(GHOST_kModifierKeyLeftAlt, (modifiers & NSAlternateKeyMask) ? true : false);
|
||||
keys.set(GHOST_kModifierKeyLeftShift, (modifiers & NSShiftKeyMask) ? true : false);
|
||||
keys.set(GHOST_kModifierKeyLeftControl, (modifiers & NSControlKeyMask) ? true : false);
|
||||
keys.set(GHOST_kModifierKeyCommand, (m_modifierMask & NSCommandKeyMask) ? true : false);
|
||||
keys.set(GHOST_kModifierKeyLeftAlt, (m_modifierMask & NSAlternateKeyMask) ? true : false);
|
||||
keys.set(GHOST_kModifierKeyLeftShift, (m_modifierMask & NSShiftKeyMask) ? true : false);
|
||||
keys.set(GHOST_kModifierKeyLeftControl, (m_modifierMask & NSControlKeyMask) ? true : false);
|
||||
|
||||
return GHOST_kSuccess;
|
||||
}
|
||||
@@ -740,8 +766,6 @@ bool GHOST_SystemCocoa::processEvents(bool waitForEvent)
|
||||
bool anyProcessed = false;
|
||||
NSEvent *event;
|
||||
|
||||
m_outsideLoopEventProcessed = false;
|
||||
|
||||
// SetMouseCoalescingEnabled(false, NULL);
|
||||
//TODO : implement timer ??
|
||||
|
||||
@@ -838,9 +862,55 @@ bool GHOST_SystemCocoa::processEvents(bool waitForEvent)
|
||||
} while (event!= nil);
|
||||
//} while (waitForEvent && !anyProcessed); Needed only for timer implementation
|
||||
|
||||
if (m_outsideLoopEventProcessed) {
|
||||
m_outsideLoopEventProcessed = false;
|
||||
return true;
|
||||
}
|
||||
|
||||
return anyProcessed;
|
||||
}
|
||||
|
||||
//Note: called from NSApplication delegate
|
||||
GHOST_TSuccess GHOST_SystemCocoa::handleApplicationBecomeActiveEvent()
|
||||
{
|
||||
//Update the modifier key mask, as its status may have changed while the application was not active
|
||||
//(that is when update events are sent to another application)
|
||||
unsigned int modifiers;
|
||||
GHOST_IWindow* window = m_windowManager->getActiveWindow();
|
||||
|
||||
#ifdef MAC_OS_X_VERSION_10_6
|
||||
modifiers = [NSEvent modifierFlags];
|
||||
#else
|
||||
//If built against an older SDK, check whether we are running on 10.6 to use the correct function
|
||||
if ([NSEvent respondsToSelector:@selector(modifierFlags)]) {
|
||||
#if MAC_OS_X_VERSION_MIN_REQUIRED <= MAC_OS_X_VERSION_10_4
|
||||
modifiers = (unsigned int)[NSEvent modifierFlags];
|
||||
#else
|
||||
modifiers = (NSUInteger)[NSEvent modifierFlags];
|
||||
#endif
|
||||
}
|
||||
else {
|
||||
//TODO: need to find a better workaround for the missing cocoa "getModifierFlag" function in 10.4/10.5
|
||||
modifiers = 0;
|
||||
}
|
||||
#endif
|
||||
|
||||
return anyProcessed || m_outsideLoopEventProcessed;
|
||||
if ((modifiers & NSShiftKeyMask) != (m_modifierMask & NSShiftKeyMask)) {
|
||||
pushEvent( new GHOST_EventKey(getMilliSeconds(), (modifiers & NSShiftKeyMask)?GHOST_kEventKeyDown:GHOST_kEventKeyUp, window, GHOST_kKeyLeftShift) );
|
||||
}
|
||||
if ((modifiers & NSControlKeyMask) != (m_modifierMask & NSControlKeyMask)) {
|
||||
pushEvent( new GHOST_EventKey(getMilliSeconds(), (modifiers & NSControlKeyMask)?GHOST_kEventKeyDown:GHOST_kEventKeyUp, window, GHOST_kKeyLeftControl) );
|
||||
}
|
||||
if ((modifiers & NSAlternateKeyMask) != (m_modifierMask & NSAlternateKeyMask)) {
|
||||
pushEvent( new GHOST_EventKey(getMilliSeconds(), (modifiers & NSAlternateKeyMask)?GHOST_kEventKeyDown:GHOST_kEventKeyUp, window, GHOST_kKeyLeftAlt) );
|
||||
}
|
||||
if ((modifiers & NSCommandKeyMask) != (m_modifierMask & NSCommandKeyMask)) {
|
||||
pushEvent( new GHOST_EventKey(getMilliSeconds(), (modifiers & NSCommandKeyMask)?GHOST_kEventKeyDown:GHOST_kEventKeyUp, window, GHOST_kKeyCommand) );
|
||||
}
|
||||
|
||||
m_modifierMask = modifiers;
|
||||
|
||||
return GHOST_kSuccess;
|
||||
}
|
||||
|
||||
//Note: called from NSWindow delegate
|
||||
@@ -886,7 +956,7 @@ GHOST_TSuccess GHOST_SystemCocoa::handleWindowEvent(GHOST_TEventType eventType,
|
||||
GHOST_TSuccess GHOST_SystemCocoa::handleDraggingEvent(GHOST_TEventType eventType, GHOST_TDragnDropTypes draggedObjectType,
|
||||
GHOST_WindowCocoa* window, int mouseX, int mouseY, void* data)
|
||||
{
|
||||
if (!validWindow(window)) {
|
||||
if (!validWindow(window) && (eventType != GHOST_kEventDraggingDropOnIcon)) {
|
||||
return GHOST_kFailure;
|
||||
}
|
||||
switch(eventType)
|
||||
@@ -1014,7 +1084,34 @@ GHOST_TUns8 GHOST_SystemCocoa::handleQuitRequest()
|
||||
return GHOST_kExitCancel;
|
||||
}
|
||||
|
||||
bool GHOST_SystemCocoa::handleOpenDocumentRequest(void *filepathStr)
|
||||
{
|
||||
NSString *filepath = (NSString*)filepathStr;
|
||||
int confirmOpen = NSAlertAlternateReturn;
|
||||
NSArray *windowsList;
|
||||
|
||||
//Check whether any open window has unsaved changes
|
||||
if (m_windowManager->getAnyModifiedState())
|
||||
{
|
||||
confirmOpen = NSRunAlertPanel([NSString stringWithFormat:@"Opening %@",[filepath lastPathComponent]],
|
||||
@"Current document has not been saved.\nDo you really want to proceed?",
|
||||
@"Cancel", @"Open", nil);
|
||||
}
|
||||
|
||||
//Give back focus to the blender window
|
||||
windowsList = [NSApp orderedWindows];
|
||||
if ([windowsList count]) {
|
||||
[[windowsList objectAtIndex:0] makeKeyAndOrderFront:nil];
|
||||
}
|
||||
|
||||
if (confirmOpen == NSAlertAlternateReturn)
|
||||
{
|
||||
handleDraggingEvent(GHOST_kEventDraggingDropOnIcon,GHOST_kDragnDropTypeFilenames,NULL,0,0, [NSArray arrayWithObject:filepath]);
|
||||
return YES;
|
||||
}
|
||||
else return NO;
|
||||
}
|
||||
|
||||
GHOST_TSuccess GHOST_SystemCocoa::handleTabletEvent(void *eventPtr, short eventType)
|
||||
{
|
||||
NSEvent *event = (NSEvent *)eventPtr;
|
||||
|
||||
@@ -39,6 +39,7 @@
|
||||
#endif
|
||||
|
||||
#include "GHOST_SystemWin32.h"
|
||||
#include "GHOST_EventDragnDrop.h"
|
||||
|
||||
// win64 doesn't define GWL_USERDATA
|
||||
#ifdef WIN32
|
||||
@@ -138,10 +139,15 @@ GHOST_SystemWin32::GHOST_SystemWin32()
|
||||
m_displayManager = new GHOST_DisplayManagerWin32 ();
|
||||
GHOST_ASSERT(m_displayManager, "GHOST_SystemWin32::GHOST_SystemWin32(): m_displayManager==0\n");
|
||||
m_displayManager->initialize();
|
||||
|
||||
// Require COM for GHOST_DropTargetWin32 created in GHOST_WindowWin32.
|
||||
OleInitialize(0);
|
||||
}
|
||||
|
||||
GHOST_SystemWin32::~GHOST_SystemWin32()
|
||||
{
|
||||
// Shutdown COM
|
||||
OleUninitialize();
|
||||
}
|
||||
|
||||
|
||||
@@ -187,7 +193,7 @@ GHOST_IWindow* GHOST_SystemWin32::createWindow(
|
||||
bool stereoVisual, const GHOST_TEmbedderWindowID parentWindow )
|
||||
{
|
||||
GHOST_Window* window = 0;
|
||||
window = new GHOST_WindowWin32 (title, left, top, width, height, state, type, stereoVisual);
|
||||
window = new GHOST_WindowWin32 (this, title, left, top, width, height, state, type, stereoVisual);
|
||||
if (window) {
|
||||
if (window->getValid()) {
|
||||
// Store the pointer to the window
|
||||
@@ -248,10 +254,12 @@ bool GHOST_SystemWin32::processEvents(bool waitForEvent)
|
||||
GHOST_TSuccess GHOST_SystemWin32::getCursorPosition(GHOST_TInt32& x, GHOST_TInt32& y) const
|
||||
{
|
||||
POINT point;
|
||||
::GetCursorPos(&point);
|
||||
x = point.x;
|
||||
y = point.y;
|
||||
return GHOST_kSuccess;
|
||||
if(::GetCursorPos(&point)){
|
||||
x = point.x;
|
||||
y = point.y;
|
||||
return GHOST_kSuccess;
|
||||
}
|
||||
return GHOST_kFailure;
|
||||
}
|
||||
|
||||
|
||||
@@ -499,11 +507,56 @@ GHOST_EventButton* GHOST_SystemWin32::processButtonEvent(GHOST_TEventType type,
|
||||
}
|
||||
|
||||
|
||||
GHOST_EventCursor* GHOST_SystemWin32::processCursorEvent(GHOST_TEventType type, GHOST_IWindow *window)
|
||||
GHOST_EventCursor* GHOST_SystemWin32::processCursorEvent(GHOST_TEventType type, GHOST_IWindow *Iwindow)
|
||||
{
|
||||
GHOST_TInt32 x, y;
|
||||
getSystem()->getCursorPosition(x, y);
|
||||
return new GHOST_EventCursor (getSystem()->getMilliSeconds(), type, window, x, y);
|
||||
GHOST_TInt32 x_screen, y_screen;
|
||||
GHOST_SystemWin32 * system = ((GHOST_SystemWin32 * ) getSystem());
|
||||
GHOST_WindowWin32 * window = ( GHOST_WindowWin32 * ) Iwindow;
|
||||
|
||||
system->getCursorPosition(x_screen, y_screen);
|
||||
|
||||
if(window->getCursorGrabMode() != GHOST_kGrabDisable && window->getCursorGrabMode() != GHOST_kGrabNormal)
|
||||
{
|
||||
GHOST_TInt32 x_new= x_screen;
|
||||
GHOST_TInt32 y_new= y_screen;
|
||||
GHOST_TInt32 x_accum, y_accum;
|
||||
GHOST_Rect bounds;
|
||||
|
||||
/* fallback to window bounds */
|
||||
if(window->getCursorGrabBounds(bounds)==GHOST_kFailure){
|
||||
window->getClientBounds(bounds);
|
||||
}
|
||||
|
||||
/* could also clamp to screen bounds
|
||||
* wrap with a window outside the view will fail atm */
|
||||
|
||||
bounds.wrapPoint(x_new, y_new, 2); /* offset of one in case blender is at screen bounds */
|
||||
|
||||
window->getCursorGrabAccum(x_accum, y_accum);
|
||||
if(x_new != x_screen|| y_new != y_screen) {
|
||||
/* when wrapping we don't need to add an event because the
|
||||
* setCursorPosition call will cause a new event after */
|
||||
system->setCursorPosition(x_new, y_new); /* wrap */
|
||||
window->setCursorGrabAccum(x_accum + (x_screen - x_new), y_accum + (y_screen - y_new));
|
||||
}else{
|
||||
return new GHOST_EventCursor(system->getMilliSeconds(),
|
||||
GHOST_kEventCursorMove,
|
||||
window,
|
||||
x_screen + x_accum,
|
||||
y_screen + y_accum
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
else {
|
||||
return new GHOST_EventCursor(system->getMilliSeconds(),
|
||||
GHOST_kEventCursorMove,
|
||||
window,
|
||||
x_screen,
|
||||
y_screen
|
||||
);
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
@@ -549,6 +602,26 @@ GHOST_Event* GHOST_SystemWin32::processWindowEvent(GHOST_TEventType type, GHOST_
|
||||
return new GHOST_Event(getSystem()->getMilliSeconds(), type, window);
|
||||
}
|
||||
|
||||
GHOST_TSuccess GHOST_SystemWin32::pushDragDropEvent(GHOST_TEventType eventType,
|
||||
GHOST_TDragnDropTypes draggedObjectType,
|
||||
GHOST_IWindow* window,
|
||||
int mouseX, int mouseY,
|
||||
void* data)
|
||||
{
|
||||
GHOST_SystemWin32* system = ((GHOST_SystemWin32*)getSystem());
|
||||
return system->pushEvent(new GHOST_EventDragnDrop(system->getMilliSeconds(),
|
||||
eventType,
|
||||
draggedObjectType,
|
||||
window,mouseX,mouseY,data)
|
||||
);
|
||||
}
|
||||
|
||||
void GHOST_SystemWin32::processMinMaxInfo(MINMAXINFO * minmax)
|
||||
{
|
||||
minmax->ptMinTrackSize.x=320;
|
||||
minmax->ptMinTrackSize.y=240;
|
||||
}
|
||||
|
||||
|
||||
LRESULT WINAPI GHOST_SystemWin32::s_wndProc(HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam)
|
||||
{
|
||||
@@ -793,6 +866,15 @@ LRESULT WINAPI GHOST_SystemWin32::s_wndProc(HWND hwnd, UINT msg, WPARAM wParam,
|
||||
event = processWindowEvent(GHOST_kEventWindowUpdate, window);
|
||||
::ValidateRect(hwnd, NULL);
|
||||
break;
|
||||
case WM_GETMINMAXINFO:
|
||||
/* The WM_GETMINMAXINFO message is sent to a window when the size or
|
||||
* position of the window is about to change. An application can use
|
||||
* this message to override the window's default maximized size and
|
||||
* position, or its default minimum or maximum tracking size.
|
||||
*/
|
||||
processMinMaxInfo((MINMAXINFO *) lParam);
|
||||
/* Let DefWindowProc handle it. */
|
||||
break;
|
||||
case WM_SIZE:
|
||||
/* The WM_SIZE message is sent to a window after its size has changed.
|
||||
* The WM_SIZE and WM_MOVE messages are not sent if an application handles the
|
||||
|
||||
@@ -51,6 +51,7 @@ class GHOST_EventCursor;
|
||||
class GHOST_EventKey;
|
||||
class GHOST_EventWheel;
|
||||
class GHOST_EventWindow;
|
||||
class GHOST_EventDragnDrop;
|
||||
|
||||
/**
|
||||
* WIN32 Implementation of GHOST_System class.
|
||||
@@ -181,6 +182,18 @@ public:
|
||||
* @return No return
|
||||
*/
|
||||
virtual void putClipboard(GHOST_TInt8 *buffer, bool selection) const;
|
||||
|
||||
/**
|
||||
* Creates a drag'n'drop event and pushes it immediately onto the event queue.
|
||||
* Called by GHOST_DropTargetWin32 class.
|
||||
* @param eventType The type of drag'n'drop event
|
||||
* @param draggedObjectType The type of object concerned (currently array of file names, string, ?bitmap)
|
||||
* @param mouseX x mouse coordinate (in window coordinates)
|
||||
* @param mouseY y mouse coordinate
|
||||
* @param window The window on which the event occurred
|
||||
* @return Indication whether the event was handled.
|
||||
*/
|
||||
static GHOST_TSuccess pushDragDropEvent(GHOST_TEventType eventType, GHOST_TDragnDropTypes draggedObjectType,GHOST_IWindow* window, int mouseX, int mouseY, void* data);
|
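As a usage sketch (hypothetical, not taken from this diff), the drop target would typically call this static method once a drop has been decoded:

// Hypothetical call site inside the drop target; the names below (m_window,
// m_draggedObjectType, x, y, data) are assumed for illustration only.
// The event lands on the system queue and is picked up by Blender's window
// manager on the next processEvents() call.
GHOST_SystemWin32::pushDragDropEvent(GHOST_kEventDraggingDropDone,
                                     m_draggedObjectType,
                                     m_window, x, y, data);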
||||
|
||||
protected:
|
||||
/**
|
||||
@@ -228,7 +241,7 @@ protected:
|
||||
* @param window The window receiving the event (the active window).
|
||||
* @return The event created.
|
||||
*/
|
||||
static GHOST_EventCursor* processCursorEvent(GHOST_TEventType type, GHOST_IWindow *window);
|
||||
static GHOST_EventCursor* processCursorEvent(GHOST_TEventType type, GHOST_IWindow *Iwindow);
|
||||
|
||||
/**
|
||||
* Creates a mouse wheel event.
|
||||
@@ -255,7 +268,12 @@ protected:
|
||||
* @return The event created.
|
||||
*/
|
||||
static GHOST_Event* processWindowEvent(GHOST_TEventType type, GHOST_IWindow* window);
|
||||
|
||||
/**
|
||||
* Handles minimum window size.
|
||||
* @param minmax The MINMAXINFO structure.
|
||||
*/
|
||||
static void processMinMaxInfo(MINMAXINFO * minmax);
|
||||
|
||||
/**
|
||||
* Returns the local state of the modifier keys (from the message queue).
|
||||
* @param keys The state of the keys.
|
||||
|
||||
@@ -23,6 +23,9 @@
|
||||
*
|
||||
* Contributor(s): none yet.
|
||||
*
|
||||
* Part of this code has been taken from Qt, under LGPL license
|
||||
* Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
|
||||
*
|
||||
* ***** END GPL LICENSE BLOCK *****
|
||||
*/
|
||||
|
||||
@@ -63,8 +66,10 @@
|
||||
#include <sys/time.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
#include <stdio.h> // for fprintf only
|
||||
#include <cstdlib> // for exit
|
||||
|
||||
typedef struct NDOFPlatformInfo {
|
||||
Display *display;
|
||||
@@ -93,7 +98,10 @@ GHOST_SystemX11(
|
||||
{
|
||||
m_display = XOpenDisplay(NULL);
|
||||
|
||||
if (!m_display) return;
|
||||
if (!m_display) {
|
||||
std::cerr << "Unable to open a display" << std::endl;
|
||||
abort(); //was return before, but this would just mean it will crash later
|
||||
}
|
||||
|
||||
#ifdef __sgi
|
||||
m_delete_window_atom
|
||||
@@ -126,6 +134,8 @@ GHOST_SystemX11(
|
||||
m_xclip_out= XInternAtom(m_display, "XCLIP_OUT", False);
|
||||
m_incr= XInternAtom(m_display, "INCR", False);
|
||||
m_utf8_string= XInternAtom(m_display, "UTF8_STRING", False);
|
||||
m_last_warp = 0;
|
||||
|
||||
|
||||
// compute the initial time
|
||||
timeval tv;
|
||||
@@ -310,6 +320,61 @@ static void SleepTillEvent(Display *display, GHOST_TInt64 maxSleep) {
|
||||
}
|
||||
}
|
||||
|
||||
/* This function is borrowed from Qt's X11 support,
 * qclipboard_x11.cpp
 */
|
||||
struct init_timestamp_data
|
||||
{
|
||||
Time timestamp;
|
||||
};
|
||||
|
||||
static Bool init_timestamp_scanner(Display*, XEvent *event, XPointer arg)
|
||||
{
|
||||
init_timestamp_data *data =
|
||||
reinterpret_cast<init_timestamp_data*>(arg);
|
||||
switch(event->type)
|
||||
{
|
||||
case ButtonPress:
|
||||
case ButtonRelease:
|
||||
data->timestamp = event->xbutton.time;
|
||||
break;
|
||||
case MotionNotify:
|
||||
data->timestamp = event->xmotion.time;
|
||||
break;
|
||||
case KeyPress:
|
||||
case KeyRelease:
|
||||
data->timestamp = event->xkey.time;
|
||||
break;
|
||||
case PropertyNotify:
|
||||
data->timestamp = event->xproperty.time;
|
||||
break;
|
||||
case EnterNotify:
|
||||
case LeaveNotify:
|
||||
data->timestamp = event->xcrossing.time;
|
||||
break;
|
||||
case SelectionClear:
|
||||
data->timestamp = event->xselectionclear.time;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
Time
|
||||
GHOST_SystemX11::
|
||||
lastEventTime(Time default_time) {
|
||||
init_timestamp_data data;
|
||||
data.timestamp = default_time;
|
||||
XEvent ev;
|
||||
XCheckIfEvent(m_display, &ev, &init_timestamp_scanner, (XPointer)&data);
|
||||
|
||||
return data.timestamp;
|
||||
}
|
||||
|
||||
|
||||
|
||||
bool
|
||||
GHOST_SystemX11::
|
||||
processEvents(
|
||||
@@ -405,10 +470,15 @@ GHOST_SystemX11::processEvent(XEvent *xe)
|
||||
window->getCursorGrabAccum(x_accum, y_accum);
|
||||
|
||||
if(x_new != xme.x_root || y_new != xme.y_root) {
|
||||
/* when wrapping we don't need to add an event because the
|
||||
* setCursorPosition call will cause a new event after */
|
||||
setCursorPosition(x_new, y_new); /* wrap */
|
||||
window->setCursorGrabAccum(x_accum + (xme.x_root - x_new), y_accum + (xme.y_root - y_new));
|
||||
if (xme.time > m_last_warp) {
|
||||
/* when wrapping we don't need to add an event because the
|
||||
* setCursorPosition call will cause a new event after */
|
||||
setCursorPosition(x_new, y_new); /* wrap */
|
||||
window->setCursorGrabAccum(x_accum + (xme.x_root - x_new), y_accum + (xme.y_root - y_new));
|
||||
m_last_warp = lastEventTime(xme.time);
|
||||
} else {
|
||||
setCursorPosition(x_new, y_new); /* wrap but don't accumulate */
|
||||
}
|
||||
}
|
||||
else {
|
||||
g_event = new
|
||||
@@ -907,7 +977,7 @@ setCursorPosition(
|
||||
int rely = y-cy;
|
||||
|
||||
XWarpPointer(m_display,None,None,0,0,0,0,relx,rely);
|
||||
XFlush(m_display);
|
||||
XSync(m_display, 0); /* Sync to process all requests */
|
||||
|
||||
return GHOST_kSuccess;
|
||||
}
|
||||
|
||||
@@ -266,6 +266,10 @@ private :
|
||||
/// A vector of keyboard key masks
|
||||
char m_keyboard_vector[32];
|
||||
|
||||
/* to prevent multiple warps, we store the time of the last warp event
 * and stop accumulating all events generated before that */
|
||||
Time m_last_warp;
|
||||
|
||||
/**
|
||||
* Return the ghost window associated with the
|
||||
* X11 window xwind
|
||||
@@ -281,6 +285,11 @@ private :
|
||||
XEvent *xe
|
||||
);
|
||||
|
||||
Time
|
||||
lastEventTime(
|
||||
Time default_time
|
||||
);
|
||||
|
||||
bool
|
||||
generateWindowExposeEvents(
|
||||
);
|
||||
|
||||
@@ -109,6 +109,8 @@ GHOST_TSuccess GHOST_Window::setCursorGrab(GHOST_TGrabCursorMode mode, GHOST_Rec
|
||||
m_cursorGrabBounds.m_l= m_cursorGrabBounds.m_r= -1;
|
||||
else if (bounds) {
|
||||
m_cursorGrabBounds= *bounds;
|
||||
} else { /* if bounds not defined, use window */
|
||||
getClientBounds(m_cursorGrabBounds);
|
||||
}
|
||||
m_cursorGrab = mode;
|
||||
return GHOST_kSuccess;
|
||||
|
||||
@@ -230,6 +230,32 @@ extern "C" {
|
||||
- (void)keyDown:(NSEvent *)theEvent
|
||||
{}
|
||||
|
||||
#if MAC_OS_X_VERSION_MIN_REQUIRED <= MAC_OS_X_VERSION_10_4
|
||||
//Cmd+key combinations are handled differently before 10.5
|
||||
- (BOOL)performKeyEquivalent:(NSEvent *)theEvent
|
||||
{
|
||||
NSString *chars = [theEvent charactersIgnoringModifiers];
|
||||
|
||||
if ([chars length] <1)
|
||||
return NO;
|
||||
|
||||
//Let cocoa handle menu shortcuts
|
||||
switch ([chars characterAtIndex:0]) {
|
||||
case 'q':
|
||||
case 'w':
|
||||
case 'h':
|
||||
case 'm':
|
||||
case '<':
|
||||
case '>':
|
||||
case '~':
|
||||
case '`':
|
||||
return NO;
|
||||
default:
|
||||
return YES;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
- (BOOL)isOpaque
|
||||
{
|
||||
return YES;
|
||||
@@ -858,15 +884,18 @@ GHOST_TSuccess GHOST_WindowCocoa::installDrawingContext(GHOST_TDrawingContextTyp
|
||||
|
||||
if (!s_firstOpenGLcontext) s_firstOpenGLcontext = tmpOpenGLContext;
|
||||
#ifdef WAIT_FOR_VSYNC
|
||||
{
|
||||
GLint swapInt = 1;
|
||||
/* wait for vsync, to avoid tearing artifacts */
|
||||
[tmpOpenGLContext setValues:1 forParameter:NSOpenGLCPSwapInterval];
|
||||
[tmpOpenGLContext setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
|
||||
}
|
||||
#endif
|
||||
[m_openGLView setOpenGLContext:tmpOpenGLContext];
|
||||
[tmpOpenGLContext setView:m_openGLView];
|
||||
|
||||
m_openGLContext = tmpOpenGLContext;
|
||||
[m_openGLView setOpenGLContext:tmpOpenGLContext];
|
||||
[tmpOpenGLContext setView:m_openGLView];
|
||||
|
||||
m_openGLContext = tmpOpenGLContext;
|
||||
break;
|
||||
|
||||
|
||||
case GHOST_kDrawingContextTypeNone:
|
||||
success = GHOST_kSuccess;
|
||||
break;
|
||||
|
||||
@@ -40,6 +40,8 @@
|
||||
|
||||
#include <string.h>
|
||||
#include "GHOST_WindowWin32.h"
|
||||
#include "GHOST_SystemWin32.h"
|
||||
#include "GHOST_DropTargetWin32.h"
|
||||
#include <GL/gl.h>
|
||||
#include <math.h>
|
||||
|
||||
@@ -95,6 +97,7 @@ static PIXELFORMATDESCRIPTOR sPreferredFormat = {
|
||||
};
|
||||
|
||||
GHOST_WindowWin32::GHOST_WindowWin32(
|
||||
GHOST_SystemWin32 * system,
|
||||
const STR_String& title,
|
||||
GHOST_TInt32 left,
|
||||
GHOST_TInt32 top,
|
||||
@@ -106,6 +109,7 @@ GHOST_WindowWin32::GHOST_WindowWin32(
|
||||
:
|
||||
GHOST_Window(title, left, top, width, height, state, GHOST_kDrawingContextTypeNone,
|
||||
stereoVisual),
|
||||
m_system(system),
|
||||
m_hDC(0),
|
||||
m_hGlRc(0),
|
||||
m_hasMouseCaptured(false),
|
||||
@@ -167,6 +171,9 @@ GHOST_WindowWin32::GHOST_WindowWin32(
|
||||
0); // pointer to window-creation data
|
||||
}
|
||||
if (m_hWnd) {
|
||||
// Register this window as a droptarget. Requires m_hWnd to be valid.
|
||||
// Note that OleInitialize(0) has to be called prior to this. Done in GHOST_SystemWin32.
|
||||
m_dropTarget = new GHOST_DropTargetWin32(this, m_system);
|
||||
// Store a pointer to this class in the window structure
|
||||
::SetWindowLongPtr(m_hWnd, GWL_USERDATA, (LONG_PTR)this);
|
||||
|
||||
@@ -275,6 +282,7 @@ GHOST_WindowWin32::~GHOST_WindowWin32()
|
||||
m_hDC = 0;
|
||||
}
|
||||
if (m_hWnd) {
|
||||
m_dropTarget->Release(); // frees itself.
|
||||
::DestroyWindow(m_hWnd);
|
||||
m_hWnd = 0;
|
||||
}
|
||||
@@ -285,6 +293,10 @@ bool GHOST_WindowWin32::getValid() const
|
||||
return m_hWnd != 0;
|
||||
}
|
||||
|
||||
HWND GHOST_WindowWin32::getHWND() const
|
||||
{
|
||||
return m_hWnd;
|
||||
}
|
||||
|
||||
void GHOST_WindowWin32::setTitle(const STR_String& title)
|
||||
{
|
||||
@@ -663,6 +675,41 @@ GHOST_TSuccess GHOST_WindowWin32::setWindowCursorVisibility(bool visible)
|
||||
return GHOST_kSuccess;
|
||||
}
|
||||
|
||||
GHOST_TSuccess GHOST_WindowWin32::setWindowCursorGrab(GHOST_TGrabCursorMode mode)
|
||||
{
|
||||
if(mode != GHOST_kGrabDisable) {
|
||||
if(mode != GHOST_kGrabNormal) {
|
||||
m_system->getCursorPosition(m_cursorGrabInitPos[0], m_cursorGrabInitPos[1]);
|
||||
setCursorGrabAccum(0, 0);
|
||||
|
||||
if(mode == GHOST_kGrabHide)
|
||||
setWindowCursorVisibility(false);
|
||||
}
|
||||
registerMouseClickEvent(true);
|
||||
}
|
||||
else {
|
||||
if (m_cursorGrab==GHOST_kGrabHide) {
|
||||
m_system->setCursorPosition(m_cursorGrabInitPos[0], m_cursorGrabInitPos[1]);
|
||||
setWindowCursorVisibility(true);
|
||||
}
|
||||
if(m_cursorGrab != GHOST_kGrabNormal) {
|
||||
/* used to generate a mouse move event, otherwise the last event
 * blender gets can be outside the screen, causing menus not to show
 * properly unless the user moves the mouse */
|
||||
GHOST_TInt32 pos[2];
|
||||
m_system->getCursorPosition(pos[0], pos[1]);
|
||||
m_system->setCursorPosition(pos[0], pos[1]);
|
||||
}
|
||||
|
||||
/* Almost works without this, but it is important; otherwise the GHOST mouse location can be incorrect on exit */
|
||||
setCursorGrabAccum(0, 0);
|
||||
m_cursorGrabBounds.m_l= m_cursorGrabBounds.m_r= -1; /* disable */
|
||||
registerMouseClickEvent(false);
|
||||
}
|
||||
|
||||
return GHOST_kSuccess;
|
||||
}
|
||||
|
||||
GHOST_TSuccess GHOST_WindowWin32::setWindowCursorShape(GHOST_TStandardCursor cursorShape)
|
||||
{
|
||||
if (m_customCursor) {
|
||||
@@ -676,6 +723,7 @@ GHOST_TSuccess GHOST_WindowWin32::setWindowCursorShape(GHOST_TStandardCursor cur
|
||||
|
||||
return GHOST_kSuccess;
|
||||
}
|
||||
|
||||
void GHOST_WindowWin32::processWin32TabletInitEvent()
|
||||
{
|
||||
if (m_wintab) {
|
||||
|
||||
@@ -47,6 +47,9 @@
|
||||
#define PACKETMODE PK_BUTTONS
|
||||
#include <pktdef.h>
|
||||
|
||||
class GHOST_SystemWin32;
|
||||
class GHOST_DropTargetWin32;
|
||||
|
||||
// typedefs for WinTab functions to allow dynamic loading
|
||||
typedef UINT (API * GHOST_WIN32_WTInfo) ( UINT, UINT, LPVOID );
|
||||
typedef HCTX (API * GHOST_WIN32_WTOpen) (HWND, LPLOGCONTEXTA, BOOL);
|
||||
@@ -74,6 +77,7 @@ public:
|
||||
* @param stereoVisual Stereo visual for quad buffered stereo.
|
||||
*/
|
||||
GHOST_WindowWin32(
|
||||
GHOST_SystemWin32 * system,
|
||||
const STR_String& title,
|
||||
GHOST_TInt32 left,
|
||||
GHOST_TInt32 top,
|
||||
@@ -96,6 +100,12 @@ public:
|
||||
*/
|
||||
virtual bool getValid() const;
|
||||
|
||||
/**
|
||||
* Access to the handle of the window.
|
||||
* @return The handle of the window.
|
||||
*/
|
||||
virtual HWND getHWND() const;
|
||||
|
||||
/**
|
||||
* Sets the title displayed in the title bar.
|
||||
* @param title The title to display in the title bar.
|
||||
@@ -250,6 +260,13 @@ protected:
|
||||
*/
|
||||
virtual GHOST_TSuccess setWindowCursorVisibility(bool visible);
|
||||
|
||||
/**
|
||||
* Sets the cursor grab on the window using native window system calls.
|
||||
* Uses registerMouseClickEvent().
|
||||
* @param mode GHOST_TGrabCursorMode.
|
||||
*/
|
||||
virtual GHOST_TSuccess setWindowCursorGrab(GHOST_TGrabCursorMode mode);
|
||||
|
||||
/**
|
||||
* Sets the cursor shape on the window using
|
||||
* native window system calls.
|
||||
@@ -273,6 +290,10 @@ protected:
|
||||
int bg_color
|
||||
);
|
||||
|
||||
/** Pointer to system */
|
||||
GHOST_SystemWin32 * m_system;
|
||||
/** Pointer to COM IDropTarget implementor */
|
||||
GHOST_DropTargetWin32 * m_dropTarget;
|
||||
/** Window handle. */
|
||||
HWND m_hWnd;
|
||||
/** Device context handle. */
|
||||
|
||||
@@ -41,7 +41,15 @@ CCSRCS += GHOST_CallbackEventConsumer.cpp
|
||||
CCSRCS += GHOST_NDOFManager.cpp
|
||||
|
||||
ifeq ($(OS),$(findstring $(OS), "darwin"))
|
||||
CCSRCS += $(wildcard *Carbon.cpp)
|
||||
ifeq ($(WITH_COCOA), true)
|
||||
OCSRCS += $(wildcard *Cocoa.mm)
|
||||
CPPFLAGS += -DGHOST_COCOA
|
||||
ifeq ($(WITH_QUICKTIME), true)
|
||||
CPPFLAGS += -DWITH_QUICKTIME
|
||||
endif
|
||||
else
|
||||
CCSRCS += $(wildcard *Carbon.cpp)
|
||||
endif
|
||||
endif
|
||||
|
||||
ifeq ($(OS),$(findstring $(OS), "windows"))
|
||||
|
||||
@@ -352,6 +352,10 @@
|
||||
RelativePath="..\..\intern\GHOST_DisplayManagerWin32.h"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\intern\GHOST_DropTargetWin32.h"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\intern\GHOST_Event.h"
|
||||
>
|
||||
@@ -489,6 +493,10 @@
|
||||
RelativePath="..\..\intern\GHOST_DisplayManagerWin32.cpp"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\intern\GHOST_DropTargetWin32.cpp"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\intern\GHOST_EventManager.cpp"
|
||||
>
|
||||
|
||||
@@ -283,8 +283,12 @@ sgstrf (superlu_options_t *options, SuperMatrix *A,
|
||||
* -------------------------------------- */
|
||||
/* Determine the union of the row structure of the snode */
|
||||
if ( (*info = ssnode_dfs(jcol, kcol, asub, xa_begin, xa_end,
|
||||
xprune, marker, &Glu)) != 0 )
|
||||
xprune, marker, &Glu)) != 0 ) {
|
||||
if ( iperm_r_allocated ) SUPERLU_FREE (iperm_r);
|
||||
SUPERLU_FREE (iperm_c);
|
||||
SUPERLU_FREE (relax_end);
|
||||
return;
|
||||
}
|
||||
|
||||
nextu = xusub[jcol];
|
||||
nextlu = xlusup[jcol];
|
||||
@@ -293,8 +297,12 @@ sgstrf (superlu_options_t *options, SuperMatrix *A,
|
||||
new_next = nextlu + (xlsub[fsupc+1]-xlsub[fsupc])*(kcol-jcol+1);
|
||||
nzlumax = Glu.nzlumax;
|
||||
while ( new_next > nzlumax ) {
|
||||
if ( (*info = sLUMemXpand(jcol, nextlu, LUSUP, &nzlumax, &Glu)) )
|
||||
return;
|
||||
if ( (*info = sLUMemXpand(jcol, nextlu, LUSUP, &nzlumax, &Glu)) ) {
|
||||
if ( iperm_r_allocated ) SUPERLU_FREE (iperm_r);
|
||||
SUPERLU_FREE (iperm_c);
|
||||
SUPERLU_FREE (relax_end);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
for (icol = jcol; icol<= kcol; icol++) {
|
||||
@@ -350,17 +358,31 @@ sgstrf (superlu_options_t *options, SuperMatrix *A,
|
||||
|
||||
if ((*info = scolumn_dfs(m, jj, perm_r, &nseg, &panel_lsub[k],
|
||||
segrep, &repfnz[k], xprune, marker,
|
||||
parent, xplore, &Glu)) != 0) return;
|
||||
parent, xplore, &Glu)) != 0) {
|
||||
if ( iperm_r_allocated ) SUPERLU_FREE (iperm_r);
|
||||
SUPERLU_FREE (iperm_c);
|
||||
SUPERLU_FREE (relax_end);
|
||||
return;
|
||||
}
|
||||
|
||||
/* Numeric updates */
|
||||
if ((*info = scolumn_bmod(jj, (nseg - nseg1), &dense[k],
|
||||
tempv, &segrep[nseg1], &repfnz[k],
|
||||
jcol, &Glu, stat)) != 0) return;
|
||||
jcol, &Glu, stat)) != 0) {
|
||||
if ( iperm_r_allocated ) SUPERLU_FREE (iperm_r);
|
||||
SUPERLU_FREE (iperm_c);
|
||||
SUPERLU_FREE (relax_end);
|
||||
return;
|
||||
}
|
||||
|
||||
/* Copy the U-segments to ucol[*] */
|
||||
if ((*info = scopy_to_ucol(jj, nseg, segrep, &repfnz[k],
|
||||
perm_r, &dense[k], &Glu)) != 0)
|
||||
return;
|
||||
perm_r, &dense[k], &Glu)) != 0) {
|
||||
if ( iperm_r_allocated ) SUPERLU_FREE (iperm_r);
|
||||
SUPERLU_FREE (iperm_c);
|
||||
SUPERLU_FREE (relax_end);
|
||||
return;
|
||||
}
|
||||
|
||||
if ( (*info = spivotL(jj, diag_pivot_thresh, &usepr, perm_r,
|
||||
iperm_r, iperm_c, &pivrow, &Glu, stat)) )
|
||||
@@ -429,5 +451,4 @@ sgstrf (superlu_options_t *options, SuperMatrix *A,
|
||||
if ( iperm_r_allocated ) SUPERLU_FREE (iperm_r);
|
||||
SUPERLU_FREE (iperm_c);
|
||||
SUPERLU_FREE (relax_end);
|
||||
|
||||
}
|
||||
|
||||
@@ -339,6 +339,10 @@
|
||||
RelativePath="..\..\..\source\blender\python\intern\bpy_array.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\python\intern\bpy_driver.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\python\intern\bpy_interface.c"
|
||||
>
|
||||
|
||||
File diff suppressed because it is too large
@@ -222,49 +222,45 @@
|
||||
RelativePath="..\..\source\icons\winblenderfile.ico"
|
||||
>
|
||||
</File>
|
||||
<Filter
|
||||
Name="Source Files"
|
||||
Filter="cpp;c;cxx;rc;def;r;odl;idl;hpj;bat"
|
||||
>
|
||||
<File
|
||||
RelativePath="..\..\source\creator\buildinfo.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\source\creator\creator.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\source\icons\winblender.rc"
|
||||
>
|
||||
<FileConfiguration
|
||||
Name="Blender Release|Win32"
|
||||
>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"
|
||||
AdditionalIncludeDirectories="\blenderdev\blender.test2\source\icons;$(NoInherit)"
|
||||
/>
|
||||
</FileConfiguration>
|
||||
<FileConfiguration
|
||||
Name="Blender Debug|Win32"
|
||||
>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"
|
||||
AdditionalIncludeDirectories="\blenderdev\blender.test2\source\icons;$(NoInherit)"
|
||||
/>
|
||||
</FileConfiguration>
|
||||
</File>
|
||||
</Filter>
|
||||
</Filter>
|
||||
<Filter
|
||||
Name="Header Files"
|
||||
Filter="h;hpp;hxx;hm;inl"
|
||||
>
|
||||
</Filter>
|
||||
<Filter
|
||||
Name="Source Files"
|
||||
>
|
||||
</Filter>
|
||||
<Filter
|
||||
Name="Source Files"
|
||||
Filter="cpp;c;cxx;rc;def;r;odl;idl;hpj;bat"
|
||||
>
|
||||
<File
|
||||
RelativePath="..\..\source\creator\buildinfo.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\source\creator\creator.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\source\icons\winblender.rc"
|
||||
>
|
||||
<FileConfiguration
|
||||
Name="Blender Release|Win32"
|
||||
>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"
|
||||
AdditionalIncludeDirectories="\blenderdev\blender.test2\source\icons;$(NoInherit)"
|
||||
/>
|
||||
</FileConfiguration>
|
||||
<FileConfiguration
|
||||
Name="Blender Debug|Win32"
|
||||
>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"
|
||||
AdditionalIncludeDirectories="\blenderdev\blender.test2\source\icons;$(NoInherit)"
|
||||
/>
|
||||
</FileConfiguration>
|
||||
</File>
|
||||
</Filter>
|
||||
</Files>
|
||||
<Globals>
|
||||
</Globals>
|
||||
|
||||
File diff suppressed because it is too large
@@ -43,7 +43,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel;..\..\..\source\blender\gpu"
|
||||
PreprocessorDefinitions="NDEBUG,WIN32,_LIB"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="2"
|
||||
@@ -119,7 +119,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel;..\..\..\source\blender\gpu"
|
||||
PreprocessorDefinitions="_DEBUG;WIN32;_LIB;_CRT_SECURE_NO_WARNINGS"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
@@ -196,7 +196,7 @@
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="2"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel;..\..\..\source\blender\gpu"
|
||||
PreprocessorDefinitions="NDEBUG;WIN32;_LIB"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="0"
|
||||
@@ -272,7 +272,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel;..\..\..\source\blender\gpu"
|
||||
PreprocessorDefinitions="_DEBUG,WIN32,_LIB"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="3"
|
||||
@@ -347,7 +347,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel;..\..\..\source\blender\gpu"
|
||||
PreprocessorDefinitions="_DEBUG,WIN32,_LIB"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
@@ -423,7 +423,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\..\lib\windows\freetype\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\include;..\..\..\source\blender\blenlib;..\..\..\source\blender\python;..\..\..\source\blender\makesdna;..\..\..\source\blender\blenkernel;..\..\..\source\blender\gpu"
|
||||
PreprocessorDefinitions="NDEBUG,WIN32,_LIB"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="0"
|
||||
@@ -604,6 +604,14 @@
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\blenlib\intern\path_util.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\blenlib\intern\pbvh.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\blenlib\intern\rand.c"
|
||||
>
|
||||
</File>
|
||||
@@ -631,10 +639,6 @@
|
||||
RelativePath="..\..\..\source\blender\blenlib\intern\time.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\blenlib\intern\util.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\blenlib\intern\voxel.c"
|
||||
>
|
||||
@@ -765,6 +769,10 @@
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\blenlib\BLI_pbvh.h"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\blenlib\BLI_scanfill.h"
|
||||
>
|
||||
</File>
|
||||
@@ -784,10 +792,6 @@
|
||||
RelativePath="..\..\..\source\blender\blenlib\BLI_threads.h"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\blenlib\BLI_util.h"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\blenlib\BLI_voxel.h"
|
||||
>
|
||||
|
||||
File diff suppressed because it is too large
@@ -43,7 +43,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds;..\..\..\source\blender\windowmanager;..\..\..\source\blender\makesrna"
|
||||
PreprocessorDefinitions="_DEBUG;WIN32;_LIB;WITH_QUICKTIME;WITH_OPENEXR;WITH_DDS;WITH_FFMPEG"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
@@ -118,7 +118,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds;..\..\..\source\blender\windowmanager;..\..\..\source\blender\makesrna"
|
||||
PreprocessorDefinitions="_DEBUG,WIN32,_LIB"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="3"
|
||||
@@ -192,7 +192,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds;..\..\..\source\blender\windowmanager;..\..\..\source\blender\makesrna"
|
||||
PreprocessorDefinitions="NDEBUG,WIN32,_LIB,WITH_QUICKTIME"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="2"
|
||||
@@ -268,7 +268,7 @@
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="2"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds;..\..\..\source\blender\windowmanager;..\..\..\source\blender\makesrna"
|
||||
PreprocessorDefinitions="NDEBUG;WIN32;_LIB;WITH_QUICKTIME;WITH_OPENEXR;WITH_DDS;WITH_FFMPEG"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="0"
|
||||
@@ -343,7 +343,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds;..\..\..\source\blender\windowmanager;..\..\..\source\blender\makesrna"
|
||||
PreprocessorDefinitions="_DEBUG;WIN32;_LIB;WITH_QUICKTIME;WITH_DDS;WITH_FFMPEG"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
@@ -418,7 +418,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds"
|
||||
AdditionalIncludeDirectories="..\..\..\..\lib\windows\jpeg\include;..\..\..\..\lib\windows\zlib\include;..\..\..\..\lib\windows\png\include;..\..\..\..\lib\windows\tiff\include;..\..\..\..\lib\windows\openexr\include;..\..\..\..\lib\windows\openexr\include\Iex;..\..\..\..\lib\windows\openexr\include\Imath;..\..\..\..\lib\windows\openexr\include\IlmImf;..\..\..\..\lib\windows\QTDevWin\CIncludes;..\..\..\..\lib\windows\ffmpeg\include;..\..\..\..\lib\windows\ffmpeg\include\msvc;..\..\..\..\build\msvc_9\intern\memutil\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender\avi;..\..\..\source\blender\imbuf;..\..\..\source\blender\blenlib;..\..\..\source\blender\include;..\..\..\source\blender\quicktime;..\..\..\source\blender\blenkernel;..\..\..\source\blender\blenloader;..\..\..\source\blender\makesdna;..\..\..\source\blender\imbuf\intern;..\..\..\source\blender\readstreamglue;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\imbuf\intern\openexr;..\..\..\source\blender\imbuf\intern\dds;..\..\..\source\blender\windowmanager;..\..\..\source\blender\makesrna"
|
||||
PreprocessorDefinitions="NDEBUG;WIN32;_LIB;WITH_QUICKTIME;WITH_DDS;WITH_FFMPEG"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="0"
|
||||
|
||||
@@ -141,7 +141,7 @@
|
||||
Optimization="2"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender;..\..\..\source\blender\blenlib;..\..\..\source\blender\makesdna;..\..\..\source\blender\makesrna;..\..\..\source\blender\editors\include;..\..\..\source\blender\blenkernel;..\..\..\..\lib\windows\pthreads\include;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\windowmanager;..\..\..\source\blender\imbuf;..\..\..\source\blender\bmesh"
|
||||
PreprocessorDefinitions="NDEBUG,WIN32,_CONSOLE"
|
||||
PreprocessorDefinitions="NDEBUG,WIN32,_CONSOLE;GAMEBLENDER=1"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="0"
|
||||
EnableFunctionLevelLinking="true"
|
||||
@@ -327,7 +327,7 @@
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender;..\..\..\source\blender\blenlib;..\..\..\source\blender\makesdna;..\..\..\source\blender\makesrna;..\..\..\source\blender\editors\include;..\..\..\source\blender\blenkernel;..\..\..\..\lib\windows\pthreads\include;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\windowmanager;..\..\..\source\blender\imbuf;..\..\..\source\blender\bmesh"
|
||||
PreprocessorDefinitions="_DEBUG;WIN32;_CONSOLE"
|
||||
PreprocessorDefinitions="_DEBUG;WIN32;_CONSOLE;GAMEBLENDER=1"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
DefaultCharIsUnsigned="true"
|
||||
@@ -423,7 +423,7 @@
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender;..\..\..\source\blender\blenlib;..\..\..\source\blender\makesdna;..\..\..\source\blender\makesrna;..\..\..\source\blender\editors\include;..\..\..\source\blender\blenkernel;..\..\..\..\lib\windows\pthreads\include;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\windowmanager;..\..\..\source\blender\imbuf"
|
||||
PreprocessorDefinitions="_DEBUG,WIN32,_CONSOLE"
|
||||
PreprocessorDefinitions="_DEBUG,WIN32,_CONSOLE;GAMEBLENDER=1"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
DefaultCharIsUnsigned="true"
|
||||
@@ -519,7 +519,7 @@
|
||||
Name="VCCLCompilerTool"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\source\blender;..\..\..\source\blender\blenlib;..\..\..\source\blender\makesdna;..\..\..\source\blender\makesrna;..\..\..\source\blender\editors\include;..\..\..\source\blender\blenkernel;..\..\..\..\lib\windows\pthreads\include;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\windowmanager;..\..\..\source\blender\imbuf"
|
||||
PreprocessorDefinitions="NDEBUG,WIN32,_CONSOLE"
|
||||
PreprocessorDefinitions="NDEBUG,WIN32,_CONSOLE;GAMEBLENDER=1"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="0"
|
||||
EnableFunctionLevelLinking="true"
|
||||
@@ -618,6 +618,10 @@
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\makesrna\intern\rna_armature_api.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\makesrna\intern\rna_boid.c"
|
||||
>
|
||||
</File>
|
||||
@@ -802,7 +806,7 @@
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\makesrna\intern\rna_sequence.c"
|
||||
RelativePath="..\..\..\source\blender\makesrna\intern\rna_sequencer.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
|
||||
@@ -44,7 +44,7 @@
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\source\blender\imbuf;..\..\..\source\blender\makesdna;..\..\..\source\blender\makesrna;..\..\..\source\blender\blenlib;..\..\..\source\blender\blenkernel;..\..\..\source\blender\ikplugin;..\..\..\source\blender\windowmanager;..\..\..\source\blender\editors\include;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\bmesh"
|
||||
PreprocessorDefinitions="WIN32;_DEBUG;_LIB;_CRT_SECURE_NO_WARNINGS"
|
||||
PreprocessorDefinitions="WIN32;_DEBUG;_LIB;_CRT_SECURE_NO_WARNINGS;GAMEBLENDER=1"
|
||||
MinimalRebuild="true"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
@@ -116,7 +116,7 @@
|
||||
EnableIntrinsicFunctions="true"
|
||||
WholeProgramOptimization="true"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\..\lib\windows\pthreads\include;..\..\..\source\blender\imbuf;..\..\..\source\blender\makesdna;..\..\..\source\blender\makesrna;..\..\..\source\blender\blenlib;..\..\..\source\blender\blenkernel;..\..\..\source\blender\ikplugin;..\..\..\source\blender\windowmanager;..\..\..\source\blender\editors\include;..\..\..\source\blender\render\extern\include;..\..\..\source\blender\bmesh"
|
||||
PreprocessorDefinitions="WIN32;NDEBUG;_LIB;_CRT_SECURE_NO_WARNINGS"
|
||||
PreprocessorDefinitions="WIN32;NDEBUG;_LIB;_CRT_SECURE_NO_WARNINGS;GAMEBLENDER=1"
|
||||
MinimalRebuild="true"
|
||||
RuntimeLibrary="0"
|
||||
EnableFunctionLevelLinking="false"
|
||||
@@ -336,7 +336,7 @@
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\blender\makesrna\intern\rna_sequence_gen.c"
|
||||
RelativePath="..\..\..\source\blender\makesrna\intern\rna_sequencer_gen.c"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
|
||||
@@ -525,10 +525,6 @@
|
||||
RelativePath="..\..\..\source\gameengine\Converter\BL_SkinDeformer.cpp"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\gameengine\Converter\BL_SkinMeshObject.cpp"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\gameengine\Converter\BlenderWorldInfo.cpp"
|
||||
>
|
||||
@@ -561,6 +557,10 @@
|
||||
RelativePath="..\..\..\source\gameengine\Converter\KX_IpoConvert.cpp"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\gameengine\Converter\KX_SoftBodyDeformer.cpp"
|
||||
>
|
||||
</File>
|
||||
</Filter>
|
||||
<Filter
|
||||
Name="Header Files"
|
||||
@@ -614,10 +614,6 @@
|
||||
RelativePath="..\..\..\source\gameengine\Converter\BL_SkinDeformer.h"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\gameengine\Converter\BL_SkinMeshObject.h"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\gameengine\Converter\BlenderWorldInfo.h"
|
||||
>
|
||||
@@ -650,6 +646,10 @@
|
||||
RelativePath="..\..\..\source\gameengine\Converter\KX_IpoConvert.h"
|
||||
>
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\..\source\gameengine\Converter\KX_SoftBodyDeformer.h"
|
||||
>
|
||||
</File>
|
||||
</Filter>
|
||||
</Files>
|
||||
<Globals>
|
||||
|
||||
@@ -43,7 +43,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
PreprocessorDefinitions="_DEBUG;WIN32;_LIB;WITH_GLEXT;GLEW_STATIC"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
@@ -118,7 +118,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
PreprocessorDefinitions="NDEBUG,WIN32,_LIB"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="2"
|
||||
@@ -194,7 +194,7 @@
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="2"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
PreprocessorDefinitions="NDEBUG;WIN32;_LIB;WITH_GLEXT;GLEW_STATIC"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="0"
|
||||
@@ -269,7 +269,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
PreprocessorDefinitions="_DEBUG,WIN32,_LIB"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="3"
|
||||
@@ -344,7 +344,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
PreprocessorDefinitions="_DEBUG;WIN32;_LIB;WITH_GLEXT;GLEW_STATIC"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
@@ -419,7 +419,7 @@
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
InlineFunctionExpansion="1"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
AdditionalIncludeDirectories="..\..\..\..\build\msvc_9\intern\moto\include;..\..\..\..\build\msvc_9\intern\string\include;..\..\..\..\build\msvc_9\intern\guardedalloc\include;..\..\..\..\build\msvc_9\extern\glew\include;..\..\..\source\kernel\gen_system;..\..\..\source\gameengine\Expressions;..\..\..\source\gameengine\SceneGraph;..\..\..\..\lib\windows\python\include\python3.1;..\..\..\source\blender\makesdna"
|
||||
PreprocessorDefinitions="NDEBUG;WIN32;_LIB;WITH_GLEXT;GLEW_STATIC"
|
||||
StringPooling="true"
|
||||
RuntimeLibrary="0"
|
||||
|
||||
@@ -1 +1 @@
|
||||
2.5-devel
|
||||
2.5-alpha0
|
||||
|
||||
Binary file not shown.
|
Before: 190 KiB | After: 204 KiB
@@ -24,26 +24,29 @@
|
||||
#
|
||||
# ***** END GPL LICENCE BLOCK *****
|
||||
|
||||
import sys, os
|
||||
# <pep8 compliant>
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
if len(sys.argv) < 2:
|
||||
sys.stdout.write("Usage: datatoc <data_file>\n")
|
||||
sys.exit(1)
|
||||
sys.stdout.write("Usage: datatoc <data_file>\n")
|
||||
sys.exit(1)
|
||||
|
||||
filename = sys.argv[1]
|
||||
|
||||
try:
|
||||
fpin = open(filename, "rb");
|
||||
fpin = open(filename, "rb")
|
||||
except:
|
||||
sys.stdout.write("Unable to open input %s\n" % sys.argv[1])
|
||||
sys.exit(1)
|
||||
sys.stdout.write("Unable to open input %s\n" % sys.argv[1])
|
||||
sys.exit(1)
|
||||
|
||||
fpin.seek(0, os.SEEK_END)
|
||||
size = fpin.tell()
|
||||
fpin.seek(0)
|
||||
|
||||
if filename[0] == ".":
|
||||
filename = filename[1:]
|
||||
filename = filename[1:]
|
||||
|
||||
cname = filename + ".c"
|
||||
sys.stdout.write("Making C file <%s>\n" % cname)
|
||||
@@ -53,10 +56,10 @@ filename = filename.replace(".", "_")
|
||||
sys.stdout.write(str(size))
|
||||
sys.stdout.write("\n")
|
||||
try:
|
||||
fpout = open(cname, "w")
|
||||
fpout = open(cname, "w")
|
||||
except:
|
||||
sys.stdout.write("Unable to open output %s\n" % cname)
|
||||
sys.exit(1)
|
||||
sys.stdout.write("Unable to open output %s\n" % cname)
|
||||
sys.exit(1)
|
||||
|
||||
fpout.write("/* DataToC output of file <%s> */\n\n" % filename)
|
||||
fpout.write("int datatoc_%s_size= %d;\n" % (filename, size))
|
||||
@@ -64,11 +67,11 @@ fpout.write("int datatoc_%s_size= %d;\n" % (filename, size))
|
||||
fpout.write("char datatoc_%s[]= {\n" % filename)
|
||||
|
||||
while size > 0:
|
||||
size -= 1
|
||||
if size % 32 == 31:
|
||||
fpout.write("\n")
|
||||
size -= 1
|
||||
if size % 32 == 31:
|
||||
fpout.write("\n")
|
||||
|
||||
fpout.write("%3d," % ord(fpin.read(1)))
|
||||
fpout.write("%3d," % ord(fpin.read(1)))
|
||||
|
||||
fpout.write("\n 0};\n\n")
|
||||
|
||||
|
||||
Binary file not shown.
|
Before: 192 KiB | After: 198 KiB
@@ -24,15 +24,19 @@
|
||||
# The Original Code is: see repository.
|
||||
#
|
||||
# Contributor(s): see repository.
|
||||
#
|
||||
import sys, os, re
|
||||
|
||||
nanblenderhome = os.getenv("NANBLENDERHOME");
|
||||
# <pep8-80 compliant>
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
|
||||
nanblenderhome = os.getenv("NANBLENDERHOME")
|
||||
|
||||
if nanblenderhome == None:
|
||||
nanblenderhome = os.path.dirname(os.path.abspath(sys.argv[0]))+"/.."
|
||||
nanblenderhome = os.path.dirname(os.path.abspath(sys.argv[0])) + "/.."
|
||||
|
||||
config = nanblenderhome+"/source/blender/blenkernel/BKE_blender.h"
|
||||
config = nanblenderhome + "/source/blender/blenkernel/BKE_blender.h"
|
||||
|
||||
infile = open(config)
|
||||
|
||||
@@ -40,23 +44,23 @@ major = None
|
||||
minor = None
|
||||
|
||||
for line in infile.readlines():
|
||||
m = re.search("#define BLENDER_VERSION\s+(\d+)", line)
|
||||
if m:
|
||||
major = m.group(1)
|
||||
m = re.search("#define BLENDER_SUBVERSION\s+(\d+)", line)
|
||||
if m:
|
||||
minor = m.group(1)
|
||||
if minor and major:
|
||||
major = float(major) / 100.0
|
||||
break
|
||||
m = re.search("#define BLENDER_VERSION\s+(\d+)", line)
|
||||
if m:
|
||||
major = m.group(1)
|
||||
m = re.search("#define BLENDER_SUBVERSION\s+(\d+)", line)
|
||||
if m:
|
||||
minor = m.group(1)
|
||||
if minor and major:
|
||||
major = float(major) / 100.0
|
||||
break
|
||||
|
||||
infile.close()
|
||||
|
||||
# Major was changed to float, but minor is still a string
|
||||
if minor and major:
|
||||
if minor == "0":
|
||||
print "%.2f" % major
|
||||
else:
|
||||
print "%.2f.%s" % (major, minor)
|
||||
if minor == "0":
|
||||
print "%.2f" % major
|
||||
else:
|
||||
print "%.2f.%s" % (major, minor)
|
||||
else:
|
||||
print "unknownversion"
|
||||
print "unknownversion"
|
||||
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -4,18 +4,20 @@
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
__author__ = "Bill L.Nieuwendorp"
|
||||
__bpydoc__ = """\
|
||||
This script Exports Lightwaves MotionDesigner format.
|
||||
@@ -47,16 +49,8 @@ Be sure not to use modifiers that change the number or order of verts in the mes
|
||||
|
||||
import bpy
|
||||
import Mathutils
|
||||
import math
|
||||
import os
|
||||
from struct import pack
|
||||
|
||||
#import Blender
|
||||
#from Blender import *
|
||||
#import BPyMessages
|
||||
try:
|
||||
from struct import pack
|
||||
except:
|
||||
pack = None
|
||||
|
||||
def zero_file(filepath):
|
||||
'''
|
||||
@@ -66,7 +60,8 @@ def zero_file(filepath):
|
||||
file.write('\n') # apparently macosx needs some data in a blank file?
|
||||
file.close()
|
||||
|
||||
def check_vertcount(mesh,vertcount):
|
||||
|
||||
def check_vertcount(mesh, vertcount):
|
||||
'''
|
||||
check and make sure the vertcount is consistent throughout the frame range
|
||||
'''
|
||||
@@ -75,8 +70,8 @@ def check_vertcount(mesh,vertcount):
|
||||
f.close()
|
||||
zero_file(filepath)
|
||||
return
|
||||
|
||||
|
||||
|
||||
|
||||
def write(filename, sce, ob, PREF_STARTFRAME, PREF_ENDFRAME, PREF_FPS):
|
||||
"""
|
||||
Blender.Window.WaitCursor(1)
|
||||
@@ -92,7 +87,7 @@ def write(filename, sce, ob, PREF_STARTFRAME, PREF_ENDFRAME, PREF_FPS):
|
||||
me = ob.create_mesh(True, 'PREVIEW')
|
||||
|
||||
#Flip y and z
|
||||
mat_flip= Mathutils.Matrix(\
|
||||
mat_flip = Mathutils.Matrix(\
|
||||
[1.0, 0.0, 0.0, 0.0],\
|
||||
[0.0, 0.0, 1.0, 0.0],\
|
||||
[0.0, 1.0, 0.0, 0.0],\
|
||||
@@ -101,27 +96,27 @@ def write(filename, sce, ob, PREF_STARTFRAME, PREF_ENDFRAME, PREF_FPS):
|
||||
|
||||
numverts = len(me.verts)
|
||||
|
||||
numframes = PREF_ENDFRAME-PREF_STARTFRAME+1
|
||||
PREF_FPS= float(PREF_FPS)
|
||||
numframes = PREF_ENDFRAME-PREF_STARTFRAME + 1
|
||||
PREF_FPS = float(PREF_FPS)
|
||||
f = open(filename, 'wb') #no Errors yet:Safe to create file
|
||||
|
||||
|
||||
# Write the header
|
||||
f.write(pack(">2i", numframes, numverts))
|
||||
|
||||
|
||||
# Write the frame times (should we use the time IPO??)
|
||||
f.write( pack(">%df" % (numframes), *[frame/PREF_FPS for frame in range(numframes)]) ) # seconds
|
||||
|
||||
f.write( pack(">%df" % (numframes), *[frame / PREF_FPS for frame in range(numframes)]) ) # seconds
|
||||
|
||||
#rest frame needed to keep frames in sync
|
||||
"""
|
||||
Blender.Set('curframe', PREF_STARTFRAME)
|
||||
me_tmp.getFromObject(ob.name)
|
||||
"""
|
||||
|
||||
check_vertcount(me,numverts)
|
||||
check_vertcount(me, numverts)
|
||||
me.transform(mat_flip * ob.matrix)
|
||||
f.write(pack(">%df" % (numverts*3), *[axis for v in me.verts for axis in v.co]))
|
||||
|
||||
for frame in range(PREF_STARTFRAME,PREF_ENDFRAME+1):#in order to start at desired frame
|
||||
f.write(pack(">%df" % (numverts * 3), *[axis for v in me.verts for axis in v.co]))
|
||||
|
||||
for frame in range(PREF_STARTFRAME, PREF_ENDFRAME + 1):#in order to start at desired frame
|
||||
"""
|
||||
Blender.Set('curframe', frame)
|
||||
me_tmp.getFromObject(ob.name)
|
||||
@@ -129,18 +124,18 @@ def write(filename, sce, ob, PREF_STARTFRAME, PREF_ENDFRAME, PREF_FPS):
|
||||
|
||||
sce.set_frame(frame)
|
||||
me = ob.create_mesh(True, 'PREVIEW')
|
||||
check_vertcount(me,numverts)
|
||||
check_vertcount(me, numverts)
|
||||
me.transform(mat_flip * ob.matrix)
|
||||
|
||||
|
||||
# Write the vertex data
|
||||
f.write(pack(">%df" % (numverts*3), *[axis for v in me.verts for axis in v.co]))
|
||||
|
||||
f.write(pack(">%df" % (numverts * 3), *[axis for v in me.verts for axis in v.co]))
|
||||
|
||||
"""
|
||||
me_tmp.verts= None
|
||||
"""
|
||||
f.close()
|
||||
|
||||
print ('MDD Exported: %s frames:%d\n'% (filename, numframes-1))
|
||||
|
||||
print('MDD Exported: %s frames:%d\n' % (filename, numframes - 1))
|
||||
"""
|
||||
Blender.Window.WaitCursor(0)
|
||||
Blender.Set('curframe', orig_frame)
|
||||
@@ -149,6 +144,7 @@ def write(filename, sce, ob, PREF_STARTFRAME, PREF_ENDFRAME, PREF_FPS):
|
||||
|
||||
from bpy.props import *
|
||||
|
||||
|
||||
class ExportMDD(bpy.types.Operator):
|
||||
'''Animated mesh to MDD vertex keyframe file.'''
|
||||
bl_idname = "export.mdd"
|
||||
@@ -163,37 +159,35 @@ class ExportMDD(bpy.types.Operator):
|
||||
|
||||
# List of operator properties, the attributes will be assigned
|
||||
# to the class instance from the operator settings before calling.
|
||||
path = StringProperty(name="File Path", description="File path used for exporting the MDD file", maxlen= 1024, default= "tmp.mdd")
|
||||
path = StringProperty(name="File Path", description="File path used for exporting the MDD file", maxlen= 1024, default= "")
|
||||
fps = IntProperty(name="Frames Per Second", description="Number of frames/second", min=minfps, max=maxfps, default= 25)
|
||||
start_frame = IntProperty(name="Start Frame", description="Start frame for baking", min=minframe,max=maxframe,default=1)
|
||||
end_frame = IntProperty(name="End Frame", description="End frame for baking", min=minframe, max=maxframe, default= 250)
|
||||
start_frame = IntProperty(name="Start Frame", description="Start frame for baking", min=minframe, max=maxframe, default=1)
|
||||
end_frame = IntProperty(name="End Frame", description="End frame for baking", min=minframe, max=maxframe, default=250)
|
||||
|
||||
def poll(self, context):
|
||||
ob = context.active_object
|
||||
return (ob and ob.type=='MESH')
|
||||
return (ob and ob.type == 'MESH')
|
||||
|
||||
def execute(self, context):
|
||||
if not self.properties.path:
|
||||
raise Exception("filename not set")
|
||||
write(self.properties.path, context.scene, context.active_object,
|
||||
self.properties.start_frame, self.properties.end_frame, self.properties.fps )
|
||||
return ('FINISHED',)
|
||||
|
||||
self.properties.start_frame, self.properties.end_frame, self.properties.fps)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
wm = context.manager
|
||||
wm.add_fileselect(self)
|
||||
return ('RUNNING_MODAL',)
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
bpy.ops.add(ExportMDD)
|
||||
bpy.types.register(ExportMDD)
|
||||
|
||||
# Add to a menu
|
||||
import dynamic_menu
|
||||
|
||||
def menu_func(self, context):
|
||||
default_path = bpy.data.filename.replace(".blend", ".mdd")
|
||||
self.layout.operator(ExportMDD.bl_idname, text="Vertex Keyframe Animation (.mdd)...").path = default_path
|
||||
|
||||
menu_item = dynamic_menu.add(bpy.types.INFO_MT_file_export, menu_func)
|
||||
bpy.types.INFO_MT_file_export.append(menu_func)
|
||||
|
||||
if __name__=='__main__':
|
||||
if __name__ == '__main__':
|
||||
bpy.ops.export.mdd(path="/tmp/test.mdd")
|
||||
|
||||
File diff suppressed because it is too large
@@ -4,24 +4,26 @@
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
|
||||
__author__ = "Bruce Merry"
|
||||
__version__ = "0.93"
|
||||
__bpydoc__ = """\
|
||||
This script exports Stanford PLY files from Blender. It supports normals,
|
||||
This script exports Stanford PLY files from Blender. It supports normals,
|
||||
colours, and texture coordinates per face or per vertex.
|
||||
Only one mesh can be exported at a time.
|
||||
"""
|
||||
@@ -65,242 +67,262 @@ Only one mesh can be exported at a time.
|
||||
#
|
||||
|
||||
|
||||
def rvec3d(v): return round(v[0], 6), round(v[1], 6), round(v[2], 6)
|
||||
def rvec2d(v): return round(v[0], 6), round(v[1], 6)
|
||||
def rvec3d(v):
|
||||
return round(v[0], 6), round(v[1], 6), round(v[2], 6)
|
||||
|
||||
|
||||
def rvec2d(v):
|
||||
return round(v[0], 6), round(v[1], 6)
|
||||
|
||||
|
||||
def write(filename, scene, ob, \
|
||||
EXPORT_APPLY_MODIFIERS= True,\
|
||||
EXPORT_NORMALS= True,\
|
||||
EXPORT_UV= True,\
|
||||
EXPORT_COLORS= True\
|
||||
):
|
||||
|
||||
if not filename.lower().endswith('.ply'):
|
||||
filename += '.ply'
|
||||
|
||||
if not ob:
|
||||
raise Exception("Error, Select 1 active object")
|
||||
return
|
||||
|
||||
file = open(filename, 'w')
|
||||
|
||||
|
||||
#EXPORT_EDGES = Draw.Create(0)
|
||||
"""
|
||||
is_editmode = Blender.Window.EditMode()
|
||||
if is_editmode:
|
||||
Blender.Window.EditMode(0, '', 0)
|
||||
|
||||
Window.WaitCursor(1)
|
||||
"""
|
||||
|
||||
#mesh = BPyMesh.getMeshFromObject(ob, None, EXPORT_APPLY_MODIFIERS, False, scn) # XXX
|
||||
if EXPORT_APPLY_MODIFIERS:
|
||||
mesh = ob.create_mesh(True, 'PREVIEW')
|
||||
else:
|
||||
mesh = ob.data
|
||||
|
||||
if not mesh:
|
||||
raise ("Error, could not get mesh data from active object")
|
||||
return
|
||||
|
||||
# mesh.transform(ob.matrixWorld) # XXX
|
||||
|
||||
faceUV = len(mesh.uv_textures) > 0
|
||||
vertexUV = len(mesh.sticky) > 0
|
||||
vertexColors = len(mesh.vertex_colors) > 0
|
||||
|
||||
if (not faceUV) and (not vertexUV): EXPORT_UV = False
|
||||
if not vertexColors: EXPORT_COLORS = False
|
||||
|
||||
if not EXPORT_UV: faceUV = vertexUV = False
|
||||
if not EXPORT_COLORS: vertexColors = False
|
||||
|
||||
if faceUV:
|
||||
active_uv_layer = None
|
||||
for lay in mesh.uv_textures:
|
||||
if lay.active:
|
||||
active_uv_layer= lay.data
|
||||
break
|
||||
if not active_uv_layer:
|
||||
EXPORT_UV = False
|
||||
faceUV = None
|
||||
|
||||
if vertexColors:
|
||||
active_col_layer = None
|
||||
for lay in mesh.vertex_colors:
|
||||
if lay.active:
|
||||
active_col_layer= lay.data
|
||||
if not active_col_layer:
|
||||
EXPORT_COLORS = False
|
||||
vertexColors = None
|
||||
|
||||
# incase
|
||||
color = uvcoord = uvcoord_key = normal = normal_key = None
|
||||
|
||||
mesh_verts = mesh.verts # save a lookup
|
||||
ply_verts = [] # list of dictionaries
|
||||
# vdict = {} # (index, normal, uv) -> new index
|
||||
vdict = [{} for i in range(len(mesh_verts))]
|
||||
ply_faces = [[] for f in range(len(mesh.faces))]
|
||||
vert_count = 0
|
||||
for i, f in enumerate(mesh.faces):
|
||||
|
||||
|
||||
smooth = f.smooth
|
||||
if not smooth:
|
||||
normal = tuple(f.normal)
|
||||
normal_key = rvec3d(normal)
|
||||
|
||||
if faceUV:
|
||||
uv = active_uv_layer[i]
|
||||
uv = uv.uv1, uv.uv2, uv.uv3, uv.uv4 # XXX - crufty :/
|
||||
if vertexColors:
|
||||
col = active_col_layer[i]
|
||||
col = col.color1, col.color2, col.color3, col.color4
|
||||
|
||||
f_verts= f.verts
|
||||
|
||||
pf= ply_faces[i]
|
||||
for j, vidx in enumerate(f_verts):
|
||||
v = mesh_verts[vidx]
|
||||
|
||||
if smooth:
|
||||
normal= tuple(v.normal)
|
||||
normal_key = rvec3d(normal)
|
||||
|
||||
if faceUV:
|
||||
uvcoord= uv[j][0], 1.0-uv[j][1]
|
||||
uvcoord_key = rvec2d(uvcoord)
|
||||
elif vertexUV:
|
||||
uvcoord= v.uvco[0], 1.0-v.uvco[1]
|
||||
uvcoord_key = rvec2d(uvcoord)
|
||||
|
||||
if vertexColors:
|
||||
color= col[j]
|
||||
color= int(color[0]*255.0), int(color[1]*255.0), int(color[2]*255.0)
|
||||
|
||||
|
||||
key = normal_key, uvcoord_key, color
|
||||
|
||||
vdict_local = vdict[vidx]
|
||||
pf_vidx = vdict_local.get(key) # Will be None initially
|
||||
|
||||
if pf_vidx == None: # same as vdict_local.has_key(key)
|
||||
pf_vidx = vdict_local[key] = vert_count;
|
||||
ply_verts.append((vidx, normal, uvcoord, color))
|
||||
vert_count += 1
|
||||
|
||||
pf.append(pf_vidx)
|
||||
|
||||
file.write('ply\n')
|
||||
file.write('format ascii 1.0\n')
|
||||
version = "2.5" # Blender.Get('version')
|
||||
file.write('comment Created by Blender3D %s - www.blender.org, source file: %s\n' % (version, bpy.data.filename.split('/')[-1].split('\\')[-1] ))
|
||||
|
||||
file.write('element vertex %d\n' % len(ply_verts))
|
||||
|
||||
file.write('property float x\n')
|
||||
file.write('property float y\n')
|
||||
file.write('property float z\n')
|
||||
|
||||
# XXX
|
||||
"""
|
||||
if EXPORT_NORMALS:
|
||||
file.write('property float nx\n')
|
||||
file.write('property float ny\n')
|
||||
file.write('property float nz\n')
|
||||
"""
|
||||
if EXPORT_UV:
|
||||
file.write('property float s\n')
|
||||
file.write('property float t\n')
|
||||
if EXPORT_COLORS:
|
||||
file.write('property uchar red\n')
|
||||
file.write('property uchar green\n')
|
||||
file.write('property uchar blue\n')
|
||||
|
||||
file.write('element face %d\n' % len(mesh.faces))
|
||||
file.write('property list uchar uint vertex_indices\n')
|
||||
file.write('end_header\n')
|
||||
EXPORT_APPLY_MODIFIERS=True,\
|
||||
EXPORT_NORMALS=True,\
|
||||
EXPORT_UV=True,\
|
||||
EXPORT_COLORS=True):
|
||||
|
||||
for i, v in enumerate(ply_verts):
|
||||
file.write('%.6f %.6f %.6f ' % tuple(mesh_verts[v[0]].co)) # co
|
||||
"""
|
||||
if EXPORT_NORMALS:
|
||||
file.write('%.6f %.6f %.6f ' % v[1]) # no
|
||||
"""
|
||||
if EXPORT_UV: file.write('%.6f %.6f ' % v[2]) # uv
|
||||
if EXPORT_COLORS: file.write('%u %u %u' % v[3]) # col
|
||||
file.write('\n')
|
||||
|
||||
for pf in ply_faces:
|
||||
if len(pf)==3: file.write('3 %d %d %d\n' % tuple(pf))
|
||||
else: file.write('4 %d %d %d %d\n' % tuple(pf))
|
||||
|
||||
file.close()
|
||||
print("writing", filename, "done")
|
||||
|
||||
if EXPORT_APPLY_MODIFIERS:
|
||||
bpy.data.remove_mesh(mesh)
|
||||
|
||||
# XXX
|
||||
"""
|
||||
if is_editmode:
|
||||
Blender.Window.EditMode(1, '', 0)
|
||||
"""
|
||||
if not filename.lower().endswith('.ply'):
|
||||
filename += '.ply'
|
||||
|
||||
if not ob:
|
||||
raise Exception("Error, Select 1 active object")
|
||||
return
|
||||
|
||||
file = open(filename, 'w')
|
||||
|
||||
|
||||
#EXPORT_EDGES = Draw.Create(0)
|
||||
"""
|
||||
is_editmode = Blender.Window.EditMode()
|
||||
if is_editmode:
|
||||
Blender.Window.EditMode(0, '', 0)
|
||||
|
||||
Window.WaitCursor(1)
|
||||
"""
|
||||
|
||||
#mesh = BPyMesh.getMeshFromObject(ob, None, EXPORT_APPLY_MODIFIERS, False, scn) # XXX
|
||||
if EXPORT_APPLY_MODIFIERS:
|
||||
mesh = ob.create_mesh(True, 'PREVIEW')
|
||||
else:
|
||||
mesh = ob.data
|
||||
|
||||
if not mesh:
|
||||
raise ("Error, could not get mesh data from active object")
|
||||
return
|
||||
|
||||
# mesh.transform(ob.matrixWorld) # XXX
|
||||
|
||||
faceUV = (len(mesh.uv_textures) > 0)
|
||||
vertexUV = (len(mesh.sticky) > 0)
|
||||
vertexColors = len(mesh.vertex_colors) > 0
|
||||
|
||||
if (not faceUV) and (not vertexUV):
|
||||
EXPORT_UV = False
|
||||
if not vertexColors:
|
||||
EXPORT_COLORS = False
|
||||
|
||||
if not EXPORT_UV:
|
||||
faceUV = vertexUV = False
|
||||
if not EXPORT_COLORS:
|
||||
vertexColors = False
|
||||
|
||||
if faceUV:
|
||||
active_uv_layer = None
|
||||
for lay in mesh.uv_textures:
|
||||
if lay.active:
|
||||
active_uv_layer = lay.data
|
||||
break
|
||||
if not active_uv_layer:
|
||||
EXPORT_UV = False
|
||||
faceUV = None
|
||||
|
||||
if vertexColors:
|
||||
active_col_layer = None
|
||||
for lay in mesh.vertex_colors:
|
||||
if lay.active:
|
||||
active_col_layer = lay.data
|
||||
if not active_col_layer:
|
||||
EXPORT_COLORS = False
|
||||
vertexColors = None
|
||||
|
||||
# in case
|
||||
color = uvcoord = uvcoord_key = normal = normal_key = None
|
||||
|
||||
mesh_verts = mesh.verts # save a lookup
|
||||
ply_verts = [] # list of dictionaries
|
||||
# vdict = {} # (index, normal, uv) -> new index
|
||||
vdict = [{} for i in range(len(mesh_verts))]
|
||||
ply_faces = [[] for f in range(len(mesh.faces))]
|
||||
vert_count = 0
|
||||
for i, f in enumerate(mesh.faces):
|
||||
|
||||
|
||||
smooth = f.smooth
|
||||
if not smooth:
|
||||
normal = tuple(f.normal)
|
||||
normal_key = rvec3d(normal)
|
||||
|
||||
if faceUV:
|
||||
uv = active_uv_layer[i]
|
||||
uv = uv.uv1, uv.uv2, uv.uv3, uv.uv4 # XXX - crufty :/
|
||||
if vertexColors:
|
||||
col = active_col_layer[i]
|
||||
col = col.color1, col.color2, col.color3, col.color4
|
||||
|
||||
f_verts = f.verts
|
||||
|
||||
pf = ply_faces[i]
|
||||
for j, vidx in enumerate(f_verts):
|
||||
v = mesh_verts[vidx]
|
||||
|
||||
if smooth:
|
||||
normal = tuple(v.normal)
|
||||
normal_key = rvec3d(normal)
|
||||
|
||||
if faceUV:
|
||||
uvcoord = uv[j][0], 1.0-uv[j][1]
|
||||
uvcoord_key = rvec2d(uvcoord)
|
||||
elif vertexUV:
|
||||
uvcoord = v.uvco[0], 1.0 - v.uvco[1]
|
||||
uvcoord_key = rvec2d(uvcoord)
|
||||
|
||||
if vertexColors:
|
||||
color = col[j]
|
||||
color = int(color[0] * 255.0), int(color[1] * 255.0), int(color[2] * 255.0)
|
||||
|
||||
|
||||
key = normal_key, uvcoord_key, color
|
||||
|
||||
vdict_local = vdict[vidx]
|
||||
pf_vidx = vdict_local.get(key) # Will be None initially
|
||||
|
||||
if pf_vidx == None: # same as vdict_local.has_key(key)
|
||||
pf_vidx = vdict_local[key] = vert_count
|
||||
ply_verts.append((vidx, normal, uvcoord, color))
|
||||
vert_count += 1
|
||||
|
||||
pf.append(pf_vidx)
|
||||
|
||||
file.write('ply\n')
|
||||
file.write('format ascii 1.0\n')
|
||||
version = "2.5" # Blender.Get('version')
|
||||
file.write('comment Created by Blender3D %s - www.blender.org, source file: %s\n' % (version, bpy.data.filename.split('/')[-1].split('\\')[-1]))
|
||||
|
||||
file.write('element vertex %d\n' % len(ply_verts))
|
||||
|
||||
file.write('property float x\n')
|
||||
file.write('property float y\n')
|
||||
file.write('property float z\n')
|
||||
|
||||
# XXX
|
||||
"""
|
||||
if EXPORT_NORMALS:
|
||||
file.write('property float nx\n')
|
||||
file.write('property float ny\n')
|
||||
file.write('property float nz\n')
|
||||
"""
|
||||
if EXPORT_UV:
|
||||
file.write('property float s\n')
|
||||
file.write('property float t\n')
|
||||
if EXPORT_COLORS:
|
||||
file.write('property uchar red\n')
|
||||
file.write('property uchar green\n')
|
||||
file.write('property uchar blue\n')
|
||||
|
||||
file.write('element face %d\n' % len(mesh.faces))
|
||||
file.write('property list uchar uint vertex_indices\n')
|
||||
file.write('end_header\n')
|
||||
|
||||
for i, v in enumerate(ply_verts):
|
||||
file.write('%.6f %.6f %.6f ' % tuple(mesh_verts[v[0]].co)) # co
|
||||
"""
|
||||
if EXPORT_NORMALS:
|
||||
file.write('%.6f %.6f %.6f ' % v[1]) # no
|
||||
"""
|
||||
if EXPORT_UV:
|
||||
file.write('%.6f %.6f ' % v[2]) # uv
|
||||
if EXPORT_COLORS:
|
||||
file.write('%u %u %u' % v[3]) # col
|
||||
file.write('\n')
|
||||
|
||||
for pf in ply_faces:
|
||||
if len(pf)==3:
|
||||
file.write('3 %d %d %d\n' % tuple(pf))
|
||||
else:
|
||||
file.write('4 %d %d %d %d\n' % tuple(pf))
|
||||
|
||||
file.close()
|
||||
print("writing", filename, "done")
|
||||
|
||||
if EXPORT_APPLY_MODIFIERS:
|
||||
bpy.data.remove_mesh(mesh)
|
||||
|
||||
# XXX
|
||||
"""
|
||||
if is_editmode:
|
||||
Blender.Window.EditMode(1, '', 0)
|
||||
"""
|
||||
|
||||
from bpy.props import *
|
||||
|
||||
|
||||
class ExportPLY(bpy.types.Operator):
|
||||
'''Export a single object as a stanford PLY with normals, colours and texture coordinates.'''
|
||||
bl_idname = "export.ply"
|
||||
bl_label = "Export PLY"
|
||||
|
||||
# List of operator properties, the attributes will be assigned
|
||||
# to the class instance from the operator settings before calling.
|
||||
|
||||
|
||||
path = StringProperty(name="File Path", description="File path used for exporting the PLY file", maxlen= 1024, default= "")
|
||||
use_modifiers = BoolProperty(name="Apply Modifiers", description="Apply Modifiers to the exported mesh", default= True)
|
||||
use_normals = BoolProperty(name="Export Normals", description="Export Normals for smooth and hard shaded faces", default= True)
|
||||
use_uvs = BoolProperty(name="Export UVs", description="Export the active UV layer", default= True)
|
||||
use_colors = BoolProperty(name="Export Vertex Colors", description="Export the active vertex color layer", default= True)
|
||||
|
||||
|
||||
def poll(self, context):
|
||||
return context.active_object != None
|
||||
|
||||
def execute(self, context):
|
||||
# print("Selected: " + context.active_object.name)
|
||||
'''Export a single object as a stanford PLY with normals, colours and texture coordinates.'''
|
||||
bl_idname = "export.ply"
|
||||
bl_label = "Export PLY"
|
||||
|
||||
if not self.properties.path:
|
||||
raise Exception("filename not set")
|
||||
|
||||
write(self.properties.path, context.scene, context.active_object,\
|
||||
EXPORT_APPLY_MODIFIERS = self.properties.use_modifiers,
|
||||
EXPORT_NORMALS = self.properties.use_normals,
|
||||
EXPORT_UV = self.properties.use_uvs,
|
||||
EXPORT_COLORS = self.properties.use_colors,
|
||||
)
|
||||
|
||||
return ('FINISHED',)
|
||||
|
||||
def invoke(self, context, event):
|
||||
wm = context.manager
|
||||
wm.add_fileselect(self)
|
||||
return ('RUNNING_MODAL',)
|
||||
# List of operator properties, the attributes will be assigned
|
||||
# to the class instance from the operator settings before calling.
|
||||
|
||||
|
||||
bpy.ops.add(ExportPLY)
|
||||
path = StringProperty(name="File Path", description="File path used for exporting the PLY file", maxlen=1024, default="")
|
||||
use_modifiers = BoolProperty(name="Apply Modifiers", description="Apply Modifiers to the exported mesh", default=True)
|
||||
use_normals = BoolProperty(name="Normals", description="Export Normals for smooth and hard shaded faces", default=True)
|
||||
use_uvs = BoolProperty(name="UVs", description="Export the active UV layer", default=True)
|
||||
use_colors = BoolProperty(name="Vertex Colors", description="Export the active vertex color layer", default=True)
|
||||
|
||||
import dynamic_menu
|
||||
def poll(self, context):
|
||||
return context.active_object != None
|
||||
|
||||
def execute(self, context):
|
||||
# print("Selected: " + context.active_object.name)
|
||||
|
||||
if not self.properties.path:
|
||||
raise Exception("filename not set")
|
||||
|
||||
write(self.properties.path, context.scene, context.active_object,\
|
||||
EXPORT_APPLY_MODIFIERS=self.properties.use_modifiers,
|
||||
EXPORT_NORMALS=self.properties.use_normals,
|
||||
EXPORT_UV=self.properties.use_uvs,
|
||||
EXPORT_COLORS=self.properties.use_colors,
|
||||
)
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
wm = context.manager
|
||||
wm.add_fileselect(self)
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
props = self.properties
|
||||
|
||||
row = layout.row()
|
||||
row.prop(props, "use_modifiers")
|
||||
row.prop(props, "use_normals")
|
||||
row = layout.row()
|
||||
row.prop(props, "use_uvs")
|
||||
row.prop(props, "use_colors")
|
||||
|
||||
|
||||
bpy.types.register(ExportPLY)
|
||||
|
||||
def menu_func(self, context):
|
||||
default_path = bpy.data.filename.replace(".blend", ".ply")
|
||||
self.layout.operator(ExportPLY.bl_idname, text="Stanford (.ply)...").path = default_path
|
||||
|
||||
menu_item = dynamic_menu.add(bpy.types.INFO_MT_file_export, menu_func)
|
||||
bpy.types.INFO_MT_file_export.append(menu_func)
|
||||
|
||||
if __name__ == "__main__":
|
||||
bpy.ops.export.ply(path="/tmp/test.ply")
|
||||
bpy.ops.export.ply(path="/tmp/test.ply")
|
||||
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -72,7 +72,8 @@ class Balancer:
|
||||
|
||||
def balance(self, jobs):
|
||||
if jobs:
|
||||
jobs.sort(key=self.sortKey)
|
||||
# use inline copy to make sure the list is still accessible while sorting
|
||||
jobs[:] = sorted(jobs, key=self.sortKey)
|
||||
return jobs[0]
|
||||
else:
|
||||
return None
|
||||
@@ -80,11 +81,33 @@ class Balancer:
|
||||
# ==========================
|
||||
|
||||
class RatingUsage(RatingRule):
|
||||
def __str__(self):
|
||||
return "Usage rating"
|
||||
|
||||
def rate(self, job):
|
||||
# less usage is better
|
||||
return job.usage / job.priority
|
||||
|
||||
class RatingUsageByCategory(RatingRule):
|
||||
def __str__(self):
|
||||
return "Usage per category rating"
|
||||
|
||||
def __init__(self, get_jobs):
|
||||
self.getJobs = get_jobs
|
||||
def rate(self, job):
|
||||
total_category_usage = sum([j.usage for j in self.getJobs() if j.category == job.category])
|
||||
maximum_priority = max([j.priority for j in self.getJobs() if j.category == job.category])
|
||||
|
||||
# less usage is better
|
||||
return total_category_usage / maximum_priority
|
||||
|
||||
class NewJobPriority(PriorityRule):
|
||||
def str_limit(self):
|
||||
return "less than %i frame%s done" % (self.limit, "s" if self.limit > 1 else "")
|
||||
|
||||
def __str__(self):
|
||||
return "Priority to new jobs"
|
||||
|
||||
def __init__(self, limit = 1):
|
||||
self.limit = limit
|
||||
|
||||
@@ -92,6 +115,12 @@ class NewJobPriority(PriorityRule):
|
||||
return job.countFrames(status = DONE) < self.limit
|
||||
|
||||
class MinimumTimeBetweenDispatchPriority(PriorityRule):
|
||||
def str_limit(self):
|
||||
return "more than %i minute%s since last" % (self.limit, "s" if self.limit > 1 else "")
|
||||
|
||||
def __str__(self):
|
||||
return "Priority to jobs that haven't been dispatched recently"
|
||||
|
||||
def __init__(self, limit = 10):
|
||||
self.limit = limit
|
||||
|
||||
@@ -99,10 +128,19 @@ class MinimumTimeBetweenDispatchPriority(PriorityRule):
|
||||
return job.countFrames(status = DISPATCHED) == 0 and (time.time() - job.last_dispatched) / 60 > self.limit
|
||||
|
||||
class ExcludeQueuedEmptyJob(ExclusionRule):
|
||||
def __str__(self):
|
||||
return "Exclude queued and empty jobs"
|
||||
|
||||
def test(self, job):
|
||||
return job.status != JOB_QUEUED or job.countFrames(status = QUEUED) == 0
|
||||
|
||||
class ExcludeSlavesLimit(ExclusionRule):
|
||||
def str_limit(self):
|
||||
return "more than %.0f%% of all slaves" % (self.limit * 100)
|
||||
|
||||
def __str__(self):
|
||||
return "Exclude jobs that would use too many slaves"
|
||||
|
||||
def __init__(self, count_jobs, count_slaves, limit = 0.75):
|
||||
self.count_jobs = count_jobs
|
||||
self.count_slaves = count_slaves
|
||||
|
||||
@@ -106,7 +106,6 @@ def clientSendJob(conn, scene, anim = False):
|
||||
|
||||
job_name = netsettings.job_name
|
||||
path, name = os.path.split(filename)
|
||||
path += os.sep
|
||||
if job_name == "[default]":
|
||||
job_name = name
|
||||
|
||||
@@ -127,7 +126,7 @@ def clientSendJob(conn, scene, anim = False):
|
||||
# FLUID + POINT CACHE
|
||||
###########################
|
||||
root, ext = os.path.splitext(name)
|
||||
default_path = path + "blendcache_" + root + os.sep # need an API call for that
|
||||
default_path = path + os.sep + "blendcache_" + root + os.sep # need an API call for that
|
||||
|
||||
for object in bpy.data.objects:
|
||||
for modifier in object.modifiers:
|
||||
@@ -147,9 +146,10 @@ def clientSendJob(conn, scene, anim = False):
|
||||
for psys in object.particle_systems:
|
||||
addPointCache(job, object, psys.point_cache, default_path)
|
||||
|
||||
# print(job.files)
|
||||
#print(job.files)
|
||||
|
||||
job.name = job_name
|
||||
job.category = netsettings.job_category
|
||||
|
||||
for slave in netrender.blacklist:
|
||||
job.blacklist.append(slave.id)
|
||||
@@ -165,18 +165,18 @@ def clientSendJob(conn, scene, anim = False):
|
||||
|
||||
# if not ACCEPTED (but not processed), send files
|
||||
if response.status == http.client.ACCEPTED:
|
||||
for filepath, start, end in job.files:
|
||||
f = open(filepath, "rb")
|
||||
conn.request("PUT", "/file", f, headers={"job-id": job_id, "job-file": filepath})
|
||||
for rfile in job.files:
|
||||
f = open(rfile.filepath, "rb")
|
||||
conn.request("PUT", fileURL(job_id, rfile.index), f)
|
||||
f.close()
|
||||
response = conn.getresponse()
|
||||
|
||||
# server will reply with NOT_FOUD until all files are found
|
||||
# server will reply with ACCEPTED until all files are found
|
||||
|
||||
return job_id
|
||||
|
||||
def requestResult(conn, job_id, frame):
|
||||
conn.request("GET", "/render", headers={"job-id": job_id, "job-frame":str(frame)})
|
||||
conn.request("GET", renderURL(job_id, frame))
|
||||
|
||||
@rnaType
|
||||
class NetworkRenderEngine(bpy.types.RenderEngine):
|
||||
@@ -215,6 +215,8 @@ class NetworkRenderEngine(bpy.types.RenderEngine):
|
||||
|
||||
self.update_stats("", "Network render exporting")
|
||||
|
||||
new_job = False
|
||||
|
||||
job_id = netsettings.job_id
|
||||
|
||||
# reading back result
|
||||
@@ -225,14 +227,25 @@ class NetworkRenderEngine(bpy.types.RenderEngine):
|
||||
response = conn.getresponse()
|
||||
|
||||
if response.status == http.client.NO_CONTENT:
|
||||
new_job = True
|
||||
netsettings.job_id = clientSendJob(conn, scene)
|
||||
job_id = netsettings.job_id
|
||||
|
||||
requestResult(conn, job_id, scene.current_frame)
|
||||
response = conn.getresponse()
|
||||
|
||||
while response.status == http.client.ACCEPTED and not self.test_break():
|
||||
time.sleep(1)
|
||||
requestResult(conn, job_id, scene.current_frame)
|
||||
response = conn.getresponse()
|
||||
|
||||
# cancel new jobs (animate on network) on break
|
||||
if self.test_break() and new_job:
|
||||
conn.request("POST", cancelURL(job_id))
|
||||
response = conn.getresponse()
|
||||
print( response.status, response.reason )
|
||||
netsettings.job_id = 0
|
||||
|
||||
if response.status != http.client.OK:
|
||||
conn.close()
|
||||
return
|
||||
|
||||
@@ -25,11 +25,9 @@ import netrender.model
|
||||
import netrender.balancing
|
||||
import netrender.master_html
|
||||
|
||||
class MRenderFile:
|
||||
def __init__(self, filepath, start, end):
|
||||
self.filepath = filepath
|
||||
self.start = start
|
||||
self.end = end
|
||||
class MRenderFile(netrender.model.RenderFile):
|
||||
def __init__(self, filepath, index, start, end):
|
||||
super().__init__(filepath, index, start, end)
|
||||
self.found = False
|
||||
|
||||
def test(self):
|
||||
@@ -60,37 +58,41 @@ class MRenderSlave(netrender.model.RenderSlave):
|
||||
self.job = None
|
||||
|
||||
class MRenderJob(netrender.model.RenderJob):
|
||||
def __init__(self, job_id, job_type, name, files, chunks = 1, priority = 1, blacklist = []):
|
||||
super().__init__()
|
||||
def __init__(self, job_id, job_info):
|
||||
super().__init__(job_info)
|
||||
self.id = job_id
|
||||
self.type = job_type
|
||||
self.name = name
|
||||
self.files = files
|
||||
self.frames = []
|
||||
self.chunks = chunks
|
||||
self.priority = priority
|
||||
self.usage = 0.0
|
||||
self.blacklist = blacklist
|
||||
self.last_dispatched = time.time()
|
||||
|
||||
# force one chunk for process jobs
|
||||
if self.type == netrender.model.JOB_PROCESS:
|
||||
self.chunks = 1
|
||||
|
||||
# Force WAITING status on creation
|
||||
self.status = JOB_WAITING
|
||||
|
||||
# special server properties
|
||||
self.last_update = 0
|
||||
self.save_path = ""
|
||||
self.files_map = {path: MRenderFile(path, start, end) for path, start, end in files}
|
||||
self.status = JOB_WAITING
|
||||
self.files = [MRenderFile(rfile.filepath, rfile.index, rfile.start, rfile.end) for rfile in job_info.files]
|
||||
|
||||
def save(self):
|
||||
if self.save_path:
|
||||
f = open(self.save_path + "job.txt", "w")
|
||||
f.write(repr(self.serialize()))
|
||||
f.close()
|
||||
|
||||
def edit(self, info_map):
|
||||
if "status" in info_map:
|
||||
self.status = info_map["status"]
|
||||
|
||||
if "priority" in info_map:
|
||||
self.priority = info_map["priority"]
|
||||
|
||||
if "chunks" in info_map:
|
||||
self.chunks = info_map["chunks"]
|
||||
|
||||
def testStart(self):
|
||||
for f in self.files_map.values():
|
||||
for f in self.files:
|
||||
if not f.test():
|
||||
return False
|
||||
|
||||
@@ -149,6 +151,7 @@ class MRenderFrame(netrender.model.RenderFrame):
|
||||
|
||||
def reset(self, all):
|
||||
if all or self.status == ERROR:
|
||||
self.log_path = None
|
||||
self.slave = None
|
||||
self.time = 0
|
||||
self.status = QUEUED
|
||||
@@ -158,6 +161,12 @@ class MRenderFrame(netrender.model.RenderFrame):
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
file_pattern = re.compile("/file_([a-zA-Z0-9]+)_([0-9]+)")
|
||||
render_pattern = re.compile("/render_([a-zA-Z0-9]+)_([0-9]+).exr")
|
||||
log_pattern = re.compile("/log_([a-zA-Z0-9]+)_([0-9]+).log")
|
||||
reset_pattern = re.compile("/reset(all|)_([a-zA-Z0-9]+)_([0-9]+)")
|
||||
cancel_pattern = re.compile("/cancel_([a-zA-Z0-9]+)")
|
||||
edit_pattern = re.compile("/edit_([a-zA-Z0-9]+)")
|
||||
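# Illustration of the new URL scheme (not part of the commit): requests are now routed by
# encoding the job id and frame number in the path and matching it against the patterns
# above, instead of passing custom job-id/job-frame headers. Job id below is hypothetical.
import re

render_pattern = re.compile("/render_([a-zA-Z0-9]+)_([0-9]+).exr")

match = render_pattern.match("/render_4a7bc21_12.exr")
if match:
    job_id = match.groups()[0]             # "4a7bc21"
    frame_number = int(match.groups()[1])  # 12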
|
||||
class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
def send_head(self, code = http.client.OK, headers = {}, content = "application/octet-stream"):
|
||||
@@ -202,62 +211,74 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
self.server.stats("", "Version check")
|
||||
self.wfile.write(VERSION)
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/render":
|
||||
job_id = self.headers['job-id']
|
||||
job_frame = int(self.headers['job-frame'])
|
||||
elif self.path.startswith("/render"):
|
||||
match = render_pattern.match(self.path)
|
||||
|
||||
job = self.server.getJobID(job_id)
|
||||
if match:
|
||||
job_id = match.groups()[0]
|
||||
frame_number = int(match.groups()[1])
|
||||
|
||||
if job:
|
||||
frame = job[job_frame]
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if frame:
|
||||
if frame.status in (QUEUED, DISPATCHED):
|
||||
self.send_head(http.client.ACCEPTED)
|
||||
elif frame.status == DONE:
|
||||
self.server.stats("", "Sending result to client")
|
||||
f = open(job.save_path + "%04d" % job_frame + ".exr", 'rb')
|
||||
|
||||
self.send_head()
|
||||
|
||||
shutil.copyfileobj(f, self.wfile)
|
||||
|
||||
f.close()
|
||||
elif frame.status == ERROR:
|
||||
self.send_head(http.client.PARTIAL_CONTENT)
|
||||
if job:
|
||||
frame = job[frame_number]
|
||||
|
||||
if frame:
|
||||
if frame.status in (QUEUED, DISPATCHED):
|
||||
self.send_head(http.client.ACCEPTED)
|
||||
elif frame.status == DONE:
|
||||
self.server.stats("", "Sending result to client")
|
||||
f = open(job.save_path + "%04d" % frame_number + ".exr", 'rb')
|
||||
|
||||
self.send_head()
|
||||
|
||||
shutil.copyfileobj(f, self.wfile)
|
||||
|
||||
f.close()
|
||||
elif frame.status == ERROR:
|
||||
self.send_head(http.client.PARTIAL_CONTENT)
|
||||
else:
|
||||
# no such frame
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else:
|
||||
# no such frame
|
||||
# no such job id
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else:
|
||||
# no such job id
|
||||
# invalid url
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/log":
|
||||
job_id = self.headers['job-id']
|
||||
job_frame = int(self.headers['job-frame'])
|
||||
elif self.path.startswith("/log"):
|
||||
match = log_pattern.match(self.path)
|
||||
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
frame = job[job_frame]
|
||||
if match:
|
||||
job_id = match.groups()[0]
|
||||
frame_number = int(match.groups()[1])
|
||||
|
||||
if frame:
|
||||
if not frame.log_path or frame.status in (QUEUED, DISPATCHED):
|
||||
self.send_head(http.client.PROCESSING)
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
frame = job[frame_number]
|
||||
|
||||
if frame:
|
||||
if not frame.log_path or frame.status in (QUEUED, DISPATCHED):
|
||||
self.send_head(http.client.PROCESSING)
|
||||
else:
|
||||
self.server.stats("", "Sending log to client")
|
||||
f = open(frame.log_path, 'rb')
|
||||
|
||||
self.send_head(content = "text/plain")
|
||||
|
||||
shutil.copyfileobj(f, self.wfile)
|
||||
|
||||
f.close()
|
||||
else:
|
||||
self.server.stats("", "Sending log to client")
|
||||
f = open(frame.log_path, 'rb')
|
||||
|
||||
self.send_head()
|
||||
|
||||
shutil.copyfileobj(f, self.wfile)
|
||||
|
||||
f.close()
|
||||
# no such frame
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else:
|
||||
# no such frame
|
||||
# no such job id
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else:
|
||||
# no such job id
|
||||
# invalid URL
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/status":
|
||||
@@ -330,19 +351,24 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
else: # invalid slave id
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/file":
|
||||
slave_id = self.headers['slave-id']
|
||||
elif self.path.startswith("/file"):
|
||||
match = file_pattern.match(self.path)
|
||||
|
||||
slave = self.server.getSeenSlave(slave_id)
|
||||
|
||||
if slave: # only if slave id is valid
|
||||
job_id = self.headers['job-id']
|
||||
job_file = self.headers['job-file']
|
||||
if match:
|
||||
slave_id = self.headers['slave-id']
|
||||
slave = self.server.getSeenSlave(slave_id)
|
||||
|
||||
if not slave:
|
||||
# invalid slave id
|
||||
print("invalid slave id")
|
||||
|
||||
job_id = match.groups()[0]
|
||||
file_index = int(match.groups()[1])
|
||||
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
render_file = job.files_map.get(job_file, None)
|
||||
render_file = job.files[file_index]
|
||||
|
||||
if render_file:
|
||||
self.server.stats("", "Sending file to slave")
|
||||
@@ -358,7 +384,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
else:
|
||||
# no such job id
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else: # invalid slave id
|
||||
else: # invalid url
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/slaves":
|
||||
@@ -393,7 +419,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
|
||||
job_id = self.server.nextJobID()
|
||||
|
||||
job = MRenderJob(job_id, job_info.type, job_info.name, job_info.files, chunks = job_info.chunks, priority = job_info.priority, blacklist = job_info.blacklist)
|
||||
job = MRenderJob(job_id, job_info)
|
||||
|
||||
for frame in job_info.frames:
|
||||
frame = job.addFrame(frame.number, frame.command)
|
||||
@@ -403,23 +429,50 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
headers={"job-id": job_id}
|
||||
|
||||
if job.testStart():
|
||||
self.server.stats("", "New job, missing files")
|
||||
self.server.stats("", "New job, started")
|
||||
self.send_head(headers=headers)
|
||||
else:
|
||||
self.server.stats("", "New job, started")
|
||||
self.server.stats("", "New job, missing files (%i total)" % len(job.files))
|
||||
self.send_head(http.client.ACCEPTED, headers=headers)
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/cancel":
|
||||
job_id = self.headers.get('job-id', "")
|
||||
elif self.path.startswith("/edit"):
|
||||
match = edit_pattern.match(self.path)
|
||||
|
||||
if match:
|
||||
job_id = match.groups()[0]
|
||||
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
self.server.stats("", "Cancelling job")
|
||||
self.server.removeJob(job)
|
||||
self.send_head()
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
length = int(self.headers['content-length'])
|
||||
info_map = eval(str(self.rfile.read(length), encoding='utf8'))
|
||||
|
||||
job.edit(info_map)
|
||||
self.send_head()
|
||||
else:
|
||||
# no such job id
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else:
|
||||
# no such job id
|
||||
# invalid url
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
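# Hedged example of driving the /edit_<job_id> handler above from a client (host, port and
# job id are placeholders): the body is a repr()'d dict that the master eval()s into
# info_map, and only the "status", "priority" and "chunks" keys are honoured.
import http.client

conn = http.client.HTTPConnection("localhost", 8000)
conn.request("POST", "/edit_4a7bc21", bytes(repr({'priority': 2, 'chunks': 5}), encoding='utf8'))
print(conn.getresponse().status)   # http.client.OK on success, NO_CONTENT for unknown jobs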
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path.startswith("/cancel"):
|
||||
match = cancel_pattern.match(self.path)
|
||||
|
||||
if match:
|
||||
job_id = match.groups()[0]
|
||||
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
self.server.stats("", "Cancelling job")
|
||||
self.server.removeJob(job)
|
||||
self.send_head()
|
||||
else:
|
||||
# no such job id
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else:
|
||||
# invalid url
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
@@ -430,31 +483,36 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
|
||||
self.send_head()
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/reset":
|
||||
job_id = self.headers.get('job-id', "")
|
||||
job_frame = int(self.headers.get('job-frame', "-1"))
|
||||
all = bool(self.headers.get('reset-all', "False"))
|
||||
elif self.path.startswith("/reset"):
|
||||
match = reset_pattern.match(self.path)
|
||||
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
if job_frame != -1:
|
||||
|
||||
frame = job[job_frame]
|
||||
if frame:
|
||||
self.server.stats("", "Reset job frame")
|
||||
frame.reset(all)
|
||||
self.send_head()
|
||||
else:
|
||||
# no such frame
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
if match:
|
||||
all = match.groups()[0] == 'all'
|
||||
job_id = match.groups()[1]
|
||||
job_frame = int(match.groups()[2])
|
||||
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
if job_frame != 0:
|
||||
|
||||
else:
|
||||
self.server.stats("", "Reset job")
|
||||
job.reset(all)
|
||||
self.send_head()
|
||||
|
||||
else: # job not found
|
||||
frame = job[job_frame]
|
||||
if frame:
|
||||
self.server.stats("", "Reset job frame")
|
||||
frame.reset(all)
|
||||
self.send_head()
|
||||
else:
|
||||
# no such frame
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
|
||||
else:
|
||||
self.server.stats("", "Reset job")
|
||||
job.reset(all)
|
||||
self.send_head()
|
||||
|
||||
else: # job not found
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else: # invalid url
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
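# Reset URL semantics, inferred from reset_pattern and the branch above (sketch only):
# "/reset_<job>_<frame>" requeues a single errored frame, frame 0 resets the whole job,
# and the "resetall" form also requeues frames that did not error.
def resetURL(job_id, frame_number=0, error_only=True):   # assumed helper, not in this hunk
    return "/reset%s_%s_%i" % ("" if error_only else "all", job_id, frame_number)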
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/slave":
|
||||
@@ -463,22 +521,22 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
|
||||
self.server.stats("", "New slave connected")
|
||||
|
||||
slave_info = netrender.model.RenderSlave.materialize(eval(str(self.rfile.read(length), encoding='utf8')))
|
||||
slave_info = netrender.model.RenderSlave.materialize(eval(str(self.rfile.read(length), encoding='utf8')), cache = False)
|
||||
|
||||
slave_id = self.server.addSlave(slave_info.name, self.client_address, slave_info.stats)
|
||||
|
||||
self.send_head(headers = {"slave-id": slave_id})
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/log":
|
||||
slave_id = self.headers['slave-id']
|
||||
length = int(self.headers['content-length'])
|
||||
|
||||
log_info = netrender.model.LogFile.materialize(eval(str(self.rfile.read(length), encoding='utf8')))
|
||||
|
||||
slave_id = log_info.slave_id
|
||||
|
||||
slave = self.server.getSeenSlave(slave_id)
|
||||
|
||||
if slave: # only if slave id is valid
|
||||
length = int(self.headers['content-length'])
|
||||
|
||||
log_info = netrender.model.LogFile.materialize(eval(str(self.rfile.read(length), encoding='utf8')))
|
||||
|
||||
job = self.server.getJobID(log_info.job_id)
|
||||
|
||||
if job:
|
||||
@@ -498,53 +556,64 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
def do_PUT(self):
|
||||
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
if self.path == "/file":
|
||||
self.server.stats("", "Receiving job")
|
||||
if self.path.startswith("/file"):
|
||||
match = file_pattern.match(self.path)
|
||||
|
||||
length = int(self.headers['content-length'])
|
||||
job_id = self.headers['job-id']
|
||||
job_file = self.headers['job-file']
|
||||
if match:
|
||||
self.server.stats("", "Receiving job")
|
||||
|
||||
length = int(self.headers['content-length'])
|
||||
job_id = match.groups()[0]
|
||||
file_index = int(match.groups()[1])
|
||||
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
render_file = job.files_map.get(job_file, None)
|
||||
|
||||
if render_file:
|
||||
main_file = job.files[0][0] # filename of the first file
|
||||
if job:
|
||||
|
||||
main_path, main_name = os.path.split(main_file)
|
||||
render_file = job.files[file_index]
|
||||
|
||||
if job_file != main_file:
|
||||
file_path = prefixPath(job.save_path, job_file, main_path)
|
||||
else:
|
||||
file_path = job.save_path + main_name
|
||||
|
||||
buf = self.rfile.read(length)
|
||||
|
||||
# add same temp file + renames as slave
|
||||
|
||||
f = open(file_path, "wb")
|
||||
f.write(buf)
|
||||
f.close()
|
||||
del buf
|
||||
|
||||
render_file.filepath = file_path # set the new path
|
||||
|
||||
if job.testStart():
|
||||
self.server.stats("", "File upload, starting job")
|
||||
self.send_head(http.client.OK)
|
||||
else:
|
||||
self.server.stats("", "File upload, file missings")
|
||||
self.send_head(http.client.ACCEPTED)
|
||||
else: # invalid file
|
||||
if render_file:
|
||||
main_file = job.files[0].filepath # filename of the first file
|
||||
|
||||
main_path, main_name = os.path.split(main_file)
|
||||
|
||||
if file_index > 0:
|
||||
file_path = prefixPath(job.save_path, render_file.filepath, main_path)
|
||||
else:
|
||||
file_path = job.save_path + main_name
|
||||
|
||||
buf = self.rfile.read(length)
|
||||
|
||||
# add same temp file + renames as slave
|
||||
|
||||
f = open(file_path, "wb")
|
||||
f.write(buf)
|
||||
f.close()
|
||||
del buf
|
||||
|
||||
render_file.filepath = file_path # set the new path
|
||||
|
||||
if job.testStart():
|
||||
self.server.stats("", "File upload, starting job")
|
||||
self.send_head(http.client.OK)
|
||||
else:
|
||||
self.server.stats("", "File upload, file missings")
|
||||
self.send_head(http.client.ACCEPTED)
|
||||
else: # invalid file
|
||||
print("file not found", job_id, file_index)
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else: # job not found
|
||||
print("job not found", job_id, file_index)
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else: # job not found
|
||||
else: # invalid url
|
||||
print("no match")
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/render":
|
||||
self.server.stats("", "Receiving render result")
|
||||
|
||||
# need some message content here or the slave doesn't like it
|
||||
self.wfile.write(bytes("foo", encoding='utf8'))
|
||||
|
||||
slave_id = self.headers['slave-id']
|
||||
|
||||
@@ -574,7 +643,9 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
del buf
|
||||
elif job_result == ERROR:
|
||||
# blacklist slave on this job on error
|
||||
job.blacklist.append(slave.id)
|
||||
# slave might already be in the blacklist if the whole chunk errored
|
||||
if not slave.id in job.blacklist:
|
||||
job.blacklist.append(slave.id)
|
||||
|
||||
self.server.stats("", "Receiving result")
|
||||
|
||||
@@ -593,33 +664,38 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
else: # invalid slave id
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/log":
|
||||
elif self.path.startswith("/log"):
|
||||
self.server.stats("", "Receiving log file")
|
||||
|
||||
match = log_pattern.match(self.path)
|
||||
|
||||
job_id = self.headers['job-id']
|
||||
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
job_frame = int(self.headers['job-frame'])
|
||||
if match:
|
||||
job_id = match.groups()[0]
|
||||
|
||||
frame = job[job_frame]
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if frame and frame.log_path:
|
||||
length = int(self.headers['content-length'])
|
||||
buf = self.rfile.read(length)
|
||||
f = open(frame.log_path, 'ab')
|
||||
f.write(buf)
|
||||
f.close()
|
||||
if job:
|
||||
job_frame = int(match.groups()[1])
|
||||
|
||||
frame = job[job_frame]
|
||||
|
||||
if frame and frame.log_path:
|
||||
length = int(self.headers['content-length'])
|
||||
buf = self.rfile.read(length)
|
||||
f = open(frame.log_path, 'ab')
|
||||
f.write(buf)
|
||||
f.close()
|
||||
|
||||
del buf
|
||||
|
||||
del buf
|
||||
|
||||
self.server.getSeenSlave(self.headers['slave-id'])
|
||||
|
||||
self.send_head()
|
||||
else: # frame not found
|
||||
self.server.getSeenSlave(self.headers['slave-id'])
|
||||
|
||||
self.send_head()
|
||||
else: # frame not found
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else: # job not found
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
else: # job not found
|
||||
else: # invalid url
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
|
||||
class RenderMasterServer(http.server.HTTPServer):
|
||||
@@ -632,9 +708,10 @@ class RenderMasterServer(http.server.HTTPServer):
|
||||
self.job_id = 0
|
||||
self.path = path + "master_" + str(os.getpid()) + os.sep
|
||||
|
||||
self.slave_timeout = 2
|
||||
self.slave_timeout = 30 # 30 mins: need a parameter for that
|
||||
|
||||
self.balancer = netrender.balancing.Balancer()
|
||||
self.balancer.addRule(netrender.balancing.RatingUsageByCategory(self.getJobs))
|
||||
self.balancer.addRule(netrender.balancing.RatingUsage())
|
||||
self.balancer.addException(netrender.balancing.ExcludeQueuedEmptyJob())
|
||||
self.balancer.addException(netrender.balancing.ExcludeSlavesLimit(self.countJobs, self.countSlaves, limit = 0.9))
|
||||
@@ -707,6 +784,9 @@ class RenderMasterServer(http.server.HTTPServer):
|
||||
def balance(self):
|
||||
self.balancer.balance(self.jobs)
|
||||
|
||||
def getJobs(self):
|
||||
return self.jobs
|
||||
|
||||
def countJobs(self, status = JOB_QUEUED):
|
||||
total = 0
|
||||
for j in self.jobs:
|
||||
@@ -776,3 +856,5 @@ def runMaster(address, broadcast, path, update_stats, test_break):
|
||||
print("broadcasting address")
|
||||
s.sendto(bytes("%i" % address[1], encoding='utf8'), 0, ('<broadcast>', 8000))
|
||||
start_time = time.time()
|
||||
|
||||
httpd.server_close()
|
||||
|
||||
@@ -16,20 +16,40 @@
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
import shutil
|
||||
from netrender.utils import *
|
||||
|
||||
src_folder = os.path.split(__file__)[0]
|
||||
|
||||
def get(handler):
|
||||
def output(text):
|
||||
handler.wfile.write(bytes(text, encoding='utf8'))
|
||||
|
||||
def head(title):
|
||||
output("<html><head>")
|
||||
output("<script src='/html/netrender.js' type='text/javascript'></script>")
|
||||
# output("<script src='/html/json2.js' type='text/javascript'></script>")
|
||||
output("<title>")
|
||||
output(title)
|
||||
output("</title></head><body>")
|
||||
output("<link rel='stylesheet' href='/html/netrender.css' type='text/css'>")
|
||||
|
||||
|
||||
def link(text, url):
|
||||
return "<a href='%s'>%s</a>" % (url, text)
|
||||
|
||||
def startTable(border=1):
|
||||
output("<table border='%i'>" % border)
|
||||
def startTable(border=1, class_style = None, caption = None):
|
||||
output("<table border='%i'" % border)
|
||||
|
||||
if class_style:
|
||||
output(" class='%s'" % class_style)
|
||||
|
||||
output(">")
|
||||
|
||||
if caption:
|
||||
output("<caption>%s</caption>" % caption)
|
||||
|
||||
def headerTable(*headers):
|
||||
output("<thead><tr>")
|
||||
@@ -39,8 +59,19 @@ def get(handler):
|
||||
|
||||
output("</tr></thead>")
|
||||
|
||||
def rowTable(*data):
|
||||
output("<tr>")
|
||||
def rowTable(*data, id = None, class_style = None, extra = None):
|
||||
output("<tr")
|
||||
|
||||
if id:
|
||||
output(" id='%s'" % id)
|
||||
|
||||
if class_style:
|
||||
output(" class='%s'" % class_style)
|
||||
|
||||
if extra:
|
||||
output(" %s" % extra)
|
||||
|
||||
output(">")
|
||||
|
||||
for c in data:
|
||||
output("<td>" + str(c) + "</td>")
|
||||
@@ -50,12 +81,43 @@ def get(handler):
|
||||
def endTable():
|
||||
output("</table>")
|
||||
|
||||
if handler.path == "/html" or handler.path == "/":
|
||||
if handler.path == "/html/netrender.js":
|
||||
f = open(os.path.join(src_folder, "netrender.js"), 'rb')
|
||||
|
||||
handler.send_head(content = "text/javascript")
|
||||
shutil.copyfileobj(f, handler.wfile)
|
||||
|
||||
f.close()
|
||||
elif handler.path == "/html/netrender.css":
|
||||
f = open(os.path.join(src_folder, "netrender.css"), 'rb')
|
||||
|
||||
handler.send_head(content = "text/css")
|
||||
shutil.copyfileobj(f, handler.wfile)
|
||||
|
||||
f.close()
|
||||
elif handler.path == "/html" or handler.path == "/":
|
||||
handler.send_head(content = "text/html")
|
||||
output("<html><head><title>NetRender</title></head><body>")
|
||||
head("NetRender")
|
||||
|
||||
output("<h2>Master</h2>")
|
||||
|
||||
|
||||
output("""<button title="remove all jobs" onclick="request('/clear', null);">CLEAR JOB LIST</button>""")
|
||||
|
||||
startTable(caption = "Rules", class_style = "rules")
|
||||
|
||||
headerTable("type", "description", "limit")
|
||||
|
||||
for rule in handler.server.balancer.rules:
|
||||
rowTable("rating", rule, rule.str_limit() if hasattr(rule, "limit") else " ")
|
||||
|
||||
for rule in handler.server.balancer.priorities:
|
||||
rowTable("priority", rule, rule.str_limit() if hasattr(rule, "limit") else " ")
|
||||
|
||||
for rule in handler.server.balancer.exceptions:
|
||||
rowTable("exception", rule, rule.str_limit() if hasattr(rule, "limit") else " ")
|
||||
|
||||
endTable()
|
||||
|
||||
output("<h2>Slaves</h2>")
|
||||
|
||||
startTable()
|
||||
@@ -70,10 +132,15 @@ def get(handler):
|
||||
|
||||
startTable()
|
||||
headerTable(
|
||||
" ",
|
||||
"id",
|
||||
"name",
|
||||
"category",
|
||||
"chunks",
|
||||
"priority",
|
||||
"usage",
|
||||
"wait",
|
||||
"status",
|
||||
"length",
|
||||
"done",
|
||||
"dispatched",
|
||||
@@ -87,14 +154,25 @@ def get(handler):
|
||||
for job in handler.server.jobs:
|
||||
results = job.framesStatus()
|
||||
rowTable(
|
||||
"""<button title="cancel job" onclick="request('/cancel_%s', null);">X</button>""" % job.id +
|
||||
"""<button title="reset all frames" onclick="request('/resetall_%s_0', null);">R</button>""" % job.id,
|
||||
job.id,
|
||||
link(job.name, "/html/job" + job.id),
|
||||
job.priority,
|
||||
job.category if job.category else "<i>None</i>",
|
||||
str(job.chunks) +
|
||||
"""<button title="increase priority" onclick="request('/edit_%s', "{'chunks': %i}");">+</button>""" % (job.id, job.chunks + 1) +
|
||||
"""<button title="decrease priority" onclick="request('/edit_%s', "{'chunks': %i}");" %s>-</button>""" % (job.id, job.chunks - 1, "disabled=True" if job.chunks == 1 else ""),
|
||||
str(job.priority) +
|
||||
"""<button title="increase chunks size" onclick="request('/edit_%s', "{'priority': %i}");">+</button>""" % (job.id, job.priority + 1) +
|
||||
"""<button title="decrease chunks size" onclick="request('/edit_%s', "{'priority': %i}");" %s>-</button>""" % (job.id, job.priority - 1, "disabled=True" if job.priority == 1 else ""),
|
||||
"%0.1f%%" % (job.usage * 100),
|
||||
"%is" % int(time.time() - job.last_dispatched),
|
||||
job.statusText(),
|
||||
len(job),
|
||||
results[DONE],
|
||||
results[DISPATCHED],
|
||||
results[ERROR],
|
||||
str(results[ERROR]) +
|
||||
"""<button title="reset error frames" onclick="request('/reset_%s_0', null);" %s>R</button>""" % (job.id, "disabled=True" if not results[ERROR] else ""),
|
||||
handler.server.balancer.applyPriorities(job), handler.server.balancer.applyExceptions(job)
|
||||
)
|
||||
|
||||
@@ -106,48 +184,66 @@ def get(handler):
|
||||
handler.send_head(content = "text/html")
|
||||
job_id = handler.path[9:]
|
||||
|
||||
output("<html><head><title>NetRender</title></head><body>")
|
||||
head("NetRender")
|
||||
|
||||
job = handler.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
output("<h2>Files</h2>")
|
||||
|
||||
startTable()
|
||||
headerTable("path")
|
||||
|
||||
tot_cache = 0
|
||||
tot_fluid = 0
|
||||
|
||||
for file in job.files:
|
||||
if file.filepath.endswith(".bphys"):
|
||||
tot_cache += 1
|
||||
elif file.filepath.endswith(".bobj.gz") or file.filepath.endswith(".bvel.gz"):
|
||||
tot_fluid += 1
|
||||
else:
|
||||
rowTable(file.filepath)
|
||||
|
||||
if tot_cache > 0:
|
||||
rowTable("%i physic cache files" % tot_cache, class_style = "toggle", extra = "onclick='toggleDisplay(".cache", "none", "table-row")'")
|
||||
for file in job.files:
|
||||
if file.filepath.endswith(".bphys"):
|
||||
rowTable(os.path.split(file.filepath)[1], class_style = "cache")
|
||||
|
||||
if tot_fluid > 0:
|
||||
rowTable("%i fluid bake files" % tot_fluid, class_style = "toggle", extra = "onclick='toggleDisplay(".fluid", "none", "table-row")'")
|
||||
for file in job.files:
|
||||
if file.filepath.endswith(".bobj.gz") or file.filepath.endswith(".bvel.gz"):
|
||||
rowTable(os.path.split(file.filepath)[1], class_style = "fluid")
|
||||
|
||||
endTable()
|
||||
|
||||
output("<h2>Blacklist</h2>")
|
||||
|
||||
if job.blacklist:
|
||||
startTable()
|
||||
headerTable("name", "address")
|
||||
|
||||
for slave_id in job.blacklist:
|
||||
slave = handler.server.slaves_map[slave_id]
|
||||
rowTable(slave.name, slave.address[0])
|
||||
|
||||
endTable()
|
||||
else:
|
||||
output("<i>Empty</i>")
|
||||
|
||||
output("<h2>Frames</h2>")
|
||||
|
||||
startTable()
|
||||
headerTable("no", "status", "render time", "slave", "log")
|
||||
headerTable("no", "status", "render time", "slave", "log", "result")
|
||||
|
||||
for frame in job.frames:
|
||||
rowTable(frame.number, frame.statusText(), "%.1fs" % frame.time, frame.slave.name if frame.slave else " ", link("view log", "/html/log%s_%i" % (job_id, frame.number)) if frame.log_path else " ")
|
||||
rowTable(frame.number, frame.statusText(), "%.1fs" % frame.time, frame.slave.name if frame.slave else " ", link("view log", logURL(job_id, frame.number)) if frame.log_path else " ", link("view result", renderURL(job_id, frame.number)) if frame.status == DONE else " ")
|
||||
|
||||
endTable()
|
||||
else:
|
||||
output("no such job")
|
||||
|
||||
output("</body></html>")
|
||||
|
||||
elif handler.path.startswith("/html/log"):
|
||||
handler.send_head(content = "text/plain")
|
||||
pattern = re.compile("([a-zA-Z0-9]+)_([0-9]+)")
|
||||
|
||||
match = pattern.match(handler.path[9:])
|
||||
if match:
|
||||
job_id = match.groups()[0]
|
||||
frame_number = int(match.groups()[1])
|
||||
|
||||
job = handler.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
frame = job[frame_number]
|
||||
|
||||
if frame:
|
||||
f = open(frame.log_path, 'rb')
|
||||
|
||||
shutil.copyfileobj(f, handler.wfile)
|
||||
|
||||
f.close()
|
||||
else:
|
||||
output("no such frame")
|
||||
else:
|
||||
output("no such job")
|
||||
else:
|
||||
output("malformed url")
|
||||
|
||||
|
||||
@@ -23,13 +23,15 @@ import subprocess, shutil, time, hashlib
|
||||
from netrender.utils import *
|
||||
|
||||
class LogFile:
|
||||
def __init__(self, job_id = 0, frames = []):
|
||||
def __init__(self, job_id = 0, slave_id = 0, frames = []):
|
||||
self.job_id = job_id
|
||||
self.slave_id = slave_id
|
||||
self.frames = frames
|
||||
|
||||
def serialize(self):
|
||||
return {
|
||||
"job_id": self.job_id,
|
||||
"slave_id": self.slave_id,
|
||||
"frames": self.frames
|
||||
}
|
||||
|
||||
@@ -40,6 +42,7 @@ class LogFile:
|
||||
|
||||
logfile = LogFile()
|
||||
logfile.job_id = data["job_id"]
|
||||
logfile.slave_id = data["slave_id"]
|
||||
logfile.frames = data["frames"]
|
||||
|
||||
return logfile
|
||||
@@ -68,27 +71,28 @@ class RenderSlave:
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def materialize(data):
|
||||
def materialize(data, cache = True):
|
||||
if not data:
|
||||
return None
|
||||
|
||||
slave_id = data["id"]
|
||||
|
||||
if slave_id in RenderSlave._slave_map:
|
||||
|
||||
if cache and slave_id in RenderSlave._slave_map:
|
||||
return RenderSlave._slave_map[slave_id]
|
||||
else:
|
||||
slave = RenderSlave()
|
||||
slave.id = slave_id
|
||||
slave.name = data["name"]
|
||||
slave.address = data["address"]
|
||||
slave.stats = data["stats"]
|
||||
slave.total_done = data["total_done"]
|
||||
slave.total_error = data["total_error"]
|
||||
slave.last_seen = data["last_seen"]
|
||||
|
||||
|
||||
slave = RenderSlave()
|
||||
slave.id = slave_id
|
||||
slave.name = data["name"]
|
||||
slave.address = data["address"]
|
||||
slave.stats = data["stats"]
|
||||
slave.total_done = data["total_done"]
|
||||
slave.total_error = data["total_error"]
|
||||
slave.last_seen = data["last_seen"]
|
||||
|
||||
if cache:
|
||||
RenderSlave._slave_map[slave_id] = slave
|
||||
|
||||
return slave
|
||||
return slave
|
||||
|
||||
JOB_BLENDER = 1
|
||||
JOB_PROCESS = 2
|
||||
@@ -98,21 +102,58 @@ JOB_TYPES = {
|
||||
JOB_PROCESS: "Process"
|
||||
}
|
||||
|
||||
class RenderFile:
|
||||
def __init__(self, filepath = "", index = 0, start = -1, end = -1):
|
||||
self.filepath = filepath
|
||||
self.index = index
|
||||
self.start = start
|
||||
self.end = end
|
||||
|
||||
def serialize(self):
|
||||
return {
|
||||
"filepath": self.filepath,
|
||||
"index": self.index,
|
||||
"start": self.start,
|
||||
"end": self.end
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def materialize(data):
|
||||
if not data:
|
||||
return None
|
||||
|
||||
rfile = RenderFile(data["filepath"], data["index"], data["start"], data["end"])
|
||||
|
||||
return rfile
|
||||
|
||||
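# Round-trip sketch for the new RenderFile class above (values are made up): the dict
# returned by serialize() is what gets repr()'d over the wire and fed back to materialize().
rfile = RenderFile("/tmp/scene.blend", 0, -1, -1)
clone = RenderFile.materialize(rfile.serialize())
assert clone.filepath == rfile.filepath and clone.index == rfile.index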
class RenderJob:
|
||||
def __init__(self):
|
||||
def __init__(self, job_info = None):
|
||||
self.id = ""
|
||||
self.type = JOB_BLENDER
|
||||
self.name = ""
|
||||
self.category = "None"
|
||||
self.status = JOB_WAITING
|
||||
self.files = []
|
||||
self.frames = []
|
||||
self.chunks = 0
|
||||
self.priority = 0
|
||||
self.usage = 0.0
|
||||
self.blacklist = []
|
||||
|
||||
self.usage = 0.0
|
||||
self.last_dispatched = 0.0
|
||||
|
||||
self.frames = []
|
||||
|
||||
if job_info:
|
||||
self.type = job_info.type
|
||||
self.name = job_info.name
|
||||
self.category = job_info.category
|
||||
self.status = job_info.status
|
||||
self.files = job_info.files
|
||||
self.chunks = job_info.chunks
|
||||
self.priority = job_info.priority
|
||||
self.blacklist = job_info.blacklist
|
||||
|
||||
def addFile(self, file_path, start=-1, end=-1):
|
||||
self.files.append((file_path, start, end))
|
||||
self.files.append(RenderFile(file_path, len(self.files), start, end))
|
||||
|
||||
def addFrame(self, frame_number, command = ""):
|
||||
frame = RenderFrame(frame_number, command)
|
||||
@@ -133,6 +174,9 @@ class RenderJob:
|
||||
def countSlaves(self):
|
||||
return len(set((frame.slave for frame in self.frames if frame.status == DISPATCHED)))
|
||||
|
||||
def statusText(self):
|
||||
return JOB_STATUS_TEXT[self.status]
|
||||
|
||||
def framesStatus(self):
|
||||
results = {
|
||||
QUEUED: 0,
|
||||
@@ -167,7 +211,9 @@ class RenderJob:
|
||||
"id": self.id,
|
||||
"type": self.type,
|
||||
"name": self.name,
|
||||
"files": [f for f in self.files if f[1] == -1 or not frames or (f[1] <= max_frame and f[2] >= min_frame)],
|
||||
"category": self.category,
|
||||
"status": self.status,
|
||||
"files": [f.serialize() for f in self.files if f.start == -1 or not frames or (f.start <= max_frame and f.end >= min_frame)],
|
||||
"frames": [f.serialize() for f in self.frames if not frames or f in frames],
|
||||
"chunks": self.chunks,
|
||||
"priority": self.priority,
|
||||
@@ -185,7 +231,9 @@ class RenderJob:
|
||||
job.id = data["id"]
|
||||
job.type = data["type"]
|
||||
job.name = data["name"]
|
||||
job.files = data["files"]
|
||||
job.category = data["category"]
|
||||
job.status = data["status"]
|
||||
job.files = [RenderFile.materialize(f) for f in data["files"]]
|
||||
job.frames = [RenderFrame.materialize(f) for f in data["frames"]]
|
||||
job.chunks = data["chunks"]
|
||||
job.priority = data["priority"]
|
||||
@@ -204,7 +252,7 @@ class RenderFrame:
|
||||
self.command = command
|
||||
|
||||
def statusText(self):
|
||||
return STATUS_TEXT[self.status]
|
||||
return FRAME_STATUS_TEXT[self.status]
|
||||
|
||||
def serialize(self):
|
||||
return {
|
||||
|
||||
75
release/scripts/io/netrender/netrender.css
Normal file
@@ -0,0 +1,75 @@
|
||||
body {
|
||||
background-color:#eee;
|
||||
font-size:12px;
|
||||
font-family: "Lucida Sans","Lucida Sans Unicode","Lucida Grande",Lucida,sans-serif;
|
||||
|
||||
}
|
||||
a {
|
||||
/*text-decoration:none;*/
|
||||
color:#666;
|
||||
}
|
||||
a:hover {
|
||||
color:#000;
|
||||
}
|
||||
h2 {
|
||||
background-color:#ddd;
|
||||
font-size:120%;
|
||||
padding:5px;
|
||||
}
|
||||
|
||||
h2 {
|
||||
background-color:#ddd;
|
||||
font-size:110%;
|
||||
padding:5px;
|
||||
}
|
||||
|
||||
table {
|
||||
text-align:center;
|
||||
border:0;
|
||||
background-color:#ddd;
|
||||
padding: 0px;
|
||||
margin: 0px;
|
||||
}
|
||||
thead{
|
||||
font-size:90%;
|
||||
color:#555;
|
||||
background-color:#ccc;
|
||||
}
|
||||
td {
|
||||
border:0;
|
||||
padding:2px;
|
||||
padding-left:10px;
|
||||
padding-right:10px;
|
||||
margin-left:20px;
|
||||
background-color:#ddd;
|
||||
}
|
||||
td:hover {
|
||||
background-color:#ccc;
|
||||
}
|
||||
tr {
|
||||
border:0;
|
||||
}
|
||||
button {
|
||||
color: #111;
|
||||
width: auto;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.toggle {
|
||||
text-decoration: underline;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
|
||||
.cache {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.fluid {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.rules {
|
||||
width: 60em;
|
||||
text-align: left;
|
||||
}
|
||||
43
release/scripts/io/netrender/netrender.js
Normal file
@@ -0,0 +1,43 @@
|
||||
function request(url, data)
|
||||
{
|
||||
xmlhttp = new XMLHttpRequest();
|
||||
xmlhttp.open("POST", url, false);
|
||||
xmlhttp.send(data);
|
||||
window.location.reload()
|
||||
}
|
||||
|
||||
function edit(id, info)
|
||||
{
|
||||
request("/edit_" + id, info)
|
||||
}
|
||||
|
||||
function returnObjById( id )
|
||||
{
|
||||
if (document.getElementById)
|
||||
var returnVar = document.getElementById(id);
|
||||
else if (document.all)
|
||||
var returnVar = document.all[id];
|
||||
else if (document.layers)
|
||||
var returnVar = document.layers[id];
|
||||
return returnVar;
|
||||
}
|
||||
|
||||
function toggleDisplay( className, value1, value2 )
|
||||
{
|
||||
style = getStyle(className)
|
||||
|
||||
if (style.style["display"] == value1) {
|
||||
style.style["display"] = value2;
|
||||
} else {
|
||||
style.style["display"] = value1;
|
||||
}
|
||||
}
|
||||
|
||||
function getStyle(className) {
|
||||
var classes = document.styleSheets[0].rules || document.styleSheets[0].cssRules
|
||||
for(var x=0;x<classes.length;x++) {
|
||||
if(classes[x].selectorText==className) {
|
||||
return classes[x];
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -26,6 +26,64 @@ from netrender.utils import *
|
||||
import netrender.client as client
|
||||
import netrender.model
|
||||
|
||||
@rnaOperator
|
||||
class RENDER_OT_netslave_bake(bpy.types.Operator):
|
||||
'''NEED DESCRIPTION'''
|
||||
bl_idname = "render.netslavebake"
|
||||
bl_label = "Bake all in file"
|
||||
|
||||
def poll(self, context):
|
||||
return True
|
||||
|
||||
def execute(self, context):
|
||||
scene = context.scene
|
||||
netsettings = scene.network_render
|
||||
|
||||
filename = bpy.data.filename
|
||||
path, name = os.path.split(filename)
|
||||
root, ext = os.path.splitext(name)
|
||||
default_path = path + os.sep + "blendcache_" + root + os.sep # need an API call for that
|
||||
relative_path = os.sep + os.sep + "blendcache_" + root + os.sep
|
||||
|
||||
# Force all point cache next to the blend file
|
||||
for object in bpy.data.objects:
|
||||
for modifier in object.modifiers:
|
||||
if modifier.type == 'FLUID_SIMULATION' and modifier.settings.type == "DOMAIN":
|
||||
modifier.settings.path = relative_path
|
||||
bpy.ops.fluid.bake({"active_object": object, "scene": scene})
|
||||
elif modifier.type == "CLOTH":
|
||||
modifier.point_cache.step = 1
|
||||
modifier.point_cache.disk_cache = True
|
||||
modifier.point_cache.external = False
|
||||
elif modifier.type == "SOFT_BODY":
|
||||
modifier.point_cache.step = 1
|
||||
modifier.point_cache.disk_cache = True
|
||||
modifier.point_cache.external = False
|
||||
elif modifier.type == "SMOKE" and modifier.smoke_type == "TYPE_DOMAIN":
|
||||
modifier.domain_settings.point_cache_low.step = 1
|
||||
modifier.domain_settings.point_cache_low.disk_cache = True
|
||||
modifier.domain_settings.point_cache_low.external = False
|
||||
modifier.domain_settings.point_cache_high.step = 1
|
||||
modifier.domain_settings.point_cache_high.disk_cache = True
|
||||
modifier.domain_settings.point_cache_high.external = False
|
||||
|
||||
# particle modifiers are stupid and don't contain data
|
||||
# we have to go through the object property
|
||||
for psys in object.particle_systems:
|
||||
psys.point_cache.step = 1
|
||||
psys.point_cache.disk_cache = True
|
||||
psys.point_cache.external = False
|
||||
psys.point_cache.filepath = relative_path
|
||||
|
||||
bpy.ops.ptcache.bake_all()
|
||||
|
||||
#bpy.ops.wm.save_mainfile(path = path + os.sep + root + "_baked.blend")
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
|
||||
@rnaOperator
|
||||
class RENDER_OT_netclientanim(bpy.types.Operator):
|
||||
'''Start rendering an animation on network'''
|
||||
@@ -39,8 +97,8 @@ class RENDER_OT_netclientanim(bpy.types.Operator):
|
||||
scene = context.scene
|
||||
netsettings = scene.network_render
|
||||
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port)
|
||||
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
|
||||
|
||||
if conn:
|
||||
# Sending file
|
||||
scene.network_render.job_id = client.clientSendJob(conn, scene, True)
|
||||
@@ -48,7 +106,7 @@ class RENDER_OT_netclientanim(bpy.types.Operator):
|
||||
|
||||
bpy.ops.screen.render('INVOKE_AREA', animation=True)
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
@@ -66,14 +124,19 @@ class RENDER_OT_netclientsend(bpy.types.Operator):
|
||||
scene = context.scene
|
||||
netsettings = scene.network_render
|
||||
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port)
|
||||
try:
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
|
||||
|
||||
if conn:
|
||||
# Sending file
|
||||
scene.network_render.job_id = client.clientSendJob(conn, scene, True)
|
||||
conn.close()
|
||||
self.report('INFO', "Job sent to master")
|
||||
except Exception as err:
|
||||
self.report('ERROR', str(err))
|
||||
|
||||
if conn:
|
||||
# Sending file
|
||||
scene.network_render.job_id = client.clientSendJob(conn, scene, True)
|
||||
conn.close()
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
@@ -89,7 +152,7 @@ class RENDER_OT_netclientstatus(bpy.types.Operator):
|
||||
|
||||
def execute(self, context):
|
||||
netsettings = context.scene.network_render
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port)
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
|
||||
|
||||
if conn:
|
||||
conn.request("GET", "/status")
|
||||
@@ -113,7 +176,7 @@ class RENDER_OT_netclientstatus(bpy.types.Operator):
|
||||
|
||||
job.name = j.name
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
@@ -143,7 +206,7 @@ class RENDER_OT_netclientblacklistslave(bpy.types.Operator):
|
||||
netsettings.slaves.remove(netsettings.active_slave_index)
|
||||
netsettings.active_slave_index = -1
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
@@ -173,7 +236,7 @@ class RENDER_OT_netclientwhitelistslave(bpy.types.Operator):
|
||||
netsettings.slaves_blacklist.remove(netsettings.active_blacklisted_slave_index)
|
||||
netsettings.active_blacklisted_slave_index = -1
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
@@ -190,7 +253,7 @@ class RENDER_OT_netclientslaves(bpy.types.Operator):
|
||||
|
||||
def execute(self, context):
|
||||
netsettings = context.scene.network_render
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port)
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
|
||||
|
||||
if conn:
|
||||
conn.request("GET", "/slaves")
|
||||
@@ -219,7 +282,7 @@ class RENDER_OT_netclientslaves(bpy.types.Operator):
|
||||
slave = netsettings.slaves[-1]
|
||||
slave.name = s.name
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
@@ -236,19 +299,19 @@ class RENDER_OT_netclientcancel(bpy.types.Operator):
|
||||
|
||||
def execute(self, context):
|
||||
netsettings = context.scene.network_render
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port)
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
|
||||
|
||||
if conn:
|
||||
job = netrender.jobs[netsettings.active_job_index]
|
||||
|
||||
conn.request("POST", "/cancel", headers={"job-id":job.id})
|
||||
conn.request("POST", cancelURL(job.id))
|
||||
|
||||
response = conn.getresponse()
|
||||
print( response.status, response.reason )
|
||||
|
||||
netsettings.jobs.remove(netsettings.active_job_index)
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
@@ -264,7 +327,7 @@ class RENDER_OT_netclientcancelall(bpy.types.Operator):
|
||||
|
||||
def execute(self, context):
|
||||
netsettings = context.scene.network_render
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port)
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
|
||||
|
||||
if conn:
|
||||
conn.request("POST", "/clear")
|
||||
@@ -275,7 +338,7 @@ class RENDER_OT_netclientcancelall(bpy.types.Operator):
|
||||
while(len(netsettings.jobs) > 0):
|
||||
netsettings.jobs.remove(0)
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
@@ -294,7 +357,7 @@ class netclientdownload(bpy.types.Operator):
|
||||
netsettings = context.scene.network_render
|
||||
rd = context.scene.render_data
|
||||
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port)
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
|
||||
|
||||
if conn:
|
||||
job = netrender.jobs[netsettings.active_job_index]
|
||||
@@ -320,7 +383,7 @@ class netclientdownload(bpy.types.Operator):
|
||||
|
||||
conn.close()
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
@@ -335,7 +398,7 @@ class netclientscan(bpy.types.Operator):
|
||||
return True
|
||||
|
||||
def execute(self, context):
|
||||
address, port = clientScan()
|
||||
address, port = clientScan(self.report)
|
||||
|
||||
if address:
|
||||
scene = context.scene
|
||||
@@ -343,7 +406,7 @@ class netclientscan(bpy.types.Operator):
|
||||
netsettings.server_address = address
|
||||
netsettings.server_port = port
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
@@ -362,14 +425,14 @@ class netclientweb(bpy.types.Operator):
|
||||
|
||||
|
||||
# open connection to make sure server exists
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port)
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
|
||||
|
||||
if conn:
|
||||
conn.close()
|
||||
|
||||
webbrowser.open("http://%s:%i" % (netsettings.server_address, netsettings.server_port))
|
||||
|
||||
return ('FINISHED',)
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
|
||||
@@ -54,20 +54,19 @@ def slave_Info():
|
||||
|
||||
def testCancel(conn, job_id, frame_number):
|
||||
conn.request("HEAD", "/status", headers={"job-id":job_id, "job-frame": str(frame_number)})
|
||||
response = conn.getresponse()
|
||||
|
||||
|
||||
# cancelled if job isn't found anymore
|
||||
if response.status == http.client.NO_CONTENT:
|
||||
if conn.getresponse().status == http.client.NO_CONTENT:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def testFile(conn, job_id, slave_id, JOB_PREFIX, file_path, main_path = None):
|
||||
def testFile(conn, job_id, slave_id, file_index, JOB_PREFIX, file_path, main_path = None):
|
||||
job_full_path = prefixPath(JOB_PREFIX, file_path, main_path)
|
||||
|
||||
if not os.path.exists(job_full_path):
|
||||
temp_path = JOB_PREFIX + "slave.temp.blend"
|
||||
conn.request("GET", "/file", headers={"job-id": job_id, "slave-id":slave_id, "job-file":file_path})
|
||||
conn.request("GET", fileURL(job_id, file_index), headers={"slave-id":slave_id})
|
||||
response = conn.getresponse()
|
||||
|
||||
if response.status != http.client.OK:
|
||||
@@ -86,7 +85,6 @@ def testFile(conn, job_id, slave_id, JOB_PREFIX, file_path, main_path = None):
|
||||
|
||||
return job_full_path
|
||||
|
||||
|
||||
def render_slave(engine, netsettings):
|
||||
timeout = 1
|
||||
|
||||
@@ -120,21 +118,21 @@ def render_slave(engine, netsettings):
|
||||
|
||||
|
||||
if job.type == netrender.model.JOB_BLENDER:
|
||||
job_path = job.files[0][0] # data in files have format (path, start, end)
|
||||
job_path = job.files[0].filepath # path of main file
|
||||
main_path, main_file = os.path.split(job_path)
|
||||
|
||||
job_full_path = testFile(conn, job.id, slave_id, JOB_PREFIX, job_path)
|
||||
job_full_path = testFile(conn, job.id, slave_id, 0, JOB_PREFIX, job_path)
|
||||
print("Fullpath", job_full_path)
|
||||
print("File:", main_file, "and %i other files" % (len(job.files) - 1,))
|
||||
engine.update_stats("", "Render File", main_file, "for job", job.id)
|
||||
engine.update_stats("", "Render File "+ main_file+ " for job "+ job.id)
|
||||
|
||||
for file_path, start, end in job.files[1:]:
|
||||
print("\t", file_path)
|
||||
testFile(conn, job.id, slave_id, JOB_PREFIX, file_path, main_path)
|
||||
for rfile in job.files[1:]:
|
||||
print("\t", rfile.filepath)
|
||||
testFile(conn, job.id, slave_id, rfile.index, JOB_PREFIX, rfile.filepath, main_path)
|
||||
|
||||
# announce log to master
|
||||
logfile = netrender.model.LogFile(job.id, [frame.number for frame in job.frames])
|
||||
conn.request("POST", "/log", bytes(repr(logfile.serialize()), encoding='utf8'), headers={"slave-id":slave_id})
|
||||
logfile = netrender.model.LogFile(job.id, slave_id, [frame.number for frame in job.frames])
|
||||
conn.request("POST", "/log", bytes(repr(logfile.serialize()), encoding='utf8'))
|
||||
response = conn.getresponse()
|
||||
|
||||
|
||||
@@ -151,7 +149,7 @@ def render_slave(engine, netsettings):
|
||||
frame_args += ["-f", str(frame.number)]
|
||||
|
||||
val = SetErrorMode()
|
||||
process = subprocess.Popen([BLENDER_PATH, "-b", job_full_path, "-o", JOB_PREFIX + "######", "-E", "BLENDER_RENDER", "-F", "MULTILAYER"] + frame_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
process = subprocess.Popen([BLENDER_PATH, "-b", "-noaudio", job_full_path, "-o", JOB_PREFIX + "######", "-E", "BLENDER_RENDER", "-F", "MULTILAYER"] + frame_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
RestoreErrorMode(val)
|
||||
elif job.type == netrender.model.JOB_PROCESS:
|
||||
command = job.frames[0].command
|
||||
@@ -159,7 +157,7 @@ def render_slave(engine, netsettings):
|
||||
process = subprocess.Popen(command.split(" "), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
RestoreErrorMode(val)
|
||||
|
||||
headers = {"job-id":job.id, "slave-id":slave_id}
|
||||
headers = {"slave-id":slave_id}
|
||||
|
||||
cancelled = False
|
||||
stdout = bytes()
|
||||
@@ -173,8 +171,7 @@ def render_slave(engine, netsettings):
|
||||
# update logs if needed
|
||||
if stdout:
|
||||
# (only need to update the log on one frame, they are linked)
|
||||
headers["job-frame"] = str(first_frame)
|
||||
conn.request("PUT", "/log", stdout, headers=headers)
|
||||
conn.request("PUT", logURL(job.id, first_frame), stdout, headers=headers)
|
||||
response = conn.getresponse()
|
||||
|
||||
stdout = bytes()
|
||||
@@ -203,12 +200,13 @@ def render_slave(engine, netsettings):
|
||||
# flush the rest of the logs
|
||||
if stdout:
|
||||
# (only need to update the log on one frame, they are linked)
|
||||
headers["job-frame"] = str(first_frame)
|
||||
conn.request("PUT", "/log", stdout, headers=headers)
|
||||
response = conn.getresponse()
|
||||
conn.request("PUT", logURL(job.id, first_frame), stdout, headers=headers)
|
||||
if conn.getresponse().status == http.client.NO_CONTENT:
|
||||
continue
|
||||
|
||||
headers = {"job-id":job.id, "slave-id":slave_id, "job-time":str(avg_t)}
|
||||
|
||||
|
||||
if status == 0: # non zero status is error
|
||||
headers["job-result"] = str(DONE)
|
||||
for frame in job.frames:
|
||||
@@ -219,17 +217,20 @@ def render_slave(engine, netsettings):
|
||||
f = open(JOB_PREFIX + "%06d" % frame.number + ".exr", 'rb')
|
||||
conn.request("PUT", "/render", f, headers=headers)
|
||||
f.close()
|
||||
response = conn.getresponse()
|
||||
if conn.getresponse().status == http.client.NO_CONTENT:
|
||||
continue
|
||||
elif job.type == netrender.model.JOB_PROCESS:
|
||||
conn.request("PUT", "/render", headers=headers)
|
||||
response = conn.getresponse()
|
||||
if conn.getresponse().status == http.client.NO_CONTENT:
|
||||
continue
|
||||
else:
|
||||
headers["job-result"] = str(ERROR)
|
||||
for frame in job.frames:
|
||||
headers["job-frame"] = str(frame.number)
|
||||
# send error result back to server
|
||||
conn.request("PUT", "/render", headers=headers)
|
||||
response = conn.getresponse()
|
||||
if conn.getresponse().status == http.client.NO_CONTENT:
|
||||
continue
|
||||
else:
|
||||
if timeout < MAX_TIMEOUT:
|
||||
timeout += INCREMENT_TIMEOUT
|
||||
@@ -243,4 +244,4 @@ def render_slave(engine, netsettings):
|
||||
conn.close()
|
||||
|
||||
if __name__ == "__main__":
|
||||
pass
|
||||
pass
|
||||
|
||||
@@ -50,7 +50,7 @@ class RenderButtonsPanel(bpy.types.Panel):
|
||||
@rnaType
|
||||
class RENDER_PT_network_settings(RenderButtonsPanel):
|
||||
bl_label = "Network Settings"
|
||||
COMPAT_ENGINES = set(['NET_RENDER'])
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@@ -63,6 +63,11 @@ class RENDER_PT_network_settings(RenderButtonsPanel):
|
||||
split = layout.split()
|
||||
|
||||
col = split.column()
|
||||
|
||||
|
||||
if scene.network_render.mode in ("RENDER_MASTER", "RENDER_SLAVE"):
|
||||
col.operator("screen.render", text="Start", icon='PLAY').animation = True
|
||||
|
||||
col.prop(scene.network_render, "mode")
|
||||
col.prop(scene.network_render, "path")
|
||||
col.prop(scene.network_render, "server_address")
|
||||
@@ -71,16 +76,17 @@ class RENDER_PT_network_settings(RenderButtonsPanel):
|
||||
if scene.network_render.mode == "RENDER_MASTER":
|
||||
col.prop(scene.network_render, "server_broadcast")
|
||||
else:
|
||||
col.operator("render.netclientscan", icon="ICON_FILE_REFRESH", text="")
|
||||
col.operator("render.netclientscan", icon='FILE_REFRESH', text="")
|
||||
|
||||
@rnaType
|
||||
class RENDER_PT_network_job(RenderButtonsPanel):
|
||||
bl_label = "Job Settings"
|
||||
COMPAT_ENGINES = set(['NET_RENDER'])
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
def poll(self, context):
|
||||
scene = context.scene
|
||||
return super().poll(context) and scene.network_render.mode == "RENDER_CLIENT"
|
||||
return (super().poll(context)
|
||||
and scene.network_render.mode == "RENDER_CLIENT")
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@@ -93,10 +99,14 @@ class RENDER_PT_network_job(RenderButtonsPanel):
|
||||
split = layout.split()
|
||||
|
||||
col = split.column()
|
||||
col.operator("render.netclientanim", icon='ICON_RENDER_ANIMATION')
|
||||
col.operator("render.netclientsend", icon="ICON_FILE_BLEND")
|
||||
col.operator("render.netclientweb", icon="ICON_QUESTION")
|
||||
if scene.network_render.server_address != "[default]":
|
||||
col.operator("render.netclientanim", icon='RENDER_ANIMATION')
|
||||
col.operator("render.netclientsend", icon='FILE_BLEND')
|
||||
if scene.network_render.job_id:
|
||||
col.operator("screen.render", text="Get Results", icon='RENDER_ANIMATION').animation = True
|
||||
col.operator("render.netclientweb", icon='QUESTION')
|
||||
col.prop(scene.network_render, "job_name")
|
||||
col.prop(scene.network_render, "job_category")
|
||||
row = col.row()
|
||||
row.prop(scene.network_render, "priority")
|
||||
row.prop(scene.network_render, "chunks")
|
||||
@@ -104,11 +114,13 @@ class RENDER_PT_network_job(RenderButtonsPanel):
|
||||
@rnaType
|
||||
class RENDER_PT_network_slaves(RenderButtonsPanel):
|
||||
bl_label = "Slaves Status"
|
||||
COMPAT_ENGINES = set(['NET_RENDER'])
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
def poll(self, context):
|
||||
scene = context.scene
|
||||
return super().poll(context) and scene.network_render.mode == "RENDER_CLIENT"
|
||||
return (super().poll(context)
|
||||
and scene.network_render.mode == "RENDER_CLIENT"
|
||||
and scene.network_render.server_address != "[default]")
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@@ -120,8 +132,8 @@ class RENDER_PT_network_slaves(RenderButtonsPanel):
|
||||
row.template_list(netsettings, "slaves", netsettings, "active_slave_index", rows=2)
|
||||
|
||||
sub = row.column(align=True)
|
||||
sub.operator("render.netclientslaves", icon="ICON_FILE_REFRESH", text="")
|
||||
sub.operator("render.netclientblacklistslave", icon="ICON_ZOOMOUT", text="")
|
||||
sub.operator("render.netclientslaves", icon='FILE_REFRESH', text="")
|
||||
sub.operator("render.netclientblacklistslave", icon='ZOOMOUT', text="")
|
||||
|
||||
if len(netrender.slaves) == 0 and len(netsettings.slaves) > 0:
|
||||
while(len(netsettings.slaves) > 0):
|
||||
@@ -140,11 +152,13 @@ class RENDER_PT_network_slaves(RenderButtonsPanel):
|
||||
@rnaType
|
||||
class RENDER_PT_network_slaves_blacklist(RenderButtonsPanel):
|
||||
bl_label = "Slaves Blacklist"
|
||||
COMPAT_ENGINES = set(['NET_RENDER'])
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
def poll(self, context):
|
||||
scene = context.scene
|
||||
return super().poll(context) and scene.network_render.mode == "RENDER_CLIENT"
|
||||
return (super().poll(context)
|
||||
and scene.network_render.mode == "RENDER_CLIENT"
|
||||
and scene.network_render.server_address != "[default]")
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@@ -156,7 +170,7 @@ class RENDER_PT_network_slaves_blacklist(RenderButtonsPanel):
|
||||
row.template_list(netsettings, "slaves_blacklist", netsettings, "active_blacklisted_slave_index", rows=2)
|
||||
|
||||
sub = row.column(align=True)
|
||||
sub.operator("render.netclientwhitelistslave", icon="ICON_ZOOMOUT", text="")
|
||||
sub.operator("render.netclientwhitelistslave", icon='ZOOMOUT', text="")
|
||||
|
||||
if len(netrender.blacklist) == 0 and len(netsettings.slaves_blacklist) > 0:
|
||||
while(len(netsettings.slaves_blacklist) > 0):
|
||||
@@ -175,11 +189,13 @@ class RENDER_PT_network_slaves_blacklist(RenderButtonsPanel):
|
||||
@rnaType
|
||||
class RENDER_PT_network_jobs(RenderButtonsPanel):
|
||||
bl_label = "Jobs"
|
||||
COMPAT_ENGINES = set(['NET_RENDER'])
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
def poll(self, context):
|
||||
scene = context.scene
|
||||
return super().poll(context) and scene.network_render.mode == "RENDER_CLIENT"
|
||||
return (super().poll(context)
|
||||
and scene.network_render.mode == "RENDER_CLIENT"
|
||||
and scene.network_render.server_address != "[default]")
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@@ -191,10 +207,10 @@ class RENDER_PT_network_jobs(RenderButtonsPanel):
|
||||
row.template_list(netsettings, "jobs", netsettings, "active_job_index", rows=2)
|
||||
|
||||
sub = row.column(align=True)
|
||||
sub.operator("render.netclientstatus", icon="ICON_FILE_REFRESH", text="")
|
||||
sub.operator("render.netclientcancel", icon="ICON_ZOOMOUT", text="")
|
||||
sub.operator("render.netclientcancelall", icon="ICON_PANEL_CLOSE", text="")
|
||||
sub.operator("render.netclientdownload", icon='ICON_RENDER_ANIMATION', text="")
|
||||
sub.operator("render.netclientstatus", icon='FILE_REFRESH', text="")
|
||||
sub.operator("render.netclientcancel", icon='ZOOMOUT', text="")
|
||||
sub.operator("render.netclientcancelall", icon='PANEL_CLOSE', text="")
|
||||
sub.operator("render.netclientdownload", icon='RENDER_ANIMATION', text="")
|
||||
|
||||
if len(netrender.jobs) == 0 and len(netsettings.jobs) > 0:
|
||||
while(len(netsettings.jobs) > 0):
|
||||
@@ -264,6 +280,12 @@ NetRenderSettings.StringProperty( attr="job_name",
|
||||
maxlen = 128,
|
||||
default = "[default]")
|
||||
|
||||
NetRenderSettings.StringProperty( attr="job_category",
|
||||
name="Job category",
|
||||
description="Category of the job",
|
||||
maxlen = 128,
|
||||
default = "")
|
||||
|
||||
NetRenderSettings.IntProperty( attr="chunks",
|
||||
name="Chunks",
|
||||
description="Number of frame to dispatch to each slave in one chunk",
|
||||
|
||||
@@ -28,7 +28,7 @@ try:
|
||||
except:
|
||||
bpy = None
|
||||
|
||||
VERSION = b"0.5"
|
||||
VERSION = bytes("0.7", encoding='utf8')
|
||||
|
||||
# Jobs status
|
||||
JOB_WAITING = 0 # before all data has been entered
|
||||
@@ -36,13 +36,21 @@ JOB_PAUSED = 1 # paused by user
|
||||
JOB_FINISHED = 2 # finished rendering
|
||||
JOB_QUEUED = 3 # ready to be dispatched
|
||||
|
||||
JOB_STATUS_TEXT = {
|
||||
JOB_WAITING: "Waiting",
|
||||
JOB_PAUSED: "Paused",
|
||||
JOB_FINISHED: "Finished",
|
||||
JOB_QUEUED: "Queued"
|
||||
}
|
||||
|
||||
|
||||
# Frames status
|
||||
QUEUED = 0
|
||||
DISPATCHED = 1
|
||||
DONE = 2
|
||||
ERROR = 3
|
||||
|
||||
STATUS_TEXT = {
|
||||
FRAME_STATUS_TEXT = {
|
||||
QUEUED: "Queued",
|
||||
DISPATCHED: "Dispatched",
|
||||
DONE: "Done",
|
||||
@@ -54,43 +62,69 @@ def rnaType(rna_type):
|
||||
return rna_type
|
||||
|
||||
def rnaOperator(rna_op):
|
||||
if bpy: bpy.ops.add(rna_op)
|
||||
if bpy: bpy.types.register(rna_op)
|
||||
return rna_op
|
||||
|
||||
def clientScan():
|
||||
try:
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
|
||||
s.settimeout(30)
|
||||
def reporting(report, message, errorType = None):
|
||||
if errorType:
|
||||
t = 'ERROR'
|
||||
else:
|
||||
t = 'INFO'
|
||||
|
||||
if report:
|
||||
report(t, message)
|
||||
return None
|
||||
elif errorType:
|
||||
raise errorType(message)
|
||||
else:
|
||||
return None
|
||||
|
||||
s.bind(('', 8000))
|
||||
|
||||
buf, address = s.recvfrom(64)
|
||||
|
||||
print("received:", buf)
|
||||
|
||||
address = address[0]
|
||||
port = int(str(buf, encoding='utf8'))
|
||||
return (address, port)
|
||||
except socket.timeout:
|
||||
print("no server info")
|
||||
return ("", 8000) # return default values
|
||||
def clientScan(report = None):
|
||||
try:
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
|
||||
s.settimeout(30)
|
||||
|
||||
def clientConnection(address, port):
|
||||
if address == "[default]":
|
||||
s.bind(('', 8000))
|
||||
|
||||
buf, address = s.recvfrom(64)
|
||||
|
||||
address = address[0]
|
||||
port = int(str(buf, encoding='utf8'))
|
||||
|
||||
reporting(report, "Master server found")
|
||||
|
||||
return (address, port)
|
||||
except socket.timeout:
|
||||
reporting(report, "No master server on network", IOError)
|
||||
|
||||
return ("", 8000) # return default values
|
||||
|
||||
def clientConnection(address, port, report = None):
|
||||
if address == "[default]":
|
||||
# calling an operator from python is broken here, the scene isn't in context
|
||||
# if bpy:
|
||||
# bpy.ops.render.netclientscan()
|
||||
# else:
|
||||
address, port = clientScan()
|
||||
|
||||
conn = http.client.HTTPConnection(address, port)
|
||||
|
||||
if clientVerifyVersion(conn):
|
||||
return conn
|
||||
else:
|
||||
conn.close()
|
||||
return None
|
||||
address, port = clientScan()
|
||||
if address == "":
|
||||
return None
|
||||
|
||||
try:
|
||||
conn = http.client.HTTPConnection(address, port)
|
||||
|
||||
if conn:
|
||||
if clientVerifyVersion(conn):
|
||||
return conn
|
||||
else:
|
||||
conn.close()
|
||||
reporting(report, "Incorrect master version", ValueError)
|
||||
except Exception as err:
|
||||
if report:
|
||||
report('ERROR', str(err))
|
||||
return None
|
||||
else:
|
||||
raise
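# Hedged usage sketch, not a definition from this commit: how a caller might
# open a master connection with the helpers above. The report callback and the
# server_address/server_port settings are assumptions taken from the panel
# code earlier in this diff.
def example_connect(scene, report=None):
    netsettings = scene.network_render
    conn = clientConnection(netsettings.server_address, netsettings.server_port, report)
    if conn:
        conn.request("GET", "/version")
        print(conn.getresponse().read())
        conn.close()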
|
||||
|
||||
def clientVerifyVersion(conn):
|
||||
conn.request("GET", "/version")
|
||||
@@ -104,11 +138,23 @@ def clientVerifyVersion(conn):
|
||||
|
||||
if server_version != VERSION:
|
||||
print("Incorrect server version!")
|
||||
print("expected", VERSION, "received", server_version)
|
||||
print("expected", str(VERSION, encoding='utf8'), "received", str(server_version, encoding='utf8'))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def fileURL(job_id, file_index):
|
||||
return "/file_%s_%i" % (job_id, file_index)
|
||||
|
||||
def logURL(job_id, frame_number):
|
||||
return "/log_%s_%i.log" % (job_id, frame_number)
|
||||
|
||||
def renderURL(job_id, frame_number):
|
||||
return "/render_%s_%i.exr" % (job_id, frame_number)
|
||||
|
||||
def cancelURL(job_id):
|
||||
return "/cancel_%s" % (job_id)
|
||||
|
||||
def prefixPath(prefix_directory, file_path, prefix_path):
|
||||
if os.path.isabs(file_path):
|
||||
# if an absolute path, make sure path exists, if it doesn't, use relative local path
|
||||
|
||||
@@ -16,6 +16,8 @@
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# internal blender C module
|
||||
import _bpy
|
||||
from _bpy import types, props
|
||||
@@ -31,35 +33,52 @@ from bpy import ops as _ops_module
|
||||
# fake operator module
|
||||
ops = _ops_module.ops_fake_module
|
||||
|
||||
import sys
|
||||
DEBUG = ("-d" in sys.argv)
|
||||
|
||||
|
||||
def load_scripts(reload_scripts=False):
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
import time
|
||||
|
||||
|
||||
t_main = time.time()
|
||||
|
||||
def test_import(module_name):
|
||||
try:
|
||||
return __import__(module_name)
|
||||
t = time.time()
|
||||
ret = __import__(module_name)
|
||||
if DEBUG:
|
||||
print("time %s %.4f" % (module_name, time.time() - t))
|
||||
return ret
|
||||
except:
|
||||
traceback.print_exc()
|
||||
return None
|
||||
|
||||
|
||||
for base_path in utils.script_paths():
|
||||
for path_subdir in ("ui", "op", "io"):
|
||||
path = os.path.join(base_path, path_subdir)
|
||||
sys.path.insert(0, path)
|
||||
for f in sorted(os.listdir(path)):
|
||||
if f.endswith(".py"):
|
||||
# python module
|
||||
mod = test_import(f[0:-3])
|
||||
elif "." not in f:
|
||||
# python package
|
||||
mod = test_import(f)
|
||||
else:
|
||||
mod = None
|
||||
if os.path.isdir(path):
|
||||
sys.path.insert(0, path)
|
||||
for f in sorted(os.listdir(path)):
|
||||
if f.endswith(".py"):
|
||||
# python module
|
||||
mod = test_import(f[0:-3])
|
||||
elif "." not in f:
|
||||
# python package
|
||||
mod = test_import(f)
|
||||
else:
|
||||
mod = None
|
||||
|
||||
if reload_scripts and mod:
|
||||
print("Reloading:", mod)
|
||||
reload(mod)
|
||||
|
||||
if DEBUG:
|
||||
print("Time %.4f" % (time.time() - t_main))
|
||||
|
||||
if reload_scripts and mod:
|
||||
print("Reloading:", mod)
|
||||
reload(mod)
|
||||
|
||||
def _main():
|
||||
|
||||
@@ -69,7 +88,8 @@ def _main():
|
||||
import sys
|
||||
sys.stdin = None
|
||||
|
||||
if "-d" in sys.argv and False: # Enable this to measure startup speed
|
||||
# if "-d" in sys.argv: # Enable this to measure startup speed
|
||||
if 0:
|
||||
import cProfile
|
||||
cProfile.run('import bpy; bpy.load_scripts()', 'blender.prof')
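# Hedged follow-up sketch (assumes the profiling block above was enabled and
# 'blender.prof' was written): the profile can be read back with the standard
# pstats module.
import pstats
stats = pstats.Stats('blender.prof')
stats.sort_stats('cumulative').print_stats(20)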
|
||||
|
||||
@@ -81,5 +101,3 @@ def _main():
|
||||
load_scripts()
|
||||
|
||||
_main()
|
||||
|
||||
|
||||
|
||||
@@ -21,8 +21,9 @@
|
||||
# for slightly faster access
|
||||
from _bpy import ops as ops_module
|
||||
|
||||
op_add = ops_module.add
|
||||
# op_add = ops_module.add
|
||||
op_remove = ops_module.remove
|
||||
op_add_macro = ops_module.add_macro
|
||||
op_dir = ops_module.dir
|
||||
op_call = ops_module.call
|
||||
op_as_string = ops_module.as_string
|
||||
@@ -59,6 +60,9 @@ class bpy_ops(object):
|
||||
def add(self, pyop):
|
||||
op_add(pyop)
|
||||
|
||||
def add_macro(self, pyop):
|
||||
op_add_macro(pyop)
|
||||
|
||||
def remove(self, pyop):
|
||||
op_remove(pyop)
|
||||
|
||||
@@ -130,7 +134,6 @@ class bpy_ops_submodule_op(object):
|
||||
|
||||
__keys__ = ('module', 'func')
|
||||
|
||||
|
||||
def _get_doc(self):
|
||||
return op_as_string(self.idname())
|
||||
|
||||
|
||||
@@ -16,36 +16,103 @@
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
import os
|
||||
|
||||
|
||||
def expandpath(path):
|
||||
if path.startswith("//"):
|
||||
return os.path.join(os.path.dirname(bpy.data.filename), path[2:])
|
||||
|
||||
return path
|
||||
|
||||
|
||||
_unclean_chars = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, \
|
||||
17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, \
|
||||
35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 58, 59, 60, 61, 62, 63, \
|
||||
64, 91, 92, 93, 94, 96, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, \
|
||||
133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, \
|
||||
147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, \
|
||||
161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, \
|
||||
175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, \
|
||||
189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, \
|
||||
203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, \
|
||||
217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, \
|
||||
231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, \
|
||||
245, 246, 247, 248, 249, 250, 251, 252, 253, 254]
|
||||
|
||||
_unclean_chars = ''.join([chr(i) for i in _unclean_chars])
|
||||
|
||||
|
||||
def clean_name(name, replace="_"):
|
||||
'''
|
||||
All characters besides A-Z/a-z, 0-9 are replaced with "_"
|
||||
or the replace argument if defined.
|
||||
'''
|
||||
for ch in _unclean_chars:
|
||||
name = name.replace(ch, replace)
|
||||
return name
|
||||
|
||||
|
||||
def display_name(name):
|
||||
'''
|
||||
Only capitalize names that are all lowercase; mixed-case names are used as-is.
|
||||
should work with filenames and module names.
|
||||
'''
|
||||
name_base = os.path.splitext(name)[0]
|
||||
|
||||
# string replacements
|
||||
name_base = name_base.replace("_colon_", ":")
|
||||
|
||||
name_base = name_base.replace("_", " ")
|
||||
|
||||
if name_base.lower() == name_base:
|
||||
return ' '.join([w[0].upper() + w[1:] for w in name_base.split()])
|
||||
else:
|
||||
return name_base
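# Illustrative behaviour, not part of the module:
#   display_name("some_file.py") -> "Some File"
#   display_name("MixedCase_ui")  -> "MixedCase ui"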
|
||||
|
||||
|
||||
# base scripts
|
||||
_scripts = os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)
|
||||
_scripts = (os.path.normpath(_scripts), )
|
||||
|
||||
|
||||
def script_paths(*args):
|
||||
scripts = list(_scripts)
|
||||
|
||||
# add user scripts dir
|
||||
user_script_path = bpy.context.user_preferences.filepaths.python_scripts_directory
|
||||
|
||||
if not user_script_path:
|
||||
# XXX - WIN32 needs checking, perhaps better call a blender internal function.
|
||||
user_script_path = os.path.join(os.path.expanduser("~"), ".blender", "scripts")
|
||||
|
||||
user_script_path = os.path.normpath(user_script_path)
|
||||
|
||||
if user_script_path not in scripts and os.path.isdir(user_script_path):
|
||||
scripts.append(user_script_path)
|
||||
|
||||
if not args:
|
||||
return _scripts
|
||||
return scripts
|
||||
|
||||
subdir = os.path.join(*args)
|
||||
script_paths = []
|
||||
for path in _scripts:
|
||||
script_paths.append(os.path.join(path, subdir))
|
||||
for path in scripts:
|
||||
path_subdir = os.path.join(path, subdir)
|
||||
if os.path.isdir(path_subdir):
|
||||
script_paths.append(path_subdir)
|
||||
|
||||
return script_paths
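# Hedged usage sketch (paths are illustrative; actual values depend on the
# install location and the user's python_scripts_directory preference):
#   script_paths()      -> ('.../release/scripts',)
#   script_paths("ui")  -> ['.../release/scripts/ui', '.../.blender/scripts/ui']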
|
||||
|
||||
|
||||
_presets = os.path.join(_scripts[0], "presets") # FIXME - multiple paths
|
||||
_presets = os.path.join(_scripts[0], "presets") # FIXME - multiple paths
|
||||
|
||||
|
||||
def preset_paths(subdir):
|
||||
'''
|
||||
Returns a list of paths for a specific preset.
|
||||
'''
|
||||
|
||||
return (os.path.join(_presets, subdir), )
|
||||
'''
|
||||
Returns a list of paths for a specific preset.
|
||||
'''
|
||||
|
||||
return (os.path.join(_presets, subdir), )
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
from _bpy import types as bpy_types
|
||||
|
||||
StructRNA = bpy_types.Struct.__bases__[0]
|
||||
@@ -22,6 +25,7 @@ StructRNA = bpy_types.Struct.__bases__[0]
|
||||
|
||||
|
||||
class Context(StructRNA):
|
||||
__slots__ = ()
|
||||
|
||||
def copy(self):
|
||||
new_context = {}
|
||||
@@ -34,26 +38,203 @@ class Context(StructRNA):
|
||||
|
||||
|
||||
class Object(bpy_types.ID):
|
||||
__slots__ = ()
|
||||
|
||||
def _get_children(self):
|
||||
@property
|
||||
def children(self):
|
||||
"""All the children of this object"""
|
||||
import bpy
|
||||
return [child for child in bpy.data.objects if child.parent == self]
|
||||
|
||||
children = property(_get_children)
|
||||
|
||||
class _GenericBone:
|
||||
"""
|
||||
functions for bones, common between Armature/Pose/Edit bones.
|
||||
internal subclassing use only.
|
||||
"""
|
||||
__slots__ = ()
|
||||
|
||||
def translate(self, vec):
|
||||
"""Utility function to add *vec* to the head and tail of this bone."""
|
||||
self.head += vec
|
||||
self.tail += vec
|
||||
|
||||
def parent_index(self, parent_test):
|
||||
"""
|
||||
The same as 'bone in other_bone.parent_recursive' but saves generating a list.
|
||||
"""
|
||||
# use the name so different types can be tested.
|
||||
name = parent_test.name
|
||||
|
||||
parent = self.parent
|
||||
i = 1
|
||||
while parent:
|
||||
if parent.name == name:
|
||||
return i
|
||||
parent = parent.parent
|
||||
i += 1
|
||||
|
||||
return 0
|
||||
|
||||
@property
|
||||
def basename(self):
|
||||
"""The name of this bone before any '.' character"""
|
||||
#return self.name.rsplit(".", 1)[0]
|
||||
return self.name.split(".")[0]
|
||||
|
||||
@property
|
||||
def parent_recursive(self):
|
||||
"""A list of parents, starting with the immediate parent"""
|
||||
parent_list = []
|
||||
parent = self.parent
|
||||
|
||||
while parent:
|
||||
if parent:
|
||||
parent_list.append(parent)
|
||||
|
||||
parent = parent.parent
|
||||
|
||||
return parent_list
|
||||
|
||||
@property
|
||||
def length(self):
|
||||
"""The distance from head to tail, when set the head is moved to fit the length."""
|
||||
return self.vector.length
|
||||
|
||||
@length.setter
|
||||
def length(self, value):
|
||||
self.tail = self.head + ((self.tail - self.head).normalize() * value)
|
||||
|
||||
@property
|
||||
def vector(self):
|
||||
"""The direction this bone is pointing. Utility function for (tail - head)"""
|
||||
return (self.tail - self.head)
|
||||
|
||||
@property
|
||||
def children(self):
|
||||
"""A list of all the bones children."""
|
||||
return [child for child in self._other_bones if child.parent == self]
|
||||
|
||||
@property
|
||||
def children_recursive(self):
|
||||
"""a list of all children from this bone."""
|
||||
bones_children = []
|
||||
for bone in self._other_bones:
|
||||
index = bone.parent_index(self)
|
||||
if index:
|
||||
bones_children.append((index, bone))
|
||||
|
||||
# sort by distance to parent
|
||||
bones_children.sort(key=lambda bone_pair: bone_pair[0])
|
||||
return [bone for index, bone in bones_children]
|
||||
|
||||
@property
|
||||
def children_recursive_basename(self):
|
||||
"""
|
||||
Returns a chain of children with the same base name as this bone
|
||||
Only direct chains are supported; forks caused by multiple children with matching basenames
terminate the chain and are not returned.
|
||||
"""
|
||||
basename = self.basename
|
||||
chain = []
|
||||
|
||||
child = self
|
||||
while True:
|
||||
children = child.children
|
||||
children_basename = []
|
||||
|
||||
for child in children:
|
||||
if basename == child.basename:
|
||||
children_basename.append(child)
|
||||
|
||||
if len(children_basename) == 1:
|
||||
child = children_basename[0]
|
||||
chain.append(child)
|
||||
else:
|
||||
if len(children_basename):
|
||||
print("multiple basenames found, this is probably not what you want!", bone.name, children_basename)
|
||||
|
||||
break
|
||||
|
||||
return chain
|
||||
|
||||
@property
|
||||
def _other_bones(self):
|
||||
id_data = self.id_data
|
||||
id_data_type = type(id_data)
|
||||
|
||||
if id_data_type == bpy_types.Object:
|
||||
bones = id_data.pose.bones
|
||||
elif id_data_type == bpy_types.Armature:
|
||||
bones = id_data.edit_bones
|
||||
if not bones: # not in editmode
|
||||
bones = id_data.bones
|
||||
|
||||
return bones
|
||||
|
||||
|
||||
def ord_ind(i1,i2):
|
||||
if i1<i2: return i1,i2
|
||||
return i2,i1
|
||||
class PoseBone(StructRNA, _GenericBone):
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
class Bone(StructRNA, _GenericBone):
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
class EditBone(StructRNA, _GenericBone):
|
||||
__slots__ = ()
|
||||
|
||||
def align_orientation(self, other):
|
||||
"""
|
||||
Align this bone to another by moving its tail and setting its roll;
|
||||
the length of the other bone is not used.
|
||||
"""
|
||||
vec = other.vector.normalize() * self.length
|
||||
self.tail = self.head + vec
|
||||
self.roll = other.roll
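# Hedged usage sketch (assumes an armature in edit mode; the bone names are
# illustrative only):
#   arm = bpy.context.object.data
#   arm.edit_bones["hand.L"].align_orientation(arm.edit_bones["forearm.L"])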
|
||||
|
||||
|
||||
def ord_ind(i1, i2):
|
||||
if i1 < i2:
|
||||
return i1, i2
|
||||
return i2, i1
|
||||
|
||||
|
||||
class Mesh(bpy_types.ID):
|
||||
__slots__ = ()
|
||||
|
||||
def _get_edge_keys(self):
|
||||
def from_pydata(self, verts, edges, faces):
|
||||
"""
|
||||
Make a mesh from a list of verts/edges/faces
|
||||
Until we have a nicer way to make geometry, use this.
|
||||
"""
|
||||
self.add_geometry(len(verts), len(edges), len(faces))
|
||||
|
||||
verts_flat = [f for v in verts for f in v]
|
||||
self.verts.foreach_set("co", verts_flat)
|
||||
del verts_flat
|
||||
|
||||
edges_flat = [i for e in edges for i in e]
|
||||
self.edges.foreach_set("verts", edges_flat)
|
||||
del edges_flat
|
||||
|
||||
def treat_face(f):
|
||||
if len(f) == 3:
|
||||
return f[0], f[1], f[2], 0
|
||||
elif f[3] == 0:
|
||||
return f[3], f[0], f[1], f[2]
|
||||
return f
|
||||
|
||||
faces_flat = [v for f in faces for v in treat_face(f)]
|
||||
self.faces.foreach_set("verts_raw", faces_flat)
|
||||
del faces_flat
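# Hedged usage sketch of from_pydata, mirroring how retopo.py (added later in
# this commit) builds its mesh; bpy.data.add_mesh is the 2.5-era call used there.
import bpy
verts = [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (1.0, 1.0, 0.0), (0.0, 1.0, 0.0)]
faces = [(0, 1, 2, 3)]
me = bpy.data.add_mesh("Plane")
me.from_pydata(verts, [], faces)
me.update()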
|
||||
|
||||
@property
|
||||
def edge_keys(self):
|
||||
return [edge_key for face in self.faces for edge_key in face.edge_keys]
|
||||
|
||||
edge_keys = property(_get_edge_keys)
|
||||
|
||||
def _get_edge_face_count_dict(self):
|
||||
@property
|
||||
def edge_face_count_dict(self):
|
||||
face_edge_keys = [face.edge_keys for face in self.faces]
|
||||
face_edge_count = {}
|
||||
for face_keys in face_edge_keys:
|
||||
@@ -65,71 +246,172 @@ class Mesh(bpy_types.ID):
|
||||
|
||||
return face_edge_count
|
||||
|
||||
edge_face_count_dict = property(_get_edge_face_count_dict)
|
||||
|
||||
def _get_edge_face_count(self):
|
||||
@property
|
||||
def edge_face_count(self):
|
||||
edge_face_count_dict = self.edge_face_count_dict
|
||||
return [edge_face_count_dict.get(ed.key, 0) for ed in self.edges]
|
||||
|
||||
edge_face_count = property(_get_edge_face_count)
|
||||
def edge_loops(self, faces=None, seams=()):
|
||||
"""
|
||||
Edge loops defined by faces
|
||||
|
||||
Takes me.faces or a list of faces and returns the edge loops
|
||||
These edge loops are the edges that sit between quads, so they don't touch
only 1 quad. Note: edges that are not connected will make 2 edge loops, each containing only 2 edges.
|
||||
|
||||
return a list of edge key lists
|
||||
[ [(0,1), (4, 8), (3,8)], ...]
|
||||
|
||||
optionally, seams are edge keys that will be removed
|
||||
"""
|
||||
|
||||
OTHER_INDEX = 2,3,0,1 # opposite face index
|
||||
|
||||
if faces is None:
|
||||
faces= self.faces
|
||||
|
||||
edges = {}
|
||||
|
||||
for f in faces:
|
||||
# if len(f) == 4:
|
||||
if f.verts_raw[3] != 0:
|
||||
edge_keys = f.edge_keys
|
||||
for i, edkey in enumerate(f.edge_keys):
|
||||
edges.setdefault(edkey, []).append(edge_keys[OTHER_INDEX[i]])
|
||||
|
||||
for edkey in seams:
|
||||
edges[edkey] = []
|
||||
|
||||
# Collect edge loops here
|
||||
edge_loops = []
|
||||
|
||||
for edkey, ed_adj in edges.items():
|
||||
if 0 <len(ed_adj) < 3: # 1 or 2
|
||||
# Seek the first edge
|
||||
context_loop = [edkey, ed_adj[0]]
|
||||
edge_loops.append(context_loop)
|
||||
if len(ed_adj) == 2:
|
||||
other_dir = ed_adj[1]
|
||||
else:
|
||||
other_dir = None
|
||||
|
||||
ed_adj[:] = []
|
||||
|
||||
flipped = False
|
||||
|
||||
while 1:
|
||||
# from knowing the last 2, look for the next.
|
||||
ed_adj = edges[context_loop[-1]]
|
||||
if len(ed_adj) != 2:
|
||||
|
||||
if other_dir and flipped==False: # the original edge had 2 other edges
|
||||
flipped = True # only flip the list once
|
||||
context_loop.reverse()
|
||||
ed_adj[:] = []
|
||||
context_loop.append(other_dir) # save 1 lookup
|
||||
|
||||
ed_adj = edges[context_loop[-1]]
|
||||
if len(ed_adj) != 2:
|
||||
ed_adj[:] = []
|
||||
break
|
||||
else:
|
||||
ed_adj[:] = []
|
||||
break
|
||||
|
||||
i = ed_adj.index(context_loop[-2])
|
||||
context_loop.append(ed_adj[not i])
|
||||
|
||||
# Don't look at this again
|
||||
ed_adj[:] = []
|
||||
|
||||
|
||||
return edge_loops
|
||||
|
||||
|
||||
class MeshEdge(StructRNA):
|
||||
__slots__ = ()
|
||||
|
||||
def _get_key(self):
|
||||
@property
|
||||
def key(self):
|
||||
return ord_ind(*tuple(self.verts))
|
||||
|
||||
key = property(_get_key)
|
||||
|
||||
|
||||
class MeshFace(StructRNA):
|
||||
__slots__ = ()
|
||||
|
||||
def _get_edge_keys(self):
|
||||
@property
|
||||
def edge_keys(self):
|
||||
verts = tuple(self.verts)
|
||||
if len(verts)==3:
|
||||
return ord_ind(verts[0], verts[1]), ord_ind(verts[1], verts[2]), ord_ind(verts[2], verts[0])
|
||||
if len(verts) == 3:
|
||||
return ord_ind(verts[0], verts[1]), ord_ind(verts[1], verts[2]), ord_ind(verts[2], verts[0])
|
||||
|
||||
return ord_ind(verts[0], verts[1]), ord_ind(verts[1], verts[2]), ord_ind(verts[2], verts[3]), ord_ind(verts[3], verts[0])
|
||||
|
||||
edge_keys = property(_get_edge_keys)
|
||||
return ord_ind(verts[0], verts[1]), ord_ind(verts[1], verts[2]), ord_ind(verts[2], verts[3]), ord_ind(verts[3], verts[0])
|
||||
|
||||
|
||||
import collections
|
||||
|
||||
|
||||
class OrderedMeta(type):
|
||||
|
||||
def __init__(cls, name, bases, attributes):
|
||||
super(OrderedMeta, cls).__init__(name, bases, attributes)
|
||||
cls.order = list(attributes.keys())
|
||||
|
||||
def __prepare__(name, bases, **kwargs):
|
||||
return collections.OrderedDict()
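# Minimal illustration with a hypothetical class: because __prepare__ hands the
# class body an OrderedDict, cls.order keeps the attributes in definition order
# (alongside the implicit entries Python adds, such as __module__).
class _Example(metaclass=OrderedMeta):
    first = 1
    second = 2
# 'first' appears before 'second' in _Example.order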
|
||||
|
||||
|
||||
# Only defined so operators members can be used by accessing self.order
|
||||
class Operator(StructRNA, metaclass=OrderedMeta):
|
||||
pass
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
class Macro(StructRNA, metaclass=OrderedMeta):
|
||||
# bpy_types is imported before ops is defined
|
||||
# so we have to do a local import on each run
|
||||
__slots__ = ()
|
||||
|
||||
@classmethod
|
||||
def define(self, opname):
|
||||
from _bpy import ops
|
||||
return ops.macro_define(self, opname)
|
||||
|
||||
|
||||
class Menu(StructRNA):
|
||||
__slots__ = ()
|
||||
|
||||
@classmethod
|
||||
def _dyn_menu_initialize(cls):
|
||||
draw_funcs = getattr(cls.draw, "_draw_funcs", None)
|
||||
|
||||
if draw_funcs is None:
|
||||
def draw_ls(*args):
|
||||
for func in draw_ls._draw_funcs:
|
||||
func(*args)
|
||||
|
||||
draw_funcs = draw_ls._draw_funcs = [cls.draw]
|
||||
cls.draw = draw_ls
|
||||
|
||||
return draw_funcs
|
||||
|
||||
@classmethod
|
||||
def append(cls, draw_func):
|
||||
"""Prepend an draw function to this menu, takes the same arguments as the menus draw function."""
|
||||
draw_funcs = cls._dyn_menu_initialize()
|
||||
draw_funcs.append(draw_func)
|
||||
|
||||
@classmethod
|
||||
def prepend(cls, draw_func):
|
||||
"""Prepend a draw function to this menu, takes the same arguments as the menus draw function."""
|
||||
draw_funcs = cls._dyn_menu_initialize()
|
||||
draw_funcs.insert(0, draw_func)
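# Hedged usage sketch: extending an existing menu at runtime with append().
# The menu class name INFO_MT_file and the operator id are assumptions about
# the rest of the UI scripts, purely for illustration.
def extra_entry(self, context):
    self.layout.operator("wm.save_mainfile", text="Quick Save")

bpy.types.INFO_MT_file.append(extra_entry)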
|
||||
|
||||
def path_menu(self, searchpaths, operator):
|
||||
layout = self.layout
|
||||
# hard coded to set the operators 'path' to the filename.
|
||||
|
||||
import os
|
||||
|
||||
def path_to_name(f):
|
||||
''' Only capitalize names that are all lowercase; mixed-case names are used as-is.
|
||||
'''
|
||||
f_base = os.path.splitext(f)[0]
|
||||
|
||||
# string replacements
|
||||
f_base = f_base.replace("_colon_", ":")
|
||||
|
||||
f_base = f_base.replace("_", " ")
|
||||
|
||||
if f_base.lower() == f_base:
|
||||
return ' '.join([w[0].upper() + w[1:] for w in f_base.split()])
|
||||
else:
|
||||
return f_base
|
||||
import os
|
||||
import bpy.utils
|
||||
|
||||
layout = self.layout
|
||||
|
||||
@@ -145,12 +427,13 @@ class Menu(StructRNA):
|
||||
if f.startswith("."):
|
||||
continue
|
||||
|
||||
layout.operator(operator, text=path_to_name(f)).path = path
|
||||
|
||||
layout.operator(operator, text=bpy.utils.display_name(f)).path = path
|
||||
|
||||
def draw_preset(self, context):
|
||||
'''Define these on the subclass
|
||||
"""Define these on the subclass
|
||||
- preset_operator
|
||||
- preset_subdir
|
||||
'''
|
||||
"""
|
||||
import bpy
|
||||
self.path_menu(bpy.utils.preset_paths(self.preset_subdir), self.preset_operator)
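# Hedged sketch of a Menu subclass using draw_preset; the subdir and operator
# names below are placeholders, not definitions from this commit, and
# registration details are omitted.
class MY_MT_example_presets(Menu):
    bl_label = "Example Presets"
    preset_subdir = "render"
    preset_operator = "script.python_file_run"
    draw = Menu.draw_preset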
|
||||
|
||||
|
||||
@@ -13,4 +13,6 @@
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
"""Package for console specific modules."""
|
||||
|
||||
@@ -13,6 +13,8 @@
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# <pep8-80 compliant>
|
||||
|
||||
import inspect
|
||||
import re
|
||||
|
||||
|
||||
@@ -21,6 +21,8 @@
|
||||
# the file COPYING, distributed as part of this software.
|
||||
#*****************************************************************************
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
"""Completer for import statements
|
||||
|
||||
Original code was from IPython/Extensions/ipy_completers.py. The following
|
||||
@@ -69,10 +71,13 @@ def get_root_modules():
|
||||
|
||||
modules += sys.builtin_module_names
|
||||
|
||||
# needed for modules defined in C
|
||||
modules += sys.modules.keys()
|
||||
|
||||
modules = list(set(modules))
|
||||
if '__init__' in modules:
|
||||
modules.remove('__init__')
|
||||
modules = sorted(set(modules))
|
||||
modules = sorted(modules)
|
||||
if store:
|
||||
ROOT_MODULES = modules
|
||||
return modules
|
||||
|
||||
@@ -13,6 +13,8 @@
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# <pep8-80 compliant>
|
||||
|
||||
"""Autocomplete with the standard library"""
|
||||
|
||||
import re
|
||||
|
||||
@@ -13,6 +13,8 @@
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# <pep8-80 compliant>
|
||||
|
||||
"""This module provides intellisense features such as:
|
||||
|
||||
* autocompletion
|
||||
|
||||
release/scripts/modules/graphviz_export.py (new file, 187 lines)
@@ -0,0 +1,187 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
|
||||
header = '''
|
||||
digraph ancestors {
|
||||
graph [fontsize=30 labelloc="t" label="" splines=false overlap=true, rankdir=BT];
|
||||
ratio = "auto" ;
|
||||
'''
|
||||
|
||||
footer = '''
|
||||
}
|
||||
'''
|
||||
|
||||
|
||||
def compat_str(text, line_length=0):
|
||||
|
||||
if line_length:
|
||||
text_ls = []
|
||||
while len(text) > line_length:
|
||||
text_ls.append(text[:line_length])
|
||||
text = text[line_length:]
|
||||
|
||||
if text:
|
||||
text_ls.append(text)
|
||||
text = '\n '.join(text_ls)
|
||||
|
||||
|
||||
#text = text.replace('.', '.\n')
|
||||
#text = text.replace(']', ']\n')
|
||||
text = text.replace("\n", "\\n")
|
||||
text = text.replace('"', '\\"')
|
||||
return text
|
||||
|
||||
|
||||
def graph_armature(obj, path, FAKE_PARENT=True, CONSTRAINTS=True, DRIVERS=True, XTRA_INFO=True):
|
||||
CONSTRAINTS = DRIVERS = True
|
||||
|
||||
fileobject = open(path, "w")
|
||||
fw = fileobject.write
|
||||
fw(header)
|
||||
fw('label = "%s::%s" ;' % (bpy.data.filename.split("/")[-1].split("\\")[-1], obj.name))
|
||||
|
||||
arm = obj.data
|
||||
|
||||
bones = [bone.name for bone in arm.bones]
|
||||
bones.sort()
|
||||
print("")
|
||||
for bone in bones:
|
||||
b = arm.bones[bone]
|
||||
print(">>", bone, ["*>", "->"][b.connected], getattr(getattr(b, "parent", ""), "name", ""))
|
||||
label = [bone]
|
||||
bone = arm.bones[bone]
|
||||
|
||||
for key, value in obj.pose.bones[bone.name].items():
|
||||
if key.startswith("_"):
|
||||
continue
|
||||
|
||||
if type(value) == float:
|
||||
value = "%.3f" % value
|
||||
elif type(value) == str:
|
||||
value = compat_str(value)
|
||||
|
||||
label.append("%s = %s" % (key, value))
|
||||
|
||||
opts = ["shape=box", "regular=1", "style=filled", "fixedsize=false", 'label="%s"' % compat_str('\n'.join(label))]
|
||||
|
||||
if bone.name.startswith('ORG'):
|
||||
opts.append("fillcolor=yellow")
|
||||
else:
|
||||
opts.append("fillcolor=white")
|
||||
|
||||
|
||||
fw('"%s" [%s];\n' % (bone.name, ','.join(opts)))
|
||||
|
||||
fw('\n\n# Hierarchy:\n')
|
||||
|
||||
# Root node.
|
||||
if FAKE_PARENT:
|
||||
fw('"Object::%s" [];\n' % obj.name)
|
||||
|
||||
for bone in bones:
|
||||
bone = arm.bones[bone]
|
||||
|
||||
parent = bone.parent
|
||||
if parent:
|
||||
parent_name = parent.name
|
||||
connected = bone.connected
|
||||
elif FAKE_PARENT:
|
||||
parent_name = 'Object::%s' % obj.name
|
||||
connected = False
|
||||
else:
|
||||
continue
|
||||
|
||||
opts = ["dir=forward", "weight=2", "arrowhead=normal"]
|
||||
if not connected:
|
||||
opts.append("style=dotted")
|
||||
|
||||
fw('"%s" -> "%s" [%s] ;\n' % (bone.name, parent_name, ','.join(opts)))
|
||||
del bone
|
||||
|
||||
# constraints
|
||||
if CONSTRAINTS:
|
||||
fw('\n\n# Constraints:\n')
|
||||
for bone in bones:
|
||||
pbone = obj.pose.bones[bone]
|
||||
# must be ordered
|
||||
for constraint in pbone.constraints:
|
||||
subtarget = getattr(constraint, "subtarget", "")
|
||||
if subtarget:
|
||||
# TODO, not internal links
|
||||
opts = ['dir=forward', "weight=1", "arrowhead=normal", "arrowtail=none", "constraint=false", 'color="red"', 'labelfontsize=4']
|
||||
if XTRA_INFO:
|
||||
label = "%s\n%s" % (constraint.type, constraint.name)
|
||||
opts.append('label="%s"' % compat_str(label))
|
||||
fw('"%s" -> "%s" [%s] ;\n' % (pbone.name, subtarget, ','.join(opts)))
|
||||
|
||||
# Drivers
|
||||
if DRIVERS:
|
||||
fw('\n\n# Drivers:\n')
|
||||
|
||||
def rna_path_as_pbone(rna_path):
|
||||
if not rna_path.startswith("pose.bones["):
|
||||
return None
|
||||
|
||||
#rna_path_bone = rna_path[:rna_path.index("]") + 1]
|
||||
#return obj.path_resolve(rna_path_bone)
|
||||
bone_name = rna_path.split("[")[1].split("]")[0]
|
||||
return obj.pose.bones[bone_name[1:-1]]
|
||||
|
||||
animation_data = obj.animation_data
|
||||
if animation_data:
|
||||
|
||||
fcurve_drivers = [fcurve_driver for fcurve_driver in animation_data.drivers]
|
||||
fcurve_drivers.sort(key=lambda fcurve_driver: fcurve_driver.data_path)
|
||||
|
||||
for fcurve_driver in fcurve_drivers:
|
||||
rna_path = fcurve_driver.data_path
|
||||
pbone = rna_path_as_pbone(rna_path)
|
||||
|
||||
if pbone:
|
||||
for target in fcurve_driver.driver.targets:
|
||||
pbone_target = rna_path_as_pbone(target.data_path)
|
||||
rna_path_target = target.data_path
|
||||
if pbone_target:
|
||||
opts = ['dir=forward', "weight=1", "arrowhead=normal", "arrowtail=none", "constraint=false", 'color="blue"', "labelfontsize=4"] # ,
|
||||
display_source = rna_path.replace("pose.bones", "")
|
||||
display_target = rna_path_target.replace("pose.bones", "")
|
||||
if XTRA_INFO:
|
||||
label = "%s\\n%s" % (display_source, display_target)
|
||||
opts.append('label="%s"' % compat_str(label))
|
||||
fw('"%s" -> "%s" [%s] ;\n' % (pbone_target.name, pbone.name, ','.join(opts)))
|
||||
|
||||
fw(footer)
|
||||
fileobject.close()
|
||||
|
||||
'''
|
||||
print(".", end='')
|
||||
import sys
|
||||
sys.stdout.flush()
|
||||
'''
|
||||
print("\nSaved:", path)
|
||||
return True
|
||||
|
||||
if __name__ == "__main__":
|
||||
import os
|
||||
tmppath = "/tmp/test.dot"
|
||||
graph_armature(bpy.context.object, tmppath, CONSTRAINTS=True, DRIVERS=True)
|
||||
os.system("dot -Tpng %s > %s; eog %s &" % (tmppath, tmppath + '.png', tmppath + '.png'))
|
||||
release/scripts/modules/retopo.py (new file, 301 lines)
@@ -0,0 +1,301 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
|
||||
EPS_SPLINE_DIV = 15.0 # remove doubles is ~15th the length of the spline
|
||||
|
||||
|
||||
def get_hub(co, _hubs, EPS_SPLINE):
|
||||
|
||||
if 1:
|
||||
for hub in _hubs.values():
|
||||
if (hub.co - co).length < EPS_SPLINE:
|
||||
return hub
|
||||
|
||||
key = co.toTuple(3)
|
||||
hub = _hubs[key] = Hub(co, key, len(_hubs))
|
||||
return hub
|
||||
else:
|
||||
pass
|
||||
|
||||
'''
|
||||
key = co.toTuple(3)
|
||||
try:
|
||||
return _hubs[key]
|
||||
except:
|
||||
hub = _hubs[key] = Hub(co, key, len(_hubs))
|
||||
return hub
|
||||
'''
|
||||
|
||||
|
||||
class Hub(object):
|
||||
__slots__ = "co", "key", "index", "links"
|
||||
|
||||
def __init__(self, co, key, index):
|
||||
self.co = co.copy()
|
||||
self.key = key
|
||||
self.index = index
|
||||
self.links = []
|
||||
|
||||
def get_weight(self):
|
||||
f = 0.0
|
||||
|
||||
for hub_other in self.links:
|
||||
f += (self.co - hub_other.co).length
|
||||
|
||||
def replace(self, other):
|
||||
for hub in self.links:
|
||||
try:
|
||||
hub.links.remove(self)
|
||||
except:
|
||||
pass
|
||||
if other not in hub.links:
|
||||
hub.links.append(other)
|
||||
|
||||
def dist(self, other):
|
||||
return (self.co - other.co).length
|
||||
|
||||
def calc_faces(self, hub_ls):
|
||||
faces = []
|
||||
# first tris
|
||||
for l_a in self.links:
|
||||
for l_b in l_a.links:
|
||||
if l_b is not self and l_b in self.links:
|
||||
# will give duplicates
|
||||
faces.append((self.index, l_a.index, l_b.index))
|
||||
|
||||
# now quads, check which links share 2 different verts directly
|
||||
def validate_quad(face):
|
||||
if len(set(face)) != len(face):
|
||||
return False
|
||||
if hub_ls[face[0]] in hub_ls[face[2]].links:
|
||||
return False
|
||||
if hub_ls[face[2]] in hub_ls[face[0]].links:
|
||||
return False
|
||||
|
||||
if hub_ls[face[1]] in hub_ls[face[3]].links:
|
||||
return False
|
||||
if hub_ls[face[3]] in hub_ls[face[1]].links:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
for i, l_a in enumerate(self.links):
|
||||
links_a = {l.index for l in l_a.links}
|
||||
for j in range(i):
|
||||
l_b = self.links[j]
|
||||
|
||||
links_b = {l.index for l in l_b.links}
|
||||
|
||||
isect = links_a.intersection(links_b)
|
||||
if len(isect) == 2:
|
||||
isect = list(isect)
|
||||
|
||||
# check there are no diagonal lines
|
||||
face = (isect[0], l_a.index, isect[1], l_b.index)
|
||||
if validate_quad(face):
|
||||
|
||||
faces.append(face)
|
||||
|
||||
return faces
|
||||
|
||||
|
||||
class Spline(object):
|
||||
__slots__ = "points", "hubs", "length"
|
||||
|
||||
def __init__(self, points):
|
||||
self.points = points
|
||||
self.hubs = []
|
||||
|
||||
# calc length
|
||||
f = 0.0
|
||||
co_prev = self.points[0]
|
||||
for co in self.points[1:]:
|
||||
f += (co - co_prev).length
|
||||
co_prev = co
|
||||
self.length = f
|
||||
|
||||
def link(self):
|
||||
if len(self.hubs) < 2:
|
||||
return
|
||||
|
||||
edges = list(set([i for i, hub in self.hubs]))
|
||||
edges.sort()
|
||||
|
||||
edges_order = {}
|
||||
for i in edges:
|
||||
edges_order[i] = []
|
||||
|
||||
|
||||
# self.hubs.sort()
|
||||
for i, hub in self.hubs:
|
||||
edges_order[i].append(hub)
|
||||
|
||||
hubs_order = []
|
||||
for i in edges:
|
||||
ls = edges_order[i]
|
||||
edge_start = self.points[i]
|
||||
ls.sort(key=lambda hub: (hub.co - edge_start).length)
|
||||
hubs_order.extend(ls)
|
||||
|
||||
# Now we have the order, connect the hubs
|
||||
hub_prev = hubs_order[0]
|
||||
|
||||
for hub in hubs_order[1:]:
|
||||
hub.links.append(hub_prev)
|
||||
hub_prev.links.append(hub)
|
||||
hub_prev = hub
|
||||
|
||||
|
||||
def get_points(stroke):
|
||||
return [point.co.copy() for point in stroke.points]
|
||||
|
||||
|
||||
def get_splines(gp):
|
||||
l = None
|
||||
for l in gp.layers:
|
||||
if l.active: # XXX - should be layers.active
|
||||
break
|
||||
if l:
|
||||
frame = l.active_frame
|
||||
return [Spline(get_points(stroke)) for stroke in frame.strokes]
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
def xsect_spline(sp_a, sp_b, _hubs):
|
||||
from Mathutils import LineIntersect
|
||||
from Mathutils import MidpointVecs
|
||||
from Geometry import ClosestPointOnLine
|
||||
pt_a_prev = pt_b_prev = None
|
||||
EPS_SPLINE = (sp_a.length + sp_b.length) / (EPS_SPLINE_DIV * 2)
|
||||
pt_a_prev = sp_a.points[0]
|
||||
for a, pt_a in enumerate(sp_a.points[1:]):
|
||||
pt_b_prev = sp_b.points[0]
|
||||
for b, pt_b in enumerate(sp_b.points[1:]):
|
||||
|
||||
# Now we have 2 edges
|
||||
# print(pt_a, pt_a_prev, pt_b, pt_b_prev)
|
||||
xsect = LineIntersect(pt_a, pt_a_prev, pt_b, pt_b_prev)
|
||||
if xsect is not None:
|
||||
if (xsect[0] - xsect[1]).length <= EPS_SPLINE:
|
||||
f = ClosestPointOnLine(xsect[1], pt_a, pt_a_prev)[1]
|
||||
# if f >= 0.0-EPS_SPLINE and f <= 1.0+EPS_SPLINE: # for some reason doesn't work so well, same below
|
||||
if f >= 0.0 and f <= 1.0:
|
||||
f = ClosestPointOnLine(xsect[0], pt_b, pt_b_prev)[1]
|
||||
# if f >= 0.0-EPS_SPLINE and f <= 1.0+EPS_SPLINE:
|
||||
if f >= 0.0 and f <= 1.0:
|
||||
# This won't happen often
|
||||
co = MidpointVecs(xsect[0], xsect[1])
|
||||
hub = get_hub(co, _hubs, EPS_SPLINE)
|
||||
|
||||
sp_a.hubs.append((a, hub))
|
||||
sp_b.hubs.append((b, hub))
|
||||
|
||||
pt_b_prev = pt_b
|
||||
|
||||
pt_a_prev = pt_a
|
||||
|
||||
|
||||
def calculate(gp):
|
||||
splines = get_splines(gp)
|
||||
_hubs = {}
|
||||
|
||||
for i, sp in enumerate(splines):
|
||||
for j, sp_other in enumerate(splines):
|
||||
if j <= i:
|
||||
continue
|
||||
|
||||
xsect_spline(sp, sp_other, _hubs)
|
||||
|
||||
for sp in splines:
|
||||
sp.link()
|
||||
|
||||
# remove these
|
||||
hubs_ls = [hub for hub in _hubs.values() if hub.index != -1]
|
||||
|
||||
_hubs.clear()
|
||||
_hubs = None
|
||||
|
||||
for i, hub in enumerate(hubs_ls):
|
||||
hub.index = i
|
||||
|
||||
# Now we have connected hubs, write all edges!
|
||||
def order(i1, i2):
|
||||
if i1 > i2:
|
||||
return i2, i1
|
||||
return i1, i2
|
||||
|
||||
edges = {}
|
||||
|
||||
for hub in hubs_ls:
|
||||
i1 = hub.index
|
||||
for hub_other in hub.links:
|
||||
i2 = hub_other.index
|
||||
edges[order(i1, i2)] = None
|
||||
|
||||
verts = []
|
||||
edges = edges.keys()
|
||||
faces = []
|
||||
|
||||
for hub in hubs_ls:
|
||||
verts.append(hub.co)
|
||||
faces.extend(hub.calc_faces(hubs_ls))
|
||||
|
||||
# remove double faces
|
||||
faces = dict([(tuple(sorted(f)), f) for f in faces]).values()
|
||||
|
||||
mesh = bpy.data.add_mesh("Retopo")
|
||||
mesh.from_pydata(verts, [], faces)
|
||||
|
||||
scene = bpy.context.scene
|
||||
mesh.update()
|
||||
obj_new = bpy.data.add_object('MESH', "Torus")
|
||||
obj_new.data = mesh
|
||||
scene.objects.link(obj_new)
|
||||
|
||||
return obj_new
|
||||
|
||||
|
||||
def main():
|
||||
scene = bpy.context.scene
|
||||
obj = bpy.context.object
|
||||
|
||||
gp = None
|
||||
|
||||
if obj:
|
||||
gp = obj.grease_pencil
|
||||
|
||||
if not gp:
|
||||
gp = scene.grease_pencil
|
||||
|
||||
if not gp:
|
||||
raise Exception("no active grease pencil")
|
||||
|
||||
obj_new = calculate(gp)
|
||||
|
||||
scene.objects.active = obj_new
|
||||
obj_new.selected = True
|
||||
|
||||
# nasty, recalc normals
|
||||
bpy.ops.object.mode_set(mode='EDIT', toggle=False)
|
||||
bpy.ops.mesh.normals_make_consistent(inside=False)
|
||||
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
|
||||
release/scripts/modules/rigify/__init__.py (new file, 446 lines)
@@ -0,0 +1,446 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from Mathutils import Vector
|
||||
|
||||
# TODO, have these in a more general module
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
SPECIAL_TYPES = "root",
|
||||
LAYER_TYPES = "main", "extra", "ik", "fk"
|
||||
|
||||
|
||||
class RigifyError(Exception):
|
||||
"""Exception raised for errors in the metarig.
|
||||
"""
|
||||
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.message)
|
||||
|
||||
|
||||
def submodule_func_from_type(bone_type):
|
||||
type_pair = bone_type.split(".")
|
||||
|
||||
# 'leg.ik' will look for an ik function in the leg module
|
||||
# 'leg' will look up leg.main
|
||||
if len(type_pair) == 1:
|
||||
type_pair = type_pair[0], "main"
|
||||
|
||||
type_name, func_name = type_pair
|
||||
|
||||
# from rigify import leg
|
||||
try:
|
||||
submod = __import__(name="%s.%s" % (__package__, type_name), fromlist=[type_name])
|
||||
except ImportError:
|
||||
raise RigifyError("python module for type '%s' not found" % type_name)
|
||||
|
||||
reload(submod)
|
||||
return type_name, submod, getattr(submod, func_name)
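# Illustrative only: a bare type such as "arm_biped_generic" looks up that
# submodule's main() function, while a dotted type like "leg.ik" (hypothetical
# name) would look up an ik() function in a leg submodule:
#   type_name, submod, func = submodule_func_from_type("arm_biped_generic")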
|
||||
|
||||
|
||||
def get_submodule_types():
|
||||
import os
|
||||
submodules = []
|
||||
files = os.listdir(os.path.dirname(__file__))
|
||||
for f in files:
|
||||
if not f.startswith("_") and f.endswith(".py"):
|
||||
submodules.append(f[:-3])
|
||||
|
||||
return sorted(submodules)
|
||||
|
||||
|
||||
def get_bone_type_options(pbone, type_name):
|
||||
options = {}
|
||||
bone_name = pbone.name
|
||||
for key, value in pbone.items():
|
||||
key_pair = key.split(".")
|
||||
if key_pair[0] == type_name:
|
||||
if len(key_pair) != 2:
|
||||
raise RigifyError("option error for bone '%s', property name was not a pair '%s'" % (bone_name, key_pair))
|
||||
options[key_pair[1]] = value
|
||||
|
||||
return options
|
||||
|
||||
|
||||
def get_layer_dict(options):
|
||||
'''
|
||||
Extracts layer info from a bone options dict
|
||||
defaulting to the layer index if not set.
|
||||
'''
|
||||
layer_default = [False] * 32
|
||||
result = {}
|
||||
for i, layer_type in enumerate(LAYER_TYPES):
|
||||
# doesn't matter if it's not defined
|
||||
layer_index = options.get("layer_" + layer_type, i + 2)
|
||||
layer = layer_default[:]
|
||||
layer[layer_index-1] = True
|
||||
result[layer_type] = layer
|
||||
return result
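# Illustrative only: with an empty options dict every LAYER_TYPES entry falls
# back to layer index i + 2 (1-based), so get_layer_dict({})["main"] is a
# 32-entry bool list with only index 1 set to True.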
|
||||
|
||||
|
||||
def validate_rig(context, obj):
|
||||
'''
|
||||
Makes no changes
|
||||
only runs the metarig definitions and reports errors
|
||||
'''
|
||||
type_found = False
|
||||
|
||||
for pbone in obj.pose.bones:
|
||||
bone_name = pbone.name
|
||||
bone_type = pbone.get("type", "")
|
||||
|
||||
if bone_type:
|
||||
bone_type_list = [bt for bt in bone_type.replace(",", " ").split()]
|
||||
else:
|
||||
bone_type_list = []
|
||||
|
||||
for bone_type in bone_type_list:
|
||||
if bone_type.split(".")[0] in SPECIAL_TYPES:
|
||||
continue
|
||||
|
||||
type_name, submod, type_func = submodule_func_from_type(bone_type)
|
||||
reload(submod)
|
||||
submod.metarig_definition(obj, bone_name)
|
||||
type_found = True
|
||||
|
||||
get_bone_type_options(pbone, bone_type)
|
||||
|
||||
# missing: check for duplicate root bone.
|
||||
|
||||
if not type_found:
|
||||
raise RigifyError("This rig has no 'type' properties defined on any pose bones, nothing to do")
|
||||
|
||||
|
||||
def generate_rig(context, obj_orig, prefix="ORG-", META_DEF=True):
|
||||
'''
|
||||
Main function for generating
|
||||
'''
|
||||
from collections import OrderedDict
|
||||
import rigify_utils
|
||||
reload(rigify_utils)
|
||||
|
||||
# Not needed but catches any errors before duplicating
|
||||
validate_rig(context, obj_orig)
|
||||
|
||||
global_undo = context.user_preferences.edit.global_undo
|
||||
context.user_preferences.edit.global_undo = False
|
||||
mode_orig = context.mode
|
||||
rest_backup = obj_orig.data.pose_position
|
||||
obj_orig.data.pose_position = 'REST'
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
scene = context.scene
|
||||
|
||||
# copy object and data
|
||||
obj_orig.selected = False
|
||||
obj = obj_orig.copy()
|
||||
obj.data = obj_orig.data.copy()
|
||||
scene.objects.link(obj)
|
||||
scene.objects.active = obj
|
||||
obj.selected = True
|
||||
|
||||
if META_DEF:
|
||||
obj_def = obj_orig.copy()
|
||||
obj_def.data = obj_orig.data.copy()
|
||||
scene.objects.link(obj_def)
|
||||
|
||||
arm = obj.data
|
||||
|
||||
# original name mapping
|
||||
base_names = {}
|
||||
|
||||
# add all new parentless children to this bone
|
||||
root_bone = None
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
for bone in arm.edit_bones:
|
||||
bone_name = bone.name
|
||||
if obj.pose.bones[bone_name].get("type", "") != "root":
|
||||
bone.name = prefix + bone_name
|
||||
base_names[bone.name] = bone_name # new -> old mapping
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# key: bone name
|
||||
# value: {type:definition, ...}
|
||||
# where type is the submodule name - leg, arm etc
|
||||
# and definition is a list of bone names
|
||||
bone_definitions = {}
|
||||
|
||||
# key: bone name
|
||||
# value: [functions, ...]
|
||||
# each function is from the module. eg leg.ik, arm.main
|
||||
bone_typeinfos = {}
|
||||
|
||||
# key: bone name
|
||||
# value: [new_bone_name, ...]
|
||||
# where each bone with a 'type' stores a list of bones that it created
|
||||
# ...needed so we can override the root parent
|
||||
bone_genesis = {}
|
||||
|
||||
|
||||
# inspect all bones and assign their definitions before modifying
|
||||
for pbone in obj.pose.bones:
|
||||
bone_name = pbone.name
|
||||
bone_type = pbone.get("type", "")
|
||||
if bone_type:
|
||||
bone_type_list = [bt for bt in bone_type.replace(",", " ").split()]
|
||||
|
||||
# not essential but means running autorig again won't do anything
|
||||
del pbone["type"]
|
||||
else:
|
||||
bone_type_list = []
|
||||
|
||||
if bone_type_list == ["root"]: # special case!
|
||||
if root_bone:
|
||||
raise RigifyError("cant have more then 1 root bone, found '%s' and '%s' to have type==root" % (root_bone, bone_name))
|
||||
root_bone = bone_name
|
||||
bone_type_list[:] = []
|
||||
|
||||
for bone_type in bone_type_list:
|
||||
type_name, submod, type_func = submodule_func_from_type(bone_type)
|
||||
reload(submod)
|
||||
|
||||
bone_def_dict = bone_definitions.setdefault(bone_name, {})
|
||||
|
||||
# Only calculate bone definitions once
|
||||
if type_name not in bone_def_dict:
|
||||
bone_def_dict[type_name] = submod.metarig_definition(obj, bone_name)
|
||||
|
||||
bone_typeinfo = bone_typeinfos.setdefault(bone_name, [])
|
||||
bone_typeinfo.append((type_name, type_func))
|
||||
|
||||
|
||||
# sort bones, not needed but gives more predictable execution which may be useful in rare cases
|
||||
bones_sorted = obj.pose.bones.values()
|
||||
bones_sorted.sort(key=lambda pbone: pbone.name) # first sort by names
|
||||
bones_sorted.sort(key=lambda pbone: len(pbone.parent_recursive)) # parents before children
|
||||
|
||||
# now we have all the info about bones we can start operating on them
|
||||
# for pbone in obj.pose.bones:
|
||||
for pbone in bones_sorted:
|
||||
bone_name = pbone.name
|
||||
|
||||
if bone_name not in bone_typeinfos:
|
||||
continue
|
||||
|
||||
bone_def_dict = bone_definitions[bone_name]
|
||||
|
||||
# Only blend results from the same submodule, eg.
|
||||
# leg.ik and arm.fk could not be blended.
|
||||
results = OrderedDict()
|
||||
|
||||
bone_names_pre = {bone.name for bone in arm.bones}
|
||||
|
||||
for type_name, type_func in bone_typeinfos[bone_name]:
|
||||
# this bones definition of the current typeinfo
|
||||
definition = bone_def_dict[type_name]
|
||||
options = get_bone_type_options(pbone, type_name)
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
ret = type_func(obj, definition, base_names, options)
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
if ret:
|
||||
result_submod = results.setdefault(type_name, [])
|
||||
|
||||
if result_submod and len(result_submod[-1]) != len(ret):
|
||||
raise RigifyError("bone lists not compatible: %s, %s" % (result_submod[-1], ret))
|
||||
|
||||
result_submod.append(ret)
|
||||
|
||||
for result_submod in results.values():
|
||||
# blend 2 chains
|
||||
definition = bone_def_dict[type_name]
|
||||
|
||||
if len(result_submod) == 2:
|
||||
blend_bone_list(obj, definition, result_submod[0], result_submod[1], target_bone=bone_name)
|
||||
|
||||
|
||||
bone_names_post = {bone.name for bone in arm.bones}
|
||||
|
||||
# Store which bones were created from this one
|
||||
bone_genesis[bone_name] = list(bone_names_post - bone_names_pre)
|
||||
|
||||
# need a reverse lookup on bone_genesis so as to know immediately
|
||||
# where a bone comes from
|
||||
bone_genesis_reverse = {}
|
||||
for bone_name, bone_children in bone_genesis.items():
|
||||
for bone_child_name in bone_children:
|
||||
bone_genesis_reverse[bone_child_name] = bone_name
|
||||
|
||||
|
||||
if root_bone:
|
||||
# assign all new parentless bones to this
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
root_ebone = arm.edit_bones[root_bone]
|
||||
for ebone in arm.edit_bones:
|
||||
bone_name = ebone.name
|
||||
if ebone.parent is None and bone_name not in base_names:
|
||||
# check for override
|
||||
bone_creator = bone_genesis_reverse[bone_name]
|
||||
pbone_creator = obj.pose.bones[bone_creator]
|
||||
root_bone_override = pbone_creator.get("root", "")
|
||||
|
||||
if root_bone_override:
|
||||
root_ebone_tmp = arm.edit_bones[root_bone_override]
|
||||
else:
|
||||
root_ebone_tmp = root_ebone
|
||||
|
||||
ebone.connected = False
|
||||
ebone.parent = root_ebone_tmp
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
|
||||
if META_DEF:
|
||||
# for pbone in obj_def.pose.bones:
|
||||
for bone_name, bone_name_new in base_names.items():
|
||||
#pbone_from = bone_name
|
||||
pbone = obj_def.pose.bones[bone_name_new]
|
||||
|
||||
con = pbone.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = bone_name
|
||||
|
||||
if not pbone.bone.connected:
|
||||
con = pbone.constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = bone_name
|
||||
|
||||
# would be 'REST' from when copied
|
||||
obj_def.data.pose_position = 'POSE'
|
||||
|
||||
# todo - make a more generic system?
|
||||
layer_tot = [False] * 32
|
||||
layer_last = layer_tot[:]
|
||||
layer_last[31] = True
|
||||
layer_second_last = layer_tot[:]
|
||||
layer_second_last[30] = True
|
||||
|
||||
for bone_name, bone in arm.bones.items():
|
||||
if bone_name.startswith(prefix):
|
||||
bone.layer = layer_last
|
||||
elif bone_name.startswith("MCH"): # XXX fixme
|
||||
bone.layer = layer_second_last
|
||||
|
||||
layer_tot[:] = [max(lay) for lay in zip(layer_tot, bone.layer)]
|
||||
|
||||
# Only for demo'ing
|
||||
arm.layer = layer_tot
|
||||
|
||||
|
||||
# obj.restrict_view = True
|
||||
obj.data.draw_axes = False
|
||||
|
||||
bpy.ops.object.mode_set(mode=mode_orig)
|
||||
obj_orig.data.pose_position = rest_backup
|
||||
obj.data.pose_position = 'POSE'
|
||||
context.user_preferences.edit.global_undo = global_undo
|
||||
|
||||
return obj
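# Hedged usage sketch, mirroring the __main__ block at the end of this module:
# generate a rig for the active metarig object from the console.
import bpy
import rigify
rig_obj = rigify.generate_rig(bpy.context, bpy.context.active_object)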
|
||||
|
||||
|
||||
def generate_test(context, metarig_type="", GENERATE_FINAL=True):
|
||||
import os
|
||||
new_objects = []
|
||||
|
||||
scene = context.scene
|
||||
|
||||
def create_empty_armature(name):
|
||||
obj_new = bpy.data.add_object('ARMATURE', name)
|
||||
armature = bpy.data.add_armature(name)
|
||||
obj_new.data = armature
|
||||
scene.objects.link(obj_new)
|
||||
scene.objects.active = obj_new
|
||||
for obj in scene.objects:
|
||||
obj.selected = False
|
||||
obj_new.selected = True
|
||||
|
||||
for module_name in get_submodule_types():
|
||||
if (metarig_type and module_name != metarig_type):
|
||||
continue
|
||||
|
||||
# XXX workaround!, problem with updating the pose matrix.
|
||||
if module_name == "delta":
|
||||
continue
|
||||
|
||||
type_name, submodule, func = submodule_func_from_type(module_name)
|
||||
|
||||
metarig_template = getattr(submodule, "metarig_template", None)
|
||||
|
||||
if metarig_template:
|
||||
create_empty_armature("meta_" + module_name) # sets active
|
||||
metarig_template()
|
||||
obj = context.active_object
|
||||
obj.location = scene.cursor_location
|
||||
|
||||
if GENERATE_FINAL:
|
||||
obj_new = generate_rig(context, obj)
|
||||
new_objects.append((obj, obj_new))
|
||||
else:
|
||||
new_objects.append((obj, None))
|
||||
else:
|
||||
print("note: rig type '%s' has no metarig_template(), can't test this", module_name)
|
||||
|
||||
return new_objects
|
||||
|
||||
|
||||
def generate_test_all(context, GRAPH=False):
|
||||
import rigify
|
||||
import rigify_utils
|
||||
import graphviz_export
|
||||
import os
|
||||
reload(rigify)
|
||||
reload(rigify_utils)
|
||||
reload(graphviz_export)
|
||||
|
||||
new_objects = rigify.generate_test(context)
|
||||
|
||||
if GRAPH:
|
||||
base_name = os.path.splitext(bpy.data.filename)[0]
|
||||
for obj, obj_new in new_objects:
|
||||
for obj in (obj, obj_new):
|
||||
fn = base_name + "-" + bpy.utils.clean_name(obj.name)
|
||||
|
||||
path_dot = fn + ".dot"
|
||||
path_png = fn + ".png"
|
||||
saved = graphviz_export.graph_armature(obj, path_dot, CONSTRAINTS=True, DRIVERS=True)
|
||||
|
||||
#if saved:
|
||||
# os.system("dot -Tpng %s > %s; eog %s" % (path_dot, path_png, path_png))
|
||||
|
||||
i = 0
|
||||
for obj, obj_new in new_objects:
|
||||
obj.data.drawtype = 'STICK'
|
||||
obj.location[1] += i
|
||||
obj_new.location[1] += i
|
||||
obj_new.selected = False
|
||||
obj.selected = True
|
||||
i += 4
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
generate_rig(bpy.context, bpy.context.active_object)
|
||||
291  release/scripts/modules/rigify/arm_biped_generic.py  Normal file
@@ -0,0 +1,291 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from math import radians
|
||||
from rigify import RigifyError, get_layer_dict
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple, add_pole_target_bone, add_stretch_to, blend_bone_list, get_side_name, get_base_name
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
from Mathutils import Vector
|
||||
|
||||
METARIG_NAMES = "shoulder", "arm", "forearm", "hand"
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('shoulder')
|
||||
bone.head[:] = 0.0000, -0.0425, 0.0000
|
||||
bone.tail[:] = 0.0942, -0.0075, 0.0333
|
||||
bone.roll = -0.2227
|
||||
bone.connected = False
|
||||
bone = arm.edit_bones.new('upper_arm')
|
||||
bone.head[:] = 0.1066, -0.0076, -0.0010
|
||||
bone.tail[:] = 0.2855, 0.0206, -0.0104
|
||||
bone.roll = 1.6152
|
||||
bone.connected = False
|
||||
bone.parent = arm.edit_bones['shoulder']
|
||||
bone = arm.edit_bones.new('forearm')
|
||||
bone.head[:] = 0.2855, 0.0206, -0.0104
|
||||
bone.tail[:] = 0.4550, -0.0076, -0.0023
|
||||
bone.roll = 1.5153
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['upper_arm']
|
||||
bone = arm.edit_bones.new('hand')
|
||||
bone.head[:] = 0.4550, -0.0076, -0.0023
|
||||
bone.tail[:] = 0.5423, -0.0146, -0.0131
|
||||
bone.roll = -3.0083
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['forearm']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['upper_arm']
|
||||
pbone['type'] = 'arm_biped_generic'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
mt = bone_class_instance(obj, METARIG_NAMES) # meta
|
||||
mt.arm = orig_bone_name
|
||||
mt.update()
|
||||
|
||||
mt.shoulder_p = mt.arm_p.parent
|
||||
|
||||
if not mt.shoulder_p:
|
||||
raise RigifyError("could not find '%s' parent, skipping:" % orig_bone_name)
|
||||
|
||||
mt.shoulder = mt.shoulder_p.name
|
||||
|
||||
# We could have some bones attached, find the bone that has this as its 2nd parent
|
||||
hands = []
|
||||
for pbone in obj.pose.bones:
|
||||
index = pbone.parent_index(mt.arm_p)
|
||||
if index == 2 and pbone.bone.connected and pbone.bone.parent.connected:
|
||||
hands.append(pbone)
|
||||
|
||||
if len(hands) != 1:
|
||||
raise RigifyError("Found %s possible hands attached to this arm, expected 1 from bone: %s" % ([pbone.name for pbone in hands], orig_bone_name))
|
||||
|
||||
# first add the 2 new bones
|
||||
mt.hand_p = hands[0]
|
||||
mt.hand = mt.hand_p.name
|
||||
|
||||
mt.forearm_p = mt.hand_p.parent
|
||||
mt.forearm = mt.forearm_p.name
|
||||
|
||||
return mt.names()
|
||||
|
||||
|
||||
def ik(obj, definitions, base_names, options):
|
||||
|
||||
arm = obj.data
|
||||
|
||||
mt = bone_class_instance(obj, METARIG_NAMES)
|
||||
mt.shoulder, mt.arm, mt.forearm, mt.hand = definitions
|
||||
mt.update()
|
||||
|
||||
ik = bone_class_instance(obj, ["pole", "pole_vis", "hand_vis"])
|
||||
ik_chain = mt.copy(to_fmt="MCH-%s_ik", base_names=base_names, exclude_attrs=["shoulder"])
|
||||
|
||||
# IK needs no parent_index
|
||||
ik_chain.hand_e.connected = False
|
||||
ik_chain.hand_e.parent = None
|
||||
ik_chain.hand_e.local_location = False
|
||||
ik_chain.rename("hand", get_base_name(base_names[mt.hand]) + "_ik" + get_side_name(mt.hand))
|
||||
|
||||
ik_chain.arm_e.connected = False
|
||||
ik_chain.arm_e.parent = mt.shoulder_e
|
||||
|
||||
# Add the bone used for the arm's pole target
|
||||
#ik.pole = add_pole_target_bone(obj, mt.forearm, get_base_name(base_names[mt.forearm]) + "_target" + get_side_name(mt.forearm), mode='ZAVERAGE')
|
||||
ik.pole = add_pole_target_bone(obj, mt.forearm, "elbow_target" + get_side_name(mt.forearm), mode='ZAVERAGE')
|
||||
|
||||
ik.update()
|
||||
ik.pole_e.local_location = False
|
||||
|
||||
# option: elbow_parent
|
||||
elbow_parent_name = options.get("elbow_parent", "")
|
||||
|
||||
if elbow_parent_name:
|
||||
try:
|
||||
elbow_parent_e = arm.edit_bones[elbow_parent_name]
|
||||
except:
|
||||
# TODO, old/new parent mapping
|
||||
raise RigifyError("parent bone from property 'arm_biped_generic.elbow_parent' not found '%s'" % elbow_parent_name)
|
||||
ik.pole_e.parent = elbow_parent_e
|
||||
|
||||
# update bones after this!
|
||||
ik.hand_vis = add_stretch_to(obj, mt.hand, ik_chain.hand, "VIS-%s_ik" % base_names[mt.hand])
|
||||
ik.pole_vis = add_stretch_to(obj, mt.forearm, ik.pole, "VIS-%s_ik" % base_names[mt.forearm])
|
||||
|
||||
ik.update()
|
||||
ik.hand_vis_e.restrict_select = True
|
||||
ik.pole_vis_e.restrict_select = True
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
mt.update()
|
||||
ik.update()
|
||||
ik_chain.update()
|
||||
|
||||
# Set IK dof
|
||||
ik_chain.forearm_p.ik_dof_x = True
|
||||
ik_chain.forearm_p.ik_dof_y = False
|
||||
ik_chain.forearm_p.ik_dof_z = False
|
||||
|
||||
con = ik_chain.forearm_p.constraints.new('IK')
|
||||
con.target = obj
|
||||
con.subtarget = ik_chain.hand
|
||||
con.pole_target = obj
|
||||
con.pole_subtarget = ik.pole
|
||||
|
||||
con.use_tail = True
|
||||
con.use_stretch = True
|
||||
con.use_target = True
|
||||
con.use_rotation = False
|
||||
con.chain_length = 2
|
||||
con.pole_angle = -90.0 # XXX, RAD2DEG
|
||||
|
||||
# last step setup layers
|
||||
layers = get_layer_dict(options)
|
||||
lay = layers["ik"]
|
||||
for attr in ik_chain.attr_names:
|
||||
getattr(ik_chain, attr + "_b").layer = lay
|
||||
for attr in ik.attr_names:
|
||||
getattr(ik, attr + "_b").layer = lay
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
# don't blend the shoulder
|
||||
return [None] + ik_chain.names()
|
||||
|
||||
|
||||
def fk(obj, definitions, base_names, options):
|
||||
|
||||
arm = obj.data
|
||||
|
||||
mt = bone_class_instance(obj, METARIG_NAMES)
|
||||
mt.shoulder, mt.arm, mt.forearm, mt.hand = definitions
|
||||
mt.update()
|
||||
|
||||
ex = bone_class_instance(obj, ["socket", "hand_delta"])
|
||||
fk_chain = mt.copy(base_names=base_names)
|
||||
|
||||
# shoulder is used as a hinge
|
||||
fk_chain.rename("shoulder", "MCH-%s_hinge" % base_names[mt.arm])
|
||||
fk_chain.shoulder_e.translate(Vector(0.0, fk_chain.shoulder_e.length / 2, 0.0))
|
||||
|
||||
# upper arm constrains to this.
|
||||
ex.socket_e = copy_bone_simple(arm, mt.arm, "MCH-%s_socket" % base_names[mt.arm])
|
||||
ex.socket = ex.socket_e.name
|
||||
ex.socket_e.connected = False
|
||||
ex.socket_e.parent = mt.shoulder_e
|
||||
ex.socket_e.length *= 0.5
|
||||
|
||||
# insert the 'DLT-hand', between the forearm and the hand
|
||||
# copies the forearm rotation
|
||||
ex.hand_delta_e = copy_bone_simple(arm, fk_chain.hand, "DLT-%s" % base_names[mt.hand], parent=True)
|
||||
ex.hand_delta = ex.hand_delta_e.name
|
||||
ex.hand_delta_e.length *= 0.5
|
||||
ex.hand_delta_e.connected = False
|
||||
if "hand_roll" in options:
|
||||
ex.hand_delta_e.roll += radians(options["hand_roll"])
|
||||
|
||||
fk_chain.hand_e.connected = False
|
||||
fk_chain.hand_e.parent = ex.hand_delta_e
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
mt.update()
|
||||
ex.update()
|
||||
fk_chain.update()
|
||||
|
||||
# Set rotation modes and axis locks
|
||||
fk_chain.forearm_p.rotation_mode = 'XYZ'
|
||||
fk_chain.forearm_p.lock_rotation = (False, True, True)
|
||||
fk_chain.hand_p.rotation_mode = 'ZXY'
|
||||
|
||||
con = fk_chain.arm_p.constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = ex.socket
|
||||
|
||||
fk_chain.hand_p.lock_location = True, True, True
|
||||
con = ex.hand_delta_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = fk_chain.forearm
|
||||
|
||||
def hinge_setup():
|
||||
# Hinge constraint & driver
|
||||
con = fk_chain.shoulder_p.constraints.new('COPY_ROTATION')
|
||||
con.name = "hinge"
|
||||
con.target = obj
|
||||
con.subtarget = mt.shoulder
|
||||
driver_fcurve = con.driver_add("influence", 0)
|
||||
driver = driver_fcurve.driver
|
||||
|
||||
|
||||
controller_path = fk_chain.arm_p.path_to_id()
|
||||
# add custom prop
|
||||
fk_chain.arm_p["hinge"] = 0.0
|
||||
prop = rna_idprop_ui_prop_get(fk_chain.arm_p, "hinge", create=True)
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
|
||||
# *****
|
||||
driver = driver_fcurve.driver
|
||||
driver.type = 'AVERAGE'
|
||||
|
||||
tar = driver.targets.new()
|
||||
tar.name = "hinge"
|
||||
tar.id_type = 'OBJECT'
|
||||
tar.id = obj
|
||||
tar.data_path = controller_path + '["hinge"]'
|
||||
|
||||
mod = driver_fcurve.modifiers[0]
|
||||
mod.poly_order = 1
|
||||
mod.coefficients[0] = 1.0
|
||||
mod.coefficients[1] = -1.0
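# the generator modifier remaps the custom property: influence = 1.0 - hinge,
# so hinge = 1.0 switches the shoulder's COPY_ROTATION off entirely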
|
||||
|
||||
hinge_setup()
|
||||
|
||||
|
||||
# last step setup layers
|
||||
layers = get_layer_dict(options)
|
||||
lay = layers["fk"]
|
||||
for attr in fk_chain.attr_names:
|
||||
getattr(fk_chain, attr + "_b").layer = lay
|
||||
|
||||
lay = layers["extra"]
|
||||
for attr in ex.attr_names:
|
||||
getattr(ex, attr + "_b").layer = lay
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
return None, fk_chain.arm, fk_chain.forearm, fk_chain.hand
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
bones_fk = fk(obj, bone_definition, base_names, options)
|
||||
bones_ik = ik(obj, bone_definition, base_names, options)
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
blend_bone_list(obj, bone_definition, bones_fk, bones_ik, target_bone=bones_fk[1], blend_default=1.0)
|
||||
87  release/scripts/modules/rigify/copy.py  Normal file
@@ -0,0 +1,87 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import get_layer_dict
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple
|
||||
|
||||
METARIG_NAMES = ("cpy",)
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('Bone')
|
||||
bone.head[:] = 0.0000, 0.0000, 0.0000
|
||||
bone.tail[:] = 0.0000, 0.0000, 1.0000
|
||||
bone.roll = 0.0000
|
||||
bone.connected = False
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['Bone']
|
||||
pbone['type'] = 'copy'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
return [orig_bone_name]
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
arm = obj.data
|
||||
mt = bone_class_instance(obj, METARIG_NAMES)
|
||||
mt.cpy = bone_definition[0]
|
||||
mt.update()
|
||||
cp = bone_class_instance(obj, ["cpy"])
|
||||
cp.cpy_e = copy_bone_simple(arm, mt.cpy, base_names[mt.cpy], parent=True)
|
||||
cp.cpy = cp.cpy_e.name
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
cp.update()
|
||||
mt.update()
|
||||
|
||||
if not cp.cpy_b.connected:
|
||||
con = mt.cpy_p.constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = cp.cpy
|
||||
|
||||
con = mt.cpy_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = cp.cpy
|
||||
|
||||
con = mt.cpy_p.constraints.new('COPY_SCALE')
|
||||
con.target = obj
|
||||
con.subtarget = cp.cpy
|
||||
|
||||
# Rotation mode and axis locks
|
||||
cp.cpy_p.rotation_mode = mt.cpy_p.rotation_mode
|
||||
cp.cpy_p.lock_location = tuple(mt.cpy_p.lock_location)
|
||||
cp.cpy_p.lock_rotations_4d = mt.cpy_p.lock_rotations_4d
|
||||
cp.cpy_p.lock_rotation = tuple(mt.cpy_p.lock_rotation)
|
||||
cp.cpy_p.lock_rotation_w = mt.cpy_p.lock_rotation_w
|
||||
cp.cpy_p.lock_scale = tuple(mt.cpy_p.lock_scale)
|
||||
|
||||
# setup layers last
|
||||
layers = get_layer_dict(options)
|
||||
cp.cpy_b.layer = layers["main"]
|
||||
|
||||
return [mt.cpy]
|
||||
162  release/scripts/modules/rigify/delta.py  Normal file
@@ -0,0 +1,162 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
|
||||
# not used, defined for completeness
|
||||
METARIG_NAMES = tuple()
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('bonesker')
|
||||
bone.head[:] = 0.0000, 0.0000, 0.0000
|
||||
bone.tail[:] = -0.0000, 0.7382, 0.1895
|
||||
bone.roll = -0.0000
|
||||
bone.connected = False
|
||||
bone = arm.edit_bones.new('delta')
|
||||
bone.head[:] = -0.0497, 0.8414, 0.3530
|
||||
bone.tail[:] = -0.2511, 1.1588, 0.9653
|
||||
bone.roll = 2.6044
|
||||
bone.connected = False
|
||||
bone.parent = arm.edit_bones['bonesker']
|
||||
bone = arm.edit_bones.new('boney')
|
||||
bone.head[:] = 0.7940, 2.5592, 0.4134
|
||||
bone.tail[:] = 0.7940, 3.3975, 0.4890
|
||||
bone.roll = 3.1416
|
||||
bone.connected = False
|
||||
bone.parent = arm.edit_bones['delta']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['delta']
|
||||
pbone['type'] = 'delta'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
'''
|
||||
The bone given is the delta bone; it must be disconnected from its parent and
have exactly one child, which receives the delta offset in pose mode.
eg.
parent -> delta -> child
|
||||
'''
|
||||
arm = obj.data
|
||||
delta = arm.bones[orig_bone_name]
|
||||
children = delta.children
|
||||
|
||||
if len(children) != 1:
|
||||
raise RigifyError("only 1 child supported for delta on bone '%s'" % delta.name)
|
||||
|
||||
if delta.connected:
|
||||
raise RigifyError("bone cannot be connected to its parent '%s'" % delta.name)
|
||||
|
||||
bone_definition = [delta.name, children[0].name]
|
||||
|
||||
return bone_definition
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
'''
|
||||
Use this bone to define a delta that's applied to its child in pose mode.
|
||||
'''
|
||||
mode_orig = obj.mode
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
delta_name, child_name = bone_definition
|
||||
|
||||
delta_pbone = obj.pose.bones[delta_name]
|
||||
|
||||
arm = obj.data
|
||||
child_pbone = obj.pose.bones[child_name]
|
||||
|
||||
delta_phead = delta_pbone.head.copy()
|
||||
delta_ptail = delta_pbone.tail.copy()
|
||||
delta_pmatrix = delta_pbone.matrix.copy()
|
||||
|
||||
child_phead = child_pbone.head.copy()
|
||||
child_ptail = child_pbone.tail.copy()
|
||||
child_pmatrix = child_pbone.matrix.copy()
|
||||
|
||||
|
||||
children = delta_pbone.children
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
delta_ebone = arm.edit_bones[delta_name]
|
||||
child_ebone = arm.edit_bones[child_name]
|
||||
|
||||
delta_head = delta_ebone.head.copy()
|
||||
delta_tail = delta_ebone.tail.copy()
|
||||
|
||||
child_head = child_ebone.head.copy()
|
||||
child_tail = child_ebone.tail.copy()
|
||||
|
||||
#arm.edit_bones.remove(delta_ebone)
|
||||
#del delta_ebone # cant use this
|
||||
del child_pbone
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
|
||||
# Move the child bone to the delta's location
|
||||
obj.animation_data_create()
|
||||
delta_pbone = obj.pose.bones[delta_name]
|
||||
# child_pbone = obj.pose.bones[child_name]
|
||||
|
||||
# ------------------- drivers
|
||||
|
||||
delta_pbone.rotation_mode = 'XYZ'
|
||||
|
||||
rot = delta_pmatrix.invert().rotationPart() * child_pmatrix.rotationPart()
|
||||
rot = rot.invert().toEuler()
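# constant euler offset between the delta bone and its child; fed into
# constant-only drivers below so the delta acts as a fixed pose-space offset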
|
||||
|
||||
fcurve_drivers = delta_pbone.driver_add("rotation_euler", -1)
|
||||
for i, fcurve_driver in enumerate(fcurve_drivers):
|
||||
driver = fcurve_driver.driver
|
||||
driver.type = 'AVERAGE'
|
||||
#mod = fcurve_driver.modifiers.new('GENERATOR')
|
||||
mod = fcurve_driver.modifiers[0]
|
||||
mod.poly_order = 1
|
||||
mod.coefficients[0] = rot[i]
|
||||
mod.coefficients[1] = 0.0
|
||||
|
||||
# tricky, find the transform to drive the bone to this location.
|
||||
delta_head_offset = child_pmatrix.rotationPart() * (delta_phead - child_phead)
|
||||
|
||||
fcurve_drivers = delta_pbone.driver_add("location", -1)
|
||||
for i, fcurve_driver in enumerate(fcurve_drivers):
|
||||
driver = fcurve_driver.driver
|
||||
driver.type = 'AVERAGE'
|
||||
#mod = fcurve_driver.modifiers.new('GENERATOR')
|
||||
mod = fcurve_driver.modifiers[0]
|
||||
mod.poly_order = 1
|
||||
mod.coefficients[0] = delta_head_offset[i]
|
||||
mod.coefficients[1] = 0.0
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
bpy.ops.object.mode_set(mode=mode_orig)
|
||||
|
||||
# no blending
|
||||
return None
|
||||
228  release/scripts/modules/rigify/finger_curl.py  Normal file
@@ -0,0 +1,228 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError, get_layer_dict
|
||||
from rigify_utils import copy_bone_simple, get_side_name
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
from functools import reduce
|
||||
|
||||
METARIG_NAMES = "finger_01", "finger_02", "finger_03"
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('finger.01')
|
||||
bone.head[:] = 0.0000, 0.0000, 0.0000
|
||||
bone.tail[:] = 0.0353, -0.0184, -0.0053
|
||||
bone.roll = -2.8722
|
||||
bone.connected = False
|
||||
bone = arm.edit_bones.new('finger.02')
|
||||
bone.head[:] = 0.0353, -0.0184, -0.0053
|
||||
bone.tail[:] = 0.0702, -0.0364, -0.0146
|
||||
bone.roll = -2.7099
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['finger.01']
|
||||
bone = arm.edit_bones.new('finger.03')
|
||||
bone.head[:] = 0.0702, -0.0364, -0.0146
|
||||
bone.tail[:] = 0.0903, -0.0461, -0.0298
|
||||
bone.roll = -2.1709
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['finger.02']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['finger.01']
|
||||
pbone['type'] = 'finger_curl'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
'''
|
||||
The bone given is the first in a chain
|
||||
Expects a chain of at least 2 children.
|
||||
eg.
|
||||
finger -> finger_01 -> finger_02
|
||||
'''
|
||||
|
||||
bone_definition = []
|
||||
|
||||
orig_bone = obj.data.bones[orig_bone_name]
|
||||
|
||||
bone_definition.append(orig_bone.name)
|
||||
|
||||
bone = orig_bone
|
||||
chain = 0
|
||||
while chain < 2: # first 2 bones only have 1 child
|
||||
children = bone.children
|
||||
|
||||
if len(children) != 1:
|
||||
raise RigifyError("expected the chain to have 2 children from bone '%s' without a fork" % orig_bone_name)
|
||||
bone = children[0]
|
||||
bone_definition.append(bone.name) # finger_02, finger_03
|
||||
chain += 1
|
||||
|
||||
if len(bone_definition) != len(METARIG_NAMES):
|
||||
raise RigifyError("internal problem, expected %d bones" % len(METARIG_NAMES))
|
||||
|
||||
return bone_definition
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
# *** EDITMODE
|
||||
|
||||
# get associated data
|
||||
arm = obj.data
|
||||
orig_ebone = arm.edit_bones[bone_definition[0]]
|
||||
|
||||
obj.animation_data_create() # needed if its a new armature with no keys
|
||||
|
||||
children = orig_ebone.children_recursive
|
||||
tot_len = reduce(lambda f, ebone: f + ebone.length, children, orig_ebone.length)
|
||||
|
||||
# FIXME, the line below is far too arbitrary
|
||||
base_name = base_names[bone_definition[0]].rsplit(".", 2)[0]
|
||||
|
||||
# first make a new bone at the location of the finger
|
||||
#control_ebone = arm.edit_bones.new(base_name)
|
||||
control_ebone = copy_bone_simple(arm, bone_definition[0], base_name + get_side_name(base_names[bone_definition[0]]), parent=True)
|
||||
control_bone_name = control_ebone.name # we don't know if we get the name requested
|
||||
|
||||
control_ebone.connected = orig_ebone.connected
|
||||
control_ebone.parent = orig_ebone.parent
|
||||
control_ebone.length = tot_len
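# the control bone spans the total length of the finger chain; scaling it on Y
# is what drives the curl of the small driver bones added below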
|
||||
|
||||
# now add bones inbetween this and its children recursively
|
||||
|
||||
# switching modes so store names only!
|
||||
children = [ebone.name for ebone in children]
|
||||
|
||||
driver_bone_pairs = []
|
||||
|
||||
for child_bone_name in children:
|
||||
child_ebone = arm.edit_bones[child_bone_name]
|
||||
|
||||
# finger.02 --> finger_driver.02
|
||||
driver_bone_name = child_bone_name.split('.')
|
||||
driver_bone_name = driver_bone_name[0] + "_driver." + ".".join(driver_bone_name[1:])
|
||||
|
||||
driver_ebone = copy_bone_simple(arm, child_ebone.name, driver_bone_name)
|
||||
driver_ebone.length *= 0.5
|
||||
|
||||
# Insert driver_ebone in the chain without connected parents
|
||||
driver_ebone.connected = False
|
||||
driver_ebone.parent = child_ebone.parent
|
||||
|
||||
child_ebone.connected = False
|
||||
child_ebone.parent = driver_ebone
|
||||
|
||||
# Add the drivers to these when in posemode.
|
||||
driver_bone_pairs.append((child_bone_name, driver_bone_name))
|
||||
|
||||
del control_ebone
|
||||
|
||||
|
||||
# *** POSEMODE
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
|
||||
orig_pbone = obj.pose.bones[bone_definition[0]]
|
||||
control_pbone = obj.pose.bones[control_bone_name]
|
||||
|
||||
|
||||
# only allow Y scale
|
||||
control_pbone.lock_scale = (True, False, True)
|
||||
|
||||
control_pbone["bend_ratio"] = 0.4
|
||||
prop = rna_idprop_ui_prop_get(control_pbone, "bend_ratio", create=True)
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
con = orig_pbone.constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = control_bone_name
|
||||
|
||||
con = orig_pbone.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = control_bone_name
|
||||
|
||||
|
||||
|
||||
# setup child drivers on each new smaller bone added. assume 2 for now.
|
||||
|
||||
# drives the bones
|
||||
controller_path = control_pbone.path_to_id() # 'pose.bones["%s"]' % control_bone_name
|
||||
|
||||
i = 0
|
||||
for child_bone_name, driver_bone_name in driver_bone_pairs:
|
||||
|
||||
# XXX - todo, any number
|
||||
if i == 2:
|
||||
break
|
||||
|
||||
driver_pbone = obj.pose.bones[driver_bone_name]
|
||||
|
||||
driver_pbone.rotation_mode = 'YZX'
|
||||
fcurve_driver = driver_pbone.driver_add("rotation_euler", 0)
|
||||
|
||||
#obj.driver_add('pose.bones["%s"].scale', 1)
|
||||
#obj.animation_data.drivers[-1] # XXX, WATCH THIS
|
||||
driver = fcurve_driver.driver
|
||||
|
||||
# scale target
|
||||
tar = driver.targets.new()
|
||||
tar.name = "scale"
|
||||
tar.id_type = 'OBJECT'
|
||||
tar.id = obj
|
||||
tar.data_path = controller_path + '.scale[1]'
|
||||
|
||||
# bend target
|
||||
tar = driver.targets.new()
|
||||
tar.name = "br"
|
||||
tar.id_type = 'OBJECT'
|
||||
tar.id = obj
|
||||
tar.data_path = controller_path + '["bend_ratio"]'
|
||||
|
||||
# XXX - todo, any number
|
||||
if i == 0:
|
||||
driver.expression = '(-scale+1.0)*pi*2.0*(1.0-br)'
|
||||
elif i == 1:
|
||||
driver.expression = '(-scale+1.0)*pi*2.0*br'
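# scaling the control down on Y curls the finger: the total curl is split between
# the two driven joints by the 'bend_ratio' property, (1.0 - br) and br respectively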
|
||||
|
||||
child_pbone = obj.pose.bones[child_bone_name]
|
||||
|
||||
# only allow X rotation
|
||||
driver_pbone.lock_rotation = child_pbone.lock_rotation = (False, True, True)
|
||||
|
||||
i += 1
|
||||
|
||||
|
||||
# last step setup layers
|
||||
layers = get_layer_dict(options)
|
||||
lay = layers["extra"]
|
||||
for child_bone_name, driver_bone_name in driver_bone_pairs:
|
||||
arm.bones[driver_bone_name].layer = lay
|
||||
|
||||
lay = layers["main"]
|
||||
arm.bones[control_bone_name].layer = lay
|
||||
|
||||
# no blending the result of this
|
||||
return None
|
||||
374  release/scripts/modules/rigify/leg_biped_generic.py  Normal file
@@ -0,0 +1,374 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError, get_layer_dict
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple, blend_bone_list, get_side_name, get_base_name
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
METARIG_NAMES = "hips", "thigh", "shin", "foot", "toe", "heel"
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('hips')
|
||||
bone.head[:] = 0.0000, 0.0000, 0.0000
|
||||
bone.tail[:] = 0.0000, 0.0000, 0.2506
|
||||
bone.roll = 0.0000
|
||||
bone.connected = False
|
||||
bone = arm.edit_bones.new('thigh')
|
||||
bone.head[:] = 0.1253, 0.0000, -0.0000
|
||||
bone.tail[:] = 0.0752, -0.0251, -0.4260
|
||||
bone.roll = 0.1171
|
||||
bone.connected = False
|
||||
bone.parent = arm.edit_bones['hips']
|
||||
bone = arm.edit_bones.new('shin')
|
||||
bone.head[:] = 0.0752, -0.0251, -0.4260
|
||||
bone.tail[:] = 0.0752, 0.0000, -0.8771
|
||||
bone.roll = 0.0000
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['thigh']
|
||||
bone = arm.edit_bones.new('foot')
|
||||
bone.head[:] = 0.0752, 0.0000, -0.8771
|
||||
bone.tail[:] = 0.1013, -0.1481, -0.9773
|
||||
bone.roll = -0.4662
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['shin']
|
||||
bone = arm.edit_bones.new('toe')
|
||||
bone.head[:] = 0.1013, -0.1481, -0.9773
|
||||
bone.tail[:] = 0.1100, -0.2479, -0.9773
|
||||
bone.roll = 3.1416
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['foot']
|
||||
bone = arm.edit_bones.new('heel')
|
||||
bone.head[:] = 0.0652, 0.0501, -1.0024
|
||||
bone.tail[:] = 0.0927, -0.1002, -1.0024
|
||||
bone.roll = 0.0000
|
||||
bone.connected = False
|
||||
bone.parent = arm.edit_bones['foot']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['thigh']
|
||||
pbone['type'] = 'leg_biped_generic'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
'''
|
||||
The bone given is the first in a chain
|
||||
Expects a chain of at least 3 children.
|
||||
eg.
|
||||
thigh -> shin -> foot -> [toe, heel]
|
||||
'''
|
||||
|
||||
bone_definition = []
|
||||
|
||||
orig_bone = obj.data.bones[orig_bone_name]
|
||||
orig_bone_parent = orig_bone.parent
|
||||
|
||||
if orig_bone_parent is None:
|
||||
raise RigifyError("expected the thigh bone to have a parent hip bone")
|
||||
|
||||
bone_definition.append(orig_bone_parent.name)
|
||||
bone_definition.append(orig_bone.name)
|
||||
|
||||
|
||||
bone = orig_bone
|
||||
chain = 0
|
||||
while chain < 2: # first 2 bones only have 1 child
|
||||
children = bone.children
|
||||
|
||||
if len(children) != 1:
|
||||
raise RigifyError("expected the thigh bone to have 3 children without a fork")
|
||||
bone = children[0]
|
||||
bone_definition.append(bone.name) # shin, foot
|
||||
chain += 1
|
||||
|
||||
children = bone.children
|
||||
# Now there must be 2 children, only one connected
|
||||
if len(children) != 2:
|
||||
raise RigifyError("expected the foot bone:'%s' to have 2 children" % bone.name)
|
||||
|
||||
if children[0].connected == children[1].connected:
|
||||
raise RigifyError("expected one bone to be connected")
|
||||
|
||||
toe, heel = children
|
||||
if heel.connected:
|
||||
toe, heel = heel, toe
|
||||
|
||||
|
||||
bone_definition.append(toe.name)
|
||||
bone_definition.append(heel.name)
|
||||
|
||||
if len(bone_definition) != len(METARIG_NAMES):
|
||||
raise RigifyError("internal problem, expected %d bones" % len(METARIG_NAMES))
|
||||
|
||||
return bone_definition
|
||||
|
||||
|
||||
def ik(obj, bone_definition, base_names, options):
|
||||
arm = obj.data
|
||||
|
||||
# setup the existing bones, use names from METARIG_NAMES
|
||||
mt_chain = bone_class_instance(obj, ["thigh", "shin", "foot", "toe"])
|
||||
mt = bone_class_instance(obj, ["hips", "heel"])
|
||||
|
||||
mt.attr_initialize(METARIG_NAMES, bone_definition)
|
||||
mt_chain.attr_initialize(METARIG_NAMES, bone_definition)
|
||||
|
||||
# children of ik_foot
|
||||
ik = bone_class_instance(obj, ["foot", "foot_roll", "foot_roll_01", "foot_roll_02", "knee_target"])
|
||||
|
||||
# Make a new chain
|
||||
ik_chain = mt_chain.copy(to_fmt="MCH-%s", base_names=base_names)
|
||||
|
||||
# simple rename
|
||||
ik_chain.rename("thigh", ik_chain.thigh + "_ik")
|
||||
ik_chain.rename("shin", ik_chain.shin + "_ik")
|
||||
|
||||
# make sure leg is child of hips
|
||||
ik_chain.thigh_e.parent = mt.hips_e
|
||||
|
||||
# ik foot: no parents
|
||||
base_foot_name = get_base_name(base_names[mt_chain.foot])
|
||||
ik.foot_e = copy_bone_simple(arm, mt_chain.foot, base_foot_name + "_ik" + get_side_name(base_names[mt_chain.foot]))
|
||||
ik.foot = ik.foot_e.name
|
||||
ik.foot_e.tail.z = ik.foot_e.head.z
|
||||
ik.foot_e.roll = 0.0
|
||||
ik.foot_e.local_location = False
|
||||
|
||||
# foot roll: heel pointing backwards, half length
|
||||
ik.foot_roll_e = copy_bone_simple(arm, mt.heel, base_foot_name + "_roll" + get_side_name(base_names[mt_chain.foot]))
|
||||
ik.foot_roll = ik.foot_roll_e.name
|
||||
ik.foot_roll_e.tail = ik.foot_roll_e.head + ik.foot_roll_e.vector / 2.0
|
||||
ik.foot_roll_e.parent = ik.foot_e # heel is disconnected
|
||||
|
||||
# heel pointing forwards to the toe base, parent of the following 2 bones
|
||||
ik.foot_roll_01_e = copy_bone_simple(arm, mt.heel, "MCH-%s_roll.01" % base_foot_name)
|
||||
ik.foot_roll_01 = ik.foot_roll_01_e.name
|
||||
ik.foot_roll_01_e.tail = mt_chain.foot_e.tail
|
||||
ik.foot_roll_01_e.parent = ik.foot_e # heel is disconnected
|
||||
|
||||
# same as above but reverse direction
|
||||
ik.foot_roll_02_e = copy_bone_simple(arm, mt.heel, "MCH-%s_roll.02" % base_foot_name)
|
||||
ik.foot_roll_02 = ik.foot_roll_02_e.name
|
||||
ik.foot_roll_02_e.parent = ik.foot_roll_01_e # heel is disconnected
|
||||
ik.foot_roll_02_e.head = mt_chain.foot_e.tail
|
||||
ik.foot_roll_02_e.tail = mt.heel_e.head
|
||||
|
||||
del base_foot_name
|
||||
|
||||
# rename 'MCH-toe' --> to 'toe_ik' and make the child of ik.foot_roll_01
|
||||
# ------------------ FK or IK?
|
||||
ik_chain.rename("toe", get_base_name(base_names[mt_chain.toe]) + "_ik" + get_side_name(base_names[mt_chain.toe]))
|
||||
ik_chain.toe_e.connected = False
|
||||
ik_chain.toe_e.parent = ik.foot_roll_01_e
|
||||
|
||||
# re-parent ik_chain.foot to foot_roll_02
|
||||
ik_chain.foot_e.connected = False
|
||||
ik_chain.foot_e.parent = ik.foot_roll_02_e
|
||||
|
||||
|
||||
# knee target is the heel moved up and forward on its local axis
|
||||
ik.knee_target_e = copy_bone_simple(arm, mt.heel, "knee_target")
|
||||
ik.knee_target = ik.knee_target_e.name
|
||||
offset = ik.knee_target_e.tail - ik.knee_target_e.head
|
||||
offset.z = 0
|
||||
offset.length = mt_chain.shin_e.head.z - mt.heel_e.head.z
|
||||
offset.z += offset.length
|
||||
ik.knee_target_e.translate(offset)
|
||||
ik.knee_target_e.length *= 0.5
|
||||
ik.knee_target_e.parent = ik.foot_e
|
||||
ik.knee_target_e.local_location = False
|
||||
|
||||
# roll the bone to point up... could also point in the same direction as ik.foot_roll
|
||||
# ik.foot_roll_02_e.matrix * Vector(0.0, 0.0, 1.0) # ACK!, no rest matrix in editmode
|
||||
ik.foot_roll_01_e.align_roll((0.0, 0.0, -1.0))
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
ik.update()
|
||||
mt_chain.update()
|
||||
ik_chain.update()
|
||||
|
||||
# Set IK dof
|
||||
ik_chain.shin_p.ik_dof_x = True
|
||||
ik_chain.shin_p.ik_dof_y = False
|
||||
ik_chain.shin_p.ik_dof_z = False
|
||||
|
||||
# Set rotation modes and axis locks
|
||||
ik.foot_roll_p.rotation_mode = 'XYZ'
|
||||
ik.foot_roll_p.lock_rotation = False, True, True
|
||||
ik_chain.toe_p.rotation_mode = 'YXZ'
|
||||
ik_chain.toe_p.lock_rotation = False, True, True
|
||||
|
||||
# IK
|
||||
con = ik_chain.shin_p.constraints.new('IK')
|
||||
con.chain_length = 2
|
||||
con.iterations = 500
|
||||
con.pole_angle = -90.0 # XXX - in deg!
|
||||
con.use_tail = True
|
||||
con.use_stretch = True
|
||||
con.use_target = True
|
||||
con.use_rotation = False
|
||||
con.weight = 1.0
|
||||
|
||||
con.target = obj
|
||||
con.subtarget = ik_chain.foot
|
||||
|
||||
con.pole_target = obj
|
||||
con.pole_subtarget = ik.knee_target
|
||||
|
||||
# foot roll
|
||||
cons = [ \
|
||||
(ik.foot_roll_01_p.constraints.new('COPY_ROTATION'), ik.foot_roll_01_p.constraints.new('LIMIT_ROTATION')), \
|
||||
(ik.foot_roll_02_p.constraints.new('COPY_ROTATION'), ik.foot_roll_02_p.constraints.new('LIMIT_ROTATION'))]
|
||||
|
||||
for con, con_l in cons:
|
||||
con.target = obj
|
||||
con.subtarget = ik.foot_roll
|
||||
con.use_x, con.use_y, con.use_z = True, False, False
|
||||
con.target_space = con.owner_space = 'LOCAL'
|
||||
|
||||
con = con_l
|
||||
con.use_limit_x, con.use_limit_y, con.use_limit_z = True, False, False
|
||||
con.owner_space = 'LOCAL'
|
||||
|
||||
if con_l is cons[-1][-1]:
|
||||
con.minimum_x = 0.0
|
||||
con.maximum_x = 180.0 # XXX -deg
|
||||
else:
|
||||
con.minimum_x = -180.0 # XXX -deg
|
||||
con.maximum_x = 0.0
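# both helpers copy the roll control's X rotation, but the limits split it:
# MCH-roll.02 takes only the positive range (pivoting at the ball of the foot)
# and MCH-roll.01 only the negative range (pivoting at the heel)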
|
||||
|
||||
|
||||
# last step setup layers
|
||||
layers = get_layer_dict(options)
|
||||
lay = layers["ik"]
|
||||
for attr in ik_chain.attr_names:
|
||||
getattr(ik_chain, attr + "_b").layer = lay
|
||||
for attr in ik.attr_names:
|
||||
getattr(ik, attr + "_b").layer = lay
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
return None, ik_chain.thigh, ik_chain.shin, ik_chain.foot, ik_chain.toe, None
|
||||
|
||||
|
||||
def fk(obj, bone_definition, base_names, options):
|
||||
from Mathutils import Vector
|
||||
arm = obj.data
|
||||
|
||||
# these account for all bones in METARIG_NAMES
|
||||
mt_chain = bone_class_instance(obj, ["thigh", "shin", "foot", "toe"])
|
||||
mt = bone_class_instance(obj, ["hips", "heel"])
|
||||
|
||||
# new bones
|
||||
ex = bone_class_instance(obj, ["thigh_socket", "thigh_hinge"])
|
||||
|
||||
for bone_class in (mt, mt_chain):
|
||||
for attr in bone_class.attr_names:
|
||||
i = METARIG_NAMES.index(attr)
|
||||
ebone = arm.edit_bones[bone_definition[i]]
|
||||
setattr(bone_class, attr, ebone.name)
|
||||
bone_class.update()
|
||||
|
||||
ex.thigh_socket_e = copy_bone_simple(arm, mt_chain.thigh, "MCH-%s_socket" % base_names[mt_chain.thigh], parent=True)
|
||||
ex.thigh_socket = ex.thigh_socket_e.name
|
||||
ex.thigh_socket_e.tail = ex.thigh_socket_e.head + Vector(0.0, 0.0, ex.thigh_socket_e.length / 4.0)
|
||||
|
||||
ex.thigh_hinge_e = copy_bone_simple(arm, mt.hips, "MCH-%s_hinge" % base_names[mt_chain.thigh], parent=False)
|
||||
ex.thigh_hinge = ex.thigh_hinge_e.name
|
||||
|
||||
fk_chain = mt_chain.copy(base_names=base_names) # fk has no prefix!
|
||||
|
||||
fk_chain.thigh_e.connected = False
|
||||
fk_chain.thigh_e.parent = ex.thigh_hinge_e
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
ex.update()
|
||||
mt_chain.update()
|
||||
fk_chain.update()
|
||||
|
||||
# Set rotation modes and axis locks
|
||||
fk_chain.shin_p.rotation_mode = 'XYZ'
|
||||
fk_chain.shin_p.lock_rotation = False, True, True
|
||||
fk_chain.foot_p.rotation_mode = 'YXZ'
|
||||
fk_chain.toe_p.rotation_mode = 'YXZ'
|
||||
fk_chain.toe_p.lock_rotation = False, True, True
|
||||
|
||||
con = fk_chain.thigh_p.constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = ex.thigh_socket
|
||||
|
||||
# hinge
|
||||
prop = rna_idprop_ui_prop_get(fk_chain.thigh_p, "hinge", create=True)
|
||||
fk_chain.thigh_p["hinge"] = 0.5
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
con = ex.thigh_hinge_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = mt.hips
|
||||
|
||||
# add driver
|
||||
hinge_driver_path = fk_chain.thigh_p.path_to_id() + '["hinge"]'
|
||||
|
||||
fcurve = con.driver_add("influence", 0)
|
||||
driver = fcurve.driver
|
||||
tar = driver.targets.new()
|
||||
driver.type = 'AVERAGE'
|
||||
tar.name = "var"
|
||||
tar.id_type = 'OBJECT'
|
||||
tar.id = obj
|
||||
tar.data_path = hinge_driver_path
|
||||
|
||||
mod = fcurve.modifiers[0]
|
||||
mod.poly_order = 1
|
||||
mod.coefficients[0] = 1.0
|
||||
mod.coefficients[1] = -1.0
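# same trick as the arm rig: influence = 1.0 - hinge, so raising the thigh's
# 'hinge' property detaches it from the hip rotation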
|
||||
|
||||
|
||||
# last step setup layers
|
||||
layers = get_layer_dict(options)
|
||||
lay = layers["fk"]
|
||||
for attr in fk_chain.attr_names:
|
||||
getattr(fk_chain, attr + "_b").layer = lay
|
||||
|
||||
lay = layers["extra"]
|
||||
for attr in ex.attr_names:
|
||||
getattr(ex, attr + "_b").layer = lay
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
# don't blend the hips or heel
|
||||
return None, fk_chain.thigh, fk_chain.shin, fk_chain.foot, fk_chain.toe, None
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
bones_fk = fk(obj, bone_definition, base_names, options)
|
||||
bones_ik = ik(obj, bone_definition, base_names, options)
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
blend_bone_list(obj, bone_definition, bones_fk, bones_ik, target_bone=bones_ik[1], blend_default=0.0)
|
||||
225  release/scripts/modules/rigify/leg_quadruped_generic.py  Normal file
@@ -0,0 +1,225 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError, get_layer_dict
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple, blend_bone_list, get_side_name, get_base_name, add_pole_target_bone
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
from Mathutils import Vector
|
||||
|
||||
METARIG_NAMES = "hips", "thigh", "shin", "foot", "toe"
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('body')
|
||||
bone.head[:] = -0.0728, -0.2427, 0.0000
|
||||
bone.tail[:] = -0.0728, -0.2427, 0.2427
|
||||
bone.roll = 0.0000
|
||||
bone.connected = False
|
||||
bone = arm.edit_bones.new('thigh')
|
||||
bone.head[:] = 0.0000, 0.0000, -0.0000
|
||||
bone.tail[:] = 0.0813, -0.2109, -0.3374
|
||||
bone.roll = -0.4656
|
||||
bone.connected = False
|
||||
bone.parent = arm.edit_bones['body']
|
||||
bone = arm.edit_bones.new('shin')
|
||||
bone.head[:] = 0.0813, -0.2109, -0.3374
|
||||
bone.tail[:] = 0.0714, -0.0043, -0.5830
|
||||
bone.roll = -0.2024
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['thigh']
|
||||
bone = arm.edit_bones.new('foot')
|
||||
bone.head[:] = 0.0714, -0.0043, -0.5830
|
||||
bone.tail[:] = 0.0929, -0.0484, -0.7652
|
||||
bone.roll = -0.3766
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['shin']
|
||||
bone = arm.edit_bones.new('toe')
|
||||
bone.head[:] = 0.0929, -0.0484, -0.7652
|
||||
bone.tail[:] = 0.1146, -0.1244, -0.7652
|
||||
bone.roll = -0.0000
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['foot']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['thigh']
|
||||
pbone['type'] = 'leg_quadruped_generic'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
'''
|
||||
The bone given is the first in a chain
|
||||
Expects a chain of at least 3 children.
|
||||
eg.
|
||||
thigh -> shin -> foot -> [toe, heel]
|
||||
'''
|
||||
|
||||
bone_definition = []
|
||||
|
||||
orig_bone = obj.data.bones[orig_bone_name]
|
||||
orig_bone_parent = orig_bone.parent
|
||||
|
||||
if orig_bone_parent is None:
|
||||
raise RigifyError("expected the thigh bone to have a parent hip bone")
|
||||
|
||||
bone_definition.append(orig_bone_parent.name)
|
||||
bone_definition.append(orig_bone.name)
|
||||
|
||||
|
||||
bone = orig_bone
|
||||
chain = 0
|
||||
while chain < 3: # first 2 bones only have 1 child
|
||||
children = bone.children
|
||||
|
||||
if len(children) != 1:
|
||||
raise RigifyError("expected the thigh bone to have 3 children without a fork")
|
||||
bone = children[0]
|
||||
bone_definition.append(bone.name) # shin, foot
|
||||
chain += 1
|
||||
|
||||
if len(bone_definition) != len(METARIG_NAMES):
|
||||
raise RigifyError("internal problem, expected %d bones" % len(METARIG_NAMES))
|
||||
|
||||
return bone_definition
|
||||
|
||||
|
||||
def ik(obj, bone_definition, base_names, options):
|
||||
arm = obj.data
|
||||
|
||||
# setup the existing bones, use names from METARIG_NAMES
|
||||
mt = bone_class_instance(obj, ["hips"])
|
||||
mt_chain = bone_class_instance(obj, ["thigh", "shin", "foot", "toe"])
|
||||
|
||||
mt.attr_initialize(METARIG_NAMES, bone_definition)
|
||||
mt_chain.attr_initialize(METARIG_NAMES, bone_definition)
|
||||
|
||||
ik_chain = mt_chain.copy(to_fmt="%s", base_names=base_names)
|
||||
|
||||
ik_chain.thigh_e.connected = False
|
||||
ik_chain.thigh_e.parent = mt.hips_e
|
||||
|
||||
ik_chain.foot_e.parent = None
|
||||
ik_chain.rename("foot", ik_chain.foot + "_ik")
|
||||
|
||||
# keep the foot_ik as the parent
|
||||
ik_chain.toe_e.connected = False
|
||||
|
||||
# must be after disconnecting the toe
|
||||
ik_chain.foot_e.align_orientation(mt_chain.toe_e)
|
||||
|
||||
# children of ik_foot
|
||||
ik = bone_class_instance(obj, ["foot", "foot_roll", "foot_roll_01", "foot_roll_02", "knee_target", "foot_target"])
|
||||
|
||||
ik.knee_target = add_pole_target_bone(obj, mt_chain.shin, "knee_target") #XXX - pick a better name
|
||||
ik.update()
|
||||
ik.knee_target_e.parent = mt.hips_e
|
||||
|
||||
# foot roll is an interesting one!
|
||||
# plot a vector from the toe bone's head, backwards, the length of the foot
|
||||
# then align it with the foot but reverse direction.
|
||||
ik.foot_roll_e = copy_bone_simple(arm, mt_chain.toe, base_names[mt_chain.foot] + "_roll")
|
||||
ik.foot_roll = ik.foot_roll_e.name
|
||||
ik.foot_roll_e.parent = ik_chain.foot_e
|
||||
ik.foot_roll_e.translate(- (mt_chain.toe_e.vector.normalize() * mt_chain.foot_e.length))
|
||||
ik.foot_roll_e.align_orientation(mt_chain.foot_e)
|
||||
ik.foot_roll_e.tail = ik.foot_roll_e.head - ik.foot_roll_e.vector # flip
|
||||
ik.foot_roll_e.align_roll(mt_chain.foot_e.matrix.rotationPart() * Vector(0.0, 0.0, -1.0))
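# the roll control now sits one foot-length behind the toe, aligned with the foot
# but pointing the other way; its rotation is copied onto MCH-foot further down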
|
||||
|
||||
# MCH-foot
|
||||
ik.foot_roll_01_e = copy_bone_simple(arm, mt_chain.foot, "MCH-" + base_names[mt_chain.foot])
|
||||
ik.foot_roll_01 = ik.foot_roll_01_e.name
|
||||
ik.foot_roll_01_e.parent = ik_chain.foot_e
|
||||
ik.foot_roll_01_e.head, ik.foot_roll_01_e.tail = mt_chain.foot_e.tail, mt_chain.foot_e.head
|
||||
ik.foot_roll_01_e.roll = ik.foot_roll_e.roll
|
||||
|
||||
# ik_target, child of MCH-foot
|
||||
ik.foot_target_e = copy_bone_simple(arm, mt_chain.foot, base_names[mt_chain.foot] + "_ik_target")
|
||||
ik.foot_target = ik.foot_target_e.name
|
||||
ik.foot_target_e.parent = ik.foot_roll_01_e
|
||||
ik.foot_target_e.align_orientation(ik_chain.foot_e)
|
||||
ik.foot_target_e.length = ik_chain.foot_e.length / 2.0
|
||||
ik.foot_target_e.connected = True
|
||||
|
||||
# MCH-foot.02 child of MCH-foot
|
||||
ik.foot_roll_02_e = copy_bone_simple(arm, mt_chain.foot, "MCH-%s_02" % base_names[mt_chain.foot])
|
||||
ik.foot_roll_02 = ik.foot_roll_02_e.name
|
||||
ik.foot_roll_02_e.parent = ik.foot_roll_01_e
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
mt.update()
|
||||
mt_chain.update()
|
||||
ik.update()
|
||||
ik_chain.update()
|
||||
|
||||
# simple constraining of orig bones
|
||||
con = mt_chain.thigh_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = ik_chain.thigh
|
||||
|
||||
con = mt_chain.shin_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = ik_chain.shin
|
||||
|
||||
con = mt_chain.foot_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = ik.foot_roll_02
|
||||
|
||||
con = mt_chain.toe_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = ik_chain.toe
|
||||
|
||||
# others...
|
||||
con = ik.foot_roll_01_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = ik.foot_roll
|
||||
|
||||
|
||||
# IK
|
||||
con = ik_chain.shin_p.constraints.new('IK')
|
||||
con.chain_length = 2
|
||||
con.iterations = 500
|
||||
con.pole_angle = -90.0 # XXX - in deg!
|
||||
con.use_tail = True
|
||||
con.use_stretch = True
|
||||
con.use_target = True
|
||||
con.use_rotation = False
|
||||
con.weight = 1.0
|
||||
|
||||
con.target = obj
|
||||
con.subtarget = ik.foot_target
|
||||
|
||||
con.pole_target = obj
|
||||
con.pole_subtarget = ik.knee_target
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
return None, ik_chain.thigh, ik_chain.shin, ik_chain.foot, ik_chain.toe
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
bones_ik = ik(obj, bone_definition, base_names, options)
|
||||
return bones_ik
|
||||
310  release/scripts/modules/rigify/neck_flex.py  Normal file
@@ -0,0 +1,310 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError, get_layer_dict
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
# not used, defined for completeness
|
||||
METARIG_NAMES = ("body", "head")
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('body')
|
||||
bone.head[:] = 0.0000, -0.0276, -0.1328
|
||||
bone.tail[:] = 0.0000, -0.0170, -0.0197
|
||||
bone.roll = 0.0000
|
||||
bone.connected = False
|
||||
bone = arm.edit_bones.new('head')
|
||||
bone.head[:] = 0.0000, -0.0170, -0.0197
|
||||
bone.tail[:] = 0.0000, 0.0726, 0.1354
|
||||
bone.roll = 0.0000
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['body']
|
||||
bone = arm.edit_bones.new('neck.01')
|
||||
bone.head[:] = 0.0000, -0.0170, -0.0197
|
||||
bone.tail[:] = 0.0000, -0.0099, 0.0146
|
||||
bone.roll = 0.0000
|
||||
bone.connected = False
|
||||
bone.parent = arm.edit_bones['head']
|
||||
bone = arm.edit_bones.new('neck.02')
|
||||
bone.head[:] = 0.0000, -0.0099, 0.0146
|
||||
bone.tail[:] = 0.0000, -0.0242, 0.0514
|
||||
bone.roll = 0.0000
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['neck.01']
|
||||
bone = arm.edit_bones.new('neck.03')
|
||||
bone.head[:] = 0.0000, -0.0242, 0.0514
|
||||
bone.tail[:] = 0.0000, -0.0417, 0.0868
|
||||
bone.roll = 0.0000
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['neck.02']
|
||||
bone = arm.edit_bones.new('neck.04')
|
||||
bone.head[:] = 0.0000, -0.0417, 0.0868
|
||||
bone.tail[:] = 0.0000, -0.0509, 0.1190
|
||||
bone.roll = 0.0000
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['neck.03']
|
||||
bone = arm.edit_bones.new('neck.05')
|
||||
bone.head[:] = 0.0000, -0.0509, 0.1190
|
||||
bone.tail[:] = 0.0000, -0.0537, 0.1600
|
||||
bone.roll = 0.0000
|
||||
bone.connected = True
|
||||
bone.parent = arm.edit_bones['neck.04']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['head']
|
||||
pbone['type'] = 'neck_flex'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
'''
|
||||
The bone given is the head, its parent is the body,
|
||||
its only child is the first of a chain with matching basenames.
|
||||
eg.
|
||||
body -> head -> neck_01 -> neck_02 -> neck_03.... etc
|
||||
'''
|
||||
arm = obj.data
|
||||
head = arm.bones[orig_bone_name]
|
||||
body = head.parent
|
||||
|
||||
children = head.children
|
||||
if len(children) != 1:
|
||||
raise RigifyError("expected the head bone '%s' to have only 1 child." % orig_bone_name)
|
||||
|
||||
child = children[0]
|
||||
bone_definition = [body.name, head.name, child.name]
|
||||
bone_definition.extend([child.name for child in child.children_recursive_basename])
|
||||
return bone_definition
|
||||
|
||||
|
||||


def main(obj, bone_definition, base_names, options):
    from Mathutils import Vector

    arm = obj.data

    # Initialize container classes for convenience
    mt = bone_class_instance(obj, ["body", "head"])  # meta
    mt.body = bone_definition[0]
    mt.head = bone_definition[1]
    mt.update()

    neck_chain = bone_definition[2:]

    mt_chain = bone_class_instance(obj, [("neck_%.2d" % (i + 1)) for i in range(len(neck_chain))])  # 99 bones enough eh?
    for i, attr in enumerate(mt_chain.attr_names):
        setattr(mt_chain, attr, neck_chain[i])
    mt_chain.update()

    neck_chain_basename = base_names[mt_chain.neck_01_e.name].split(".")[0]
    neck_chain_segment_length = mt_chain.neck_01_e.length

    ex = bone_class_instance(obj, ["head", "head_hinge", "neck_socket", "head_ctrl"])  # hinge & extras

    # Add the head hinge at the body's location; it becomes the parent of the original head

    # apply everything to this copy of the chain
    ex_chain = mt_chain.copy(base_names=base_names)
    ex_chain.neck_01_e.parent = mt_chain.neck_01_e.parent

    # Copy the head bone and offset
    ex.head_e = copy_bone_simple(arm, mt.head, "MCH_%s" % base_names[mt.head], parent=True)
    ex.head_e.connected = False
    ex.head = ex.head_e.name
    # offset
    head_length = ex.head_e.length
    ex.head_e.head.y += head_length / 2.0
    ex.head_e.tail.y += head_length / 2.0

    # Yes, use the body bone but call it a head hinge
    ex.head_hinge_e = copy_bone_simple(arm, mt.body, "MCH_%s_hinge" % base_names[mt.head], parent=False)
    ex.head_hinge_e.connected = False
    ex.head_hinge = ex.head_hinge_e.name
    ex.head_hinge_e.head.y += head_length / 4.0
    ex.head_hinge_e.tail.y += head_length / 4.0

    # Insert the neck socket; the head copies this location
    ex.neck_socket_e = arm.edit_bones.new("MCH-%s_socked" % neck_chain_basename)
    ex.neck_socket = ex.neck_socket_e.name
    ex.neck_socket_e.connected = False
    ex.neck_socket_e.parent = mt.body_e
    ex.neck_socket_e.head = mt.head_e.head
    ex.neck_socket_e.tail = mt.head_e.head - Vector(0.0, neck_chain_segment_length / 2.0, 0.0)
    ex.neck_socket_e.roll = 0.0

    # copy of the head for controlling
    ex.head_ctrl_e = copy_bone_simple(arm, mt.head, base_names[mt.head])
    ex.head_ctrl = ex.head_ctrl_e.name
    ex.head_ctrl_e.parent = ex.head_hinge_e

    for i, attr in enumerate(ex_chain.attr_names):
        neck_e = getattr(ex_chain, attr + "_e")

        # don't store parent names; re-reference as each chain bone's parent.
        neck_e_parent = arm.edit_bones.new("MCH-rot_%s" % base_names[getattr(mt_chain, attr)])
        neck_e_parent.head = neck_e.head
        neck_e_parent.tail = neck_e.head + (mt.head_e.vector.normalize() * neck_chain_segment_length / 2.0)
        neck_e_parent.roll = mt.head_e.roll

        orig_parent = neck_e.parent
        neck_e.connected = False
        neck_e.parent = neck_e_parent
        neck_e_parent.connected = False

        if i == 0:
            neck_e_parent.parent = mt.body_e
        else:
            neck_e_parent.parent = orig_parent

    bpy.ops.object.mode_set(mode='OBJECT')

    mt.update()
    mt_chain.update()
    ex_chain.update()
    ex.update()

    # Simple one off constraints, no drivers
    con = ex.head_ctrl_p.constraints.new('COPY_LOCATION')
    con.target = obj
    con.subtarget = ex.neck_socket

    con = ex.head_p.constraints.new('COPY_ROTATION')
    con.target = obj
    con.subtarget = ex.head_ctrl

    # driven hinge
    prop = rna_idprop_ui_prop_get(ex.head_ctrl_p, "hinge", create=True)
    ex.head_ctrl_p["hinge"] = 0.0
    prop["soft_min"] = 0.0
    prop["soft_max"] = 1.0

    con = ex.head_hinge_p.constraints.new('COPY_ROTATION')
    con.name = "hinge"
    con.target = obj
    con.subtarget = mt.body

    # add driver
    hinge_driver_path = ex.head_ctrl_p.path_to_id() + '["hinge"]'

    fcurve = con.driver_add("influence", 0)
    driver = fcurve.driver
    tar = driver.targets.new()
    driver.type = 'AVERAGE'
    tar.name = "var"
    tar.id_type = 'OBJECT'
    tar.id = obj
    tar.data_path = hinge_driver_path

    #mod = fcurve_driver.modifiers.new('GENERATOR')
    mod = fcurve.modifiers[0]
    mod.poly_order = 1
    mod.coefficients[0] = 1.0
    mod.coefficients[1] = -1.0
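
    # Explanatory comment (editorial, not in the original): with poly_order 1 and
    # coefficients (1.0, -1.0), the generator modifier above maps the driver value
    # to 1.0 - hinge, so hinge = 0.0 gives full COPY_ROTATION influence (the head
    # hinge follows the body's rotation) and hinge = 1.0 turns that copy off.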

    head_driver_path = ex.head_ctrl_p.path_to_id()

    target_names = [("b%.2d" % (i + 1)) for i in range(len(neck_chain))]

    ex.head_ctrl_p["bend_tot"] = 0.0
    fcurve = ex.head_ctrl_p.driver_add('["bend_tot"]', 0)
    driver = fcurve.driver
    driver.type = 'SUM'
    fcurve.modifiers.remove(0)  # grr, don't need a modifier

    for i in range(len(neck_chain)):
        tar = driver.targets.new()
        tar.name = target_names[i]
        tar.id_type = 'OBJECT'
        tar.id = obj
        tar.data_path = head_driver_path + ('["bend_%.2d"]' % (i + 1))
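
    # Explanatory comment (editorial, not in the original): "bend_tot" is a SUM
    # driver over the per-segment "bend_01" .. "bend_NN" properties created in the
    # loop below, and each segment's COPY_ROTATION influence uses the expression
    # bend/bend_tot, so the head control's rotation is distributed along the neck
    # in proportion to each segment's bend value.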

    for i, attr in enumerate(ex_chain.attr_names):
        neck_p = getattr(ex_chain, attr + "_p")
        neck_p.lock_location = True, True, True
        neck_p.lock_rotations_4d = True

        # Add bend prop
        prop_name = "bend_%.2d" % (i + 1)
        prop = rna_idprop_ui_prop_get(ex.head_ctrl_p, prop_name, create=True)
        ex.head_ctrl_p[prop_name] = 1.0
        prop["soft_min"] = 0.0
        prop["soft_max"] = 1.0

        # add parent constraint
        neck_p_parent = neck_p.parent

        # add constraint
        con = neck_p_parent.constraints.new('COPY_ROTATION')
        con.name = "Copy Rotation"
        con.target = obj
        con.subtarget = ex.head
        con.owner_space = 'LOCAL'
        con.target_space = 'LOCAL'

        fcurve = con.driver_add("influence", 0)
        driver = fcurve.driver
        driver.type = 'SCRIPTED'
        driver.expression = "bend/bend_tot"

        fcurve.modifiers.remove(0)  # grr, don't need a modifier

        # add target
        tar = driver.targets.new()
        tar.name = "bend_tot"
        tar.id_type = 'OBJECT'
        tar.id = obj
        tar.data_path = head_driver_path + ('["bend_tot"]')

        tar = driver.targets.new()
        tar.name = "bend"
        tar.id_type = 'OBJECT'
        tar.id = obj
        tar.data_path = head_driver_path + ('["%s"]' % prop_name)

        # finally constrain the original bone to this one
        orig_neck_p = getattr(mt_chain, attr + "_p")
        con = orig_neck_p.constraints.new('COPY_ROTATION')
        con.target = obj
        con.subtarget = neck_p.name

    # last step setup layers
    layers = get_layer_dict(options)
    lay = layers["extra"]
    for attr in ex_chain.attr_names:
        getattr(ex_chain, attr + "_b").layer = lay
    for attr in ex.attr_names:
        getattr(ex, attr + "_b").layer = lay

    # no blending the result of this
    return None
release/scripts/modules/rigify/palm_curl.py (new file, 243 lines)
@@ -0,0 +1,243 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

import bpy
from rigify import get_layer_dict
from rigify_utils import copy_bone_simple, get_side_name
from rna_prop_ui import rna_idprop_ui_prop_get

# not used, defined for completeness
METARIG_NAMES = tuple()


def metarig_template():
    # generated by rigify.write_meta_rig
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('hand')
    bone.head[:] = 0.0004, -0.0629, 0.0000
    bone.tail[:] = 0.0021, -0.0209, 0.0000
    bone.roll = 0.0000
    bone.connected = False
    bone = arm.edit_bones.new('palm.03')
    bone.head[:] = -0.0000, 0.0000, 0.0000
    bone.tail[:] = 0.0025, 0.0644, -0.0065
    bone.roll = -3.1396
    bone.connected = False
    bone.parent = arm.edit_bones['hand']
    bone = arm.edit_bones.new('palm.02')
    bone.head[:] = 0.0252, -0.0000, 0.0000
    bone.tail[:] = 0.0324, 0.0627, -0.0065
    bone.roll = -3.1357
    bone.connected = False
    bone.parent = arm.edit_bones['hand']
    bone = arm.edit_bones.new('palm.01')
    bone.head[:] = 0.0504, 0.0000, 0.0000
    bone.tail[:] = 0.0703, 0.0508, -0.0065
    bone.roll = -3.1190
    bone.connected = False
    bone.parent = arm.edit_bones['hand']
    bone = arm.edit_bones.new('palm.04')
    bone.head[:] = -0.0252, 0.0000, 0.0000
    bone.tail[:] = -0.0286, 0.0606, -0.0065
    bone.roll = 3.1386
    bone.connected = False
    bone.parent = arm.edit_bones['hand']
    bone = arm.edit_bones.new('palm.05')
    bone.head[:] = -0.0504, 0.0000, 0.0000
    bone.tail[:] = -0.0669, 0.0534, -0.0065
    bone.roll = 3.1239
    bone.connected = False
    bone.parent = arm.edit_bones['hand']
    bone = arm.edit_bones.new('thumb')
    bone.head[:] = 0.0682, -0.0148, 0.0000
    bone.tail[:] = 0.1063, 0.0242, -0.0065
    bone.roll = -3.0929
    bone.connected = False
    bone.parent = arm.edit_bones['hand']

    bpy.ops.object.mode_set(mode='OBJECT')
    pbone = obj.pose.bones['palm.01']
    pbone['type'] = 'palm_curl'


def metarig_definition(obj, orig_bone_name):
    '''
    The bone given is the first in an array of siblings with a matching basename,
    sorted with the pointer finger first and the little finger last.
    eg.
        [pointer, middle, ring, pinky... ]  # any number of fingers
    '''
    arm = obj.data

    palm_bone = arm.bones[orig_bone_name]
    palm_parent = palm_bone.parent
    palm_base = palm_bone.basename
    bone_definition = [bone.name for bone in palm_parent.children if bone.basename == palm_base]
    bone_definition.sort()
    bone_definition.reverse()

    return [palm_parent.name] + bone_definition
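
# Illustrative note (editorial, not part of the original script): the function
# returns the parent's name followed by the palm bones sorted by name and then
# reversed, so with the metarig template above it would be something like
#   ['hand', 'palm.05', 'palm.04', 'palm.03', 'palm.02', 'palm.01']
# (the 'thumb' bone is skipped because its basename does not match 'palm').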


def main(obj, bone_definition, base_names, options):
    arm = obj.data

    children = bone_definition[1:]

    # Make a copy of the pinky
    # simply assume the pinky has the lowest name
    pinky_ebone = arm.edit_bones[children[0]]
    ring_ebone = arm.edit_bones[children[1]]

    # FIXME, why split the second one?
    base_name = base_names[pinky_ebone.name].rsplit('.', 2)[0]

    control_ebone = copy_bone_simple(arm, pinky_ebone.name, base_name + get_side_name(base_names[pinky_ebone.name]), parent=True)
    control_name = control_ebone.name

    offset = (pinky_ebone.head - ring_ebone.head)

    control_ebone.translate(offset)

    bpy.ops.object.mode_set(mode='OBJECT')

    arm = obj.data
    control_pbone = obj.pose.bones[control_name]
    pinky_pbone = obj.pose.bones[children[0]]

    control_pbone.rotation_mode = 'YZX'
    control_pbone.lock_rotation = False, True, True

    driver_fcurves = pinky_pbone.driver_add("rotation_euler")

    controller_path = control_pbone.path_to_id()

    # add custom prop
    control_pbone["spread"] = 0.0
    prop = rna_idprop_ui_prop_get(control_pbone, "spread", create=True)
    prop["soft_min"] = -1.0
    prop["soft_max"] = 1.0

    # *****
    driver = driver_fcurves[0].driver
    driver.type = 'AVERAGE'

    tar = driver.targets.new()
    tar.name = "x"
    tar.id_type = 'OBJECT'
    tar.id = obj
    tar.data_path = controller_path + ".rotation_euler[0]"

    # *****
    driver = driver_fcurves[1].driver
    driver.expression = "-x/4.0"

    tar = driver.targets.new()
    tar.name = "x"
    tar.id_type = 'OBJECT'
    tar.id = obj
    tar.data_path = controller_path + ".rotation_euler[0]"

    # *****
    driver = driver_fcurves[2].driver
    driver.expression = "(1.0-cos(x))-s"

    for fcurve in driver_fcurves:
        fcurve.modifiers.remove(0)  # grr, don't need a modifier

    tar = driver.targets.new()
    tar.name = "x"
    tar.id_type = 'OBJECT'
    tar.id = obj
    tar.data_path = controller_path + ".rotation_euler[0]"

    tar = driver.targets.new()
    tar.name = "s"
    tar.id_type = 'OBJECT'
    tar.id = obj
    tar.data_path = controller_path + '["spread"]'
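
    # Explanatory comment (editorial, not in the original): the three drivers
    # above act on the pinky bone's X, Y and Z euler channels. Channel [0]
    # simply copies the control bone's X rotation (AVERAGE of one target),
    # channel [1] counter-rotates by -x/4.0, and channel [2] uses
    # (1.0-cos(x))-s, so the Z rotation builds up nonlinearly with the curl
    # while the "spread" property (s) offsets it independently.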

    for i, child_name in enumerate(children):
        child_pbone = obj.pose.bones[child_name]
        child_pbone.rotation_mode = 'YZX'

        if child_name != children[-1] and child_name != children[0]:

            # this is somewhat arbitrary but seems to look good
            inf = i / (len(children) + 1)
            inf = 1.0 - inf
            inf = ((inf * inf) + inf) / 2.0

            # used for X/Y constraint
            inf_minor = inf * inf
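            # Worked example (editorial comment, not in the original): with five
            # palm bones, len(children) == 5, so for i == 1 the steps give
            # inf = 1.0 - 1/6 ~= 0.833, then ((0.833*0.833) + 0.833)/2 ~= 0.764,
            # and inf_minor ~= 0.58; the influence falls off as i increases,
            # i.e. for bones further from the driving pinky bone.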

            con = child_pbone.constraints.new('COPY_ROTATION')
            con.name = "Copy Z Rot"
            con.target = obj
            con.subtarget = children[0]  # also pinky_pbone
            con.owner_space = con.target_space = 'LOCAL'
            con.use_x, con.use_y, con.use_z = False, False, True
            con.influence = inf

            con = child_pbone.constraints.new('COPY_ROTATION')
            con.name = "Copy XY Rot"
            con.target = obj
            con.subtarget = children[0]  # also pinky_pbone
            con.owner_space = con.target_space = 'LOCAL'
            con.use_x, con.use_y, con.use_z = True, True, False
            con.influence = inf_minor

    child_pbone = obj.pose.bones[children[-1]]
    child_pbone.rotation_mode = 'QUATERNION'

    # fix at the end since there is some trouble with transform info not being updated otherwise
    def x_direction():
        # NOTE: the direction of the Z rotation depends on which side the palm is on.
        # we could do a simple side-of-x test but better to work out the direction
        # the hand is facing.
        from Mathutils import Vector, AngleBetweenVecs
        from math import degrees
        child_pbone_01 = obj.pose.bones[children[0]].bone
        child_pbone_02 = obj.pose.bones[children[1]].bone

        rel_vec = child_pbone_01.head - child_pbone_02.head
        x_vec = child_pbone_01.matrix.rotationPart() * Vector(1.0, 0.0, 0.0)

        return degrees(AngleBetweenVecs(rel_vec, x_vec)) > 90.0

    if x_direction():  # flip
        driver.expression = "-(%s)" % driver.expression

    # last step setup layers
    layers = get_layer_dict(options)
    arm.bones[control_name].layer = layers["extra"]

    # no blending the result of this
    return None
Some files were not shown because too many files have changed in this diff.