Merged changes in the trunk up to revision 34010.
@@ -690,7 +690,7 @@ elseif(WIN32)
 endif()
 
 # used in many places so include globally, like OpenGL
-blender_include_dirs(${PTHREADS_INC})
+blender_include_dirs("${PTHREADS_INC}")
 
 elseif(APPLE)
 
@@ -945,7 +945,7 @@ endif()
 #-----------------------------------------------------------------------------
 # Configure OpenGL.
 find_package(OpenGL)
-blender_include_dirs(${OPENGL_INCLUDE_DIR})
+blender_include_dirs("${OPENGL_INCLUDE_DIR}")
 # unset(OPENGL_LIBRARIES CACHE) # not compat with older cmake
 # unset(OPENGL_xmesa_INCLUDE_DIR CACHE) # not compat with older cmake
 
@@ -412,7 +412,7 @@ if env['WITH_BF_PLAYER']:
 playerlist += B.create_blender_liblist(env, 'player2')
 playerlist += B.create_blender_liblist(env, 'intern')
 playerlist += B.create_blender_liblist(env, 'extern')
-env.BlenderProg(B.root_build_dir, "blenderplayer", playerlist, thestatlibs + dobj + thesyslibs, [B.root_build_dir+'/lib'] + thelibincs, 'blenderplayer')
+env.BlenderProg(B.root_build_dir, "blenderplayer", playerlist + thestatlibs + dobj, thesyslibs, [B.root_build_dir+'/lib'] + thelibincs, 'blenderplayer')
 
 ##### Now define some targets
 
@@ -1,4 +1,4 @@
-.TH "BLENDER" "1" "November 09, 2010" "Blender Blender 2\&.55 (sub 0) "
+.TH "BLENDER" "1" "January 01, 2011" "Blender Blender 2\&.56 (sub 0) "
 
 .SH NAME
 blender \- a 3D modelling and rendering package
@@ -15,7 +15,7 @@ Use Blender to create TV commercials, to make technical visualizations, business
 http://www.blender.org
 .SH OPTIONS
 
-Blender 2.55 (sub 0) Build
+Blender 2.56 (sub 0) Build
 Usage: blender [args ...] [file] [args ...]
 .br
 .SS "Render Options:"
@@ -1,189 +0,0 @@
-# Blender.Geometry module and its subtypes
-
-"""
-The Blender.Geometry submodule.
-
-Geometry
-========
-(when accessing it from the Game Engine use Geometry instead of Blender.Geometry)
-
-This new module provides access to a geometry function.
-"""
-
-def Intersect(vec1, vec2, vec3, ray, orig, clip=1):
-"""
-Return the intersection between a ray and a triangle, if possible, return None otherwise.
-@type vec1: Vector object.
-@param vec1: A 3d vector, one corner of the triangle.
-@type vec2: Vector object.
-@param vec2: A 3d vector, one corner of the triangle.
-@type vec3: Vector object.
-@param vec3: A 3d vector, one corner of the triangle.
-@type ray: Vector object.
-@param ray: A 3d vector, the orientation of the ray. the length of the ray is not used, only the direction.
-@type orig: Vector object.
-@param orig: A 3d vector, the origin of the ray.
-@type clip: integer
-@param clip: if 0, don't restrict the intersection to the area of the triangle, use the infinite plane defined by the triangle.
-@rtype: Vector object
-@return: The intersection between a ray and a triangle, if possible, None otherwise.
-"""
-
-def TriangleArea(vec1, vec2, vec3):
-"""
-Return the area size of the 2D or 3D triangle defined.
-@type vec1: Vector object.
-@param vec1: A 2d or 3d vector, one corner of the triangle.
-@type vec2: Vector object.
-@param vec2: A 2d or 3d vector, one corner of the triangle.
-@type vec3: Vector object.
-@param vec3: A 2d or 3d vector, one corner of the triangle.
-@rtype: float
-@return: The area size of the 2D or 3D triangle defined.
-"""
-
-def TriangleNormal(vec1, vec2, vec3):
-"""
-Return the normal of the 3D triangle defined.
-@type vec1: Vector object.
-@param vec1: A 3d vector, one corner of the triangle.
-@type vec2: Vector object.
-@param vec2: A 3d vector, one corner of the triangle.
-@type vec3: Vector object.
-@param vec3: A 3d vector, one corner of the triangle.
-@rtype: float
-@return: The normal of the 3D triangle defined.
-"""
-
-def QuadNormal(vec1, vec2, vec3, vec4):
-"""
-Return the normal of the 3D quad defined.
-@type vec1: Vector object.
-@param vec1: A 3d vector, the first vertex of the quad.
-@type vec2: Vector object.
-@param vec2: A 3d vector, the second vertex of the quad.
-@type vec3: Vector object.
-@param vec3: A 3d vector, the third vertex of the quad.
-@type vec4: Vector object.
-@param vec4: A 3d vector, the fourth vertex of the quad.
-@rtype: float
-@return: The normal of the 3D quad defined.
-"""
-
-def LineIntersect(vec1, vec2, vec3, vec4):
-"""
-Return a tuple with the points on each line respectively closest to the other
-(when both lines intersect, both vector hold the same value).
-The lines are evaluated as infinite lines in space, the values returned may not be between the 2 points given for each line.
-@type vec1: Vector object.
-@param vec1: A 3d vector, one point on the first line.
-@type vec2: Vector object.
-@param vec2: A 3d vector, another point on the first line.
-@type vec3: Vector object.
-@param vec3: A 3d vector, one point on the second line.
-@type vec4: Vector object.
-@param vec4: A 3d vector, another point on the second line.
-@rtype: (Vector object, Vector object)
-@return: A tuple with the points on each line respectively closest to the other.
-"""
-
-def PolyFill(polylines):
-"""
-Takes a list of polylines and calculates triangles that would fill in the polylines.
-Multiple lines can be used to make holes inside a polyline, or fill in 2 separate lines at once.
-@type polylines: List of lists containing vectors, each representing a closed polyline.
-@rtype: list
-@return: a list if tuples each a tuple of 3 ints representing a triangle indexing the points given.
-@note: 2D Vectors will have an assumed Z axis of zero, 4D Vectors W axis is ignored.
-@note: The order of points in a polyline effect the direction returned triangles face, reverse the order of a polyline to flip the normal of returned faces.
-
-I{B{Example:}}
-
-The example below creates 2 polylines and fills them in with faces, then makes a mesh in the current scene::
-import Blender
-Vector= Blender.mathutils.Vector
-
-# Outline of 5 points
-polyline1= [Vector(-2.0, 1.0, 1.0), Vector(-1.0, 2.0, 1.0), Vector(1.0, 2.0, 1.0), Vector(1.0, -1.0, 1.0), Vector(-1.0, -1.0, 1.0)]
-polyline2= [Vector(-1, 1, 1.0), Vector(0, 1, 1.0), Vector(0, 0, 1.0), Vector(-1.0, 0.0, 1.0)]
-fill= Blender.Geometry.PolyFill([polyline1, polyline2])
-
-# Make a new mesh and add the truangles into it
-me= Blender.Mesh.New()
-me.verts.extend(polyline1)
-me.verts.extend(polyline2)
-me.faces.extend(fill) # Add the faces, they reference the verts in polyline 1 and 2
-
-scn = Blender.Scene.GetCurrent()
-ob = scn.objects.new(me)
-Blender.Redraw()
-"""
-
-def LineIntersect2D(vec1, vec2, vec3, vec4):
-"""
-Takes 2 lines vec1, vec2 for the 2 points of the first line and vec2, vec3 for the 2 points of the second line.
-@rtype: Vector
-@return: a 2D Vector for the intersection or None where there is no intersection.
-"""
-
-def ClosestPointOnLine(pt, vec1, vec2):
-"""
-Takes 2 lines vec1, vec2 for the 2 points of the first line and vec2, vec3 for the 2 points of the second line.
-@rtype: tuple
-@return: a tuple containing a vector and a float, the vector is the closest point on the line, the float is the position on the line, between 0 and 1 the point is on the line.
-"""
-
-def PointInTriangle2D(pt, tri_pt1, tri_pt2, tri_pt3):
-"""
-Takes 4 vectors (one for the test point and 3 for the triangle)
-This is a 2d function so only X and Y are used, Z and W will be ignored.
-@rtype: int
-@return: 1 for a clockwise intersection, -1 for counter clockwise intersection, 0 when there is no intersection.
-"""
-
-def PointInQuad2D(pt, quad_pt1, quad_pt2, quad_pt3):
-"""
-Takes 5 vectors (one for the test point and 5 for the quad)
-This is a 2d function so only X and Y are used, Z and W will be ignored.
-@rtype: int
-@return: 1 for a clockwise intersection, -1 for counter clockwise intersection, 0 when there is no intersection.
-"""
-
-def BoxPack2D(boxlist):
-"""
-Takes a list of 2D boxes and packs them into a square.
-Each box in boxlist must be a list of at least 4 items - [x,y,w,h], after running this script,
-the X and Y values in each box will be moved to packed, non overlapping locations.
-
-Example::
-
-# Make 500 random boxes, pack them and make a mesh from it
-from Blender import Geometry, Scene, Mesh
-import random
-boxes = []
-for i in xrange(500):
-boxes.append( [0,0, random.random()+0.1, random.random()+0.1] )
-boxsize = Geometry.BoxPack2D(boxes)
-print 'BoxSize', boxsize
-me = Mesh.New()
-for x in boxes:
-me.verts.extend([(x[0],x[1], 0), (x[0],x[1]+x[3], 0), (x[0]+x[2],x[1]+x[3], 0), (x[0]+x[2],x[1], 0) ])
-v1= me.verts[-1]
-v2= me.verts[-2]
-v3= me.verts[-3]
-v4= me.verts[-4]
-me.faces.extend([(v1,v2,v3,v4)])
-scn = Scene.GetCurrent()
-scn.objects.new(me)
-
-@note: Each boxlist item can be longer then 4, the extra items are ignored and stay untouched.
-@rtype: tuple
-@return: a tuple pair - (width, height) of all the packed boxes.
-"""
-def BezierInterp(vec_knot_1, vec_handle_1, vec_handle_2, vec_knot_2, resolution):
-"""
-Takes 4 vectors representing a bezier curve and returns a list of vector points.
-@note: any vector size is supported, the largest dimension from the input will be used for all returned vectors/
-@rtype: list
-@return: a list of vectors the size of resolution including the start and end points (vec_knot_1 and vec_knot_2)
-"""
@@ -1,156 +0,0 @@
-# Blender.mathutils module and its subtypes
-
-
-
-class Vector:
-"""
-
-@attention: Vector data can be wrapped or non-wrapped. When a object is wrapped it
-means that the object will give you direct access to the data inside of blender. Modification
-of this object will directly change the data inside of blender. To copy a wrapped object
-you need to use the object's constructor. If you copy and object by assignment you will not get
-a second copy but a second reference to the same data. Only certain functions will return
-wrapped data. This will be indicated in the method description.
-"""
-
-def __init__(list = None):
-"""
-Create a new 2d, 3d, or 4d Vector object from a list of floating point numbers.
-@note: that python uses higher precission floating point numbers, so values assigned to a vector may have some rounding error.
-
-
-Example::
-v = Vector(1,0,0)
-v = Vector(myVec)
-v = Vector(list)
-@type list: PyList of float or int
-@param list: The list of values for the Vector object. Can be a sequence or raw numbers.
-Must be 2, 3, or 4 values. The list is mapped to the parameters as [x,y,z,w].
-@rtype: Vector object.
-@return: It depends wheter a parameter was passed:
-- (list): Vector object initialized with the given values;
-- (): An empty 3 dimensional vector.
-"""
-
-class Euler:
-"""
-The Euler object
-================
-This object gives access to Eulers in Blender.
-@note: You can access a euler object like a sequence
-- x = euler[0]
-@note: Comparison operators can be done:
-- ==, != test numeric values within epsilon
-@attention: Euler data can be wrapped or non-wrapped. When a object is wrapped it
-means that the object will give you direct access to the data inside of blender. Modification
-of this object will directly change the data inside of blender. To copy a wrapped object
-you need to use the object's constructor. If you copy and object by assignment you will not get
-a second copy but a second reference to the same data. Only certain functions will return
-wrapped data. This will be indicated in the method description.
-"""
-
-def __init__(list = None):
-"""
-Create a new euler object.
-
-Example::
-euler = Euler(45,0,0)
-euler = Euler(myEuler)
-euler = Euler(sequence)
-@type list: PyList of float/int
-@param list: 3d list to initialize euler
-@rtype: Euler object
-@return: Euler representing heading, pitch, bank.
-@note: Values are in degrees.
-"""
-
-class Quaternion:
-"""
-The Quaternion object
-=====================
-This object gives access to Quaternions in Blender.
-@note: Comparison operators can be done:
-- ==, != test numeric values within epsilon
-@note: Math can be performed on Quaternion classes
-- quat + quat
-- quat - quat
-- quat * float/int
-- quat * vec
-- quat * quat
-@note: You can access a quaternion object like a sequence
-- x = quat[0]
-@attention: Quaternion data can be wrapped or non-wrapped. When a object is wrapped it
-means that the object will give you direct access to the data inside of blender. Modification
-of this object will directly change the data inside of blender. To copy a wrapped object
-you need to use the object's constructor. If you copy and object by assignment you will not get
-a second copy but a second reference to the same data. Only certain functions will return
-wrapped data. This will be indicated in the method description.
-"""
-
-def __init__(list, angle = None):
-"""
-Create a new quaternion object from initialized values.
-
-Example::
-quat = Quaternion(1,2,3,4)
-quat = Quaternion(axis, angle)
-quat = Quaternion()
-quat = Quaternion(180, list)
-
-@type list: PyList of int/float
-@param list: A 3d or 4d list to initialize quaternion.
-4d if intializing [w,x,y,z], 3d if used as an axis of rotation.
-@type angle: float (optional)
-@param angle: An arbitrary rotation amount around 'list'.
-List is used as an axis of rotation in this case.
-@rtype: New quaternion object.
-@return: It depends wheter a parameter was passed:
-- (list/angle): Quaternion object initialized with the given values;
-- (): An identity 4 dimensional quaternion.
-"""
-
-class Matrix:
-"""
-The Matrix Object
-=================
-@note: Math can be performed on Matrix classes
-- mat + mat
-- mat - mat
-- mat * float/int
-- mat * vec
-- mat * mat
-@note: Comparison operators can be done:
-- ==, != test numeric values within epsilon
-@note: You can access a quaternion object like a 2d sequence
-- x = matrix[0][1]
-- vector = matrix[2]
-@attention: Quaternion data can be wrapped or non-wrapped. When a object is wrapped it
-means that the object will give you direct access to the data inside of blender. Modification
-of this object will directly change the data inside of blender. To copy a wrapped object
-you need to use the object's constructor. If you copy and object by assignment you will not get
-a second copy but a second reference to the same data. Only certain functions will return
-wrapped data. This will be indicated in the method description.
-"""
-
-def __init__(list1 = None, list2 = None, list3 = None, list4 = None):
-"""
-Create a new matrix object from initialized values.
-
-Example::
-matrix = Matrix([1,1,1],[0,1,0],[1,0,0])
-matrix = Matrix(mat)
-matrix = Matrix(seq1, seq2, vector)
-
-@type list1: PyList of int/float
-@param list1: A 2d,3d or 4d list.
-@type list2: PyList of int/float
-@param list2: A 2d,3d or 4d list.
-@type list3: PyList of int/float
-@param list3: A 2d,3d or 4d list.
-@type list4: PyList of int/float
-@param list4: A 2d,3d or 4d list.
-@rtype: New matrix object.
-@return: It depends wheter a parameter was passed:
-- (list1, etc.): Matrix object initialized with the given values;
-- (): An empty 3 dimensional matrix.
-"""
@@ -188,7 +188,16 @@ void AUD_SequencerReader::read(int & length, sample_t* & buffer)
 delete strip->reader;
 
 if(strip->old_sound)
-strip->reader = m_mixer->prepare(strip->old_sound->createReader());
+{
+try
+{
+strip->reader = m_mixer->prepare(strip->old_sound->createReader());
+}
+catch(AUD_Exception)
+{
+strip->reader = NULL;
+}
+}
 else
 strip->reader = NULL;
 }
@@ -488,11 +488,6 @@ GHOST_TKey GHOST_SystemWin32::convertKey(GHOST_IWindow *window, WPARAM wParam, L
 system->retrieveModifierKeys(oldModifiers);
 system->getModifierKeys(newModifiers);
 
-// check if modifier keys different from this event have changed and trigger those
-// This can happen when some action takes a long time (Blender not responding), resulting
-// in dropped events.
-system->handleModifierKeys(window, wParam, lParam, oldModifiers, newModifiers);
-
 //std::cout << wParam << " " << system->m_curKeyStatus[wParam] << " shift pressed: " << system->shiftPressed() << std::endl;
 
 if ((wParam >= '0') && (wParam <= '9')) {
@@ -777,12 +772,13 @@ LRESULT CALLBACK GHOST_SystemWin32::s_llKeyboardProc(int nCode, WPARAM wParam, L
 
 KBDLLHOOKSTRUCT &keyb = *(PKBDLLHOOKSTRUCT)(lParam);
 system->m_prevKeyStatus[keyb.vkCode] = system->m_curKeyStatus[keyb.vkCode];
-//std::cout << "ll: " << keyb.vkCode << " " << down << " ";
+//std::cout << "ll: " << keyb.vkCode << " " << down << " ||| ";
 if(keyb.flags) {
 if((keyb.flags & LLKHF_EXTENDED) == LLKHF_EXTENDED) {
 //std::cout << "extended ";
 }
 if((keyb.flags & LLKHF_ALTDOWN) == LLKHF_ALTDOWN) {
+//std::cout << "alt ";
 }
 if((keyb.flags & LLKHF_INJECTED)== LLKHF_INJECTED) {
 //std::cout << "injected ";
@@ -143,6 +143,7 @@ extern "C" {
 GHOST_TDragnDropTypes m_draggedObjectType;
 }
 - (void)setSystemAndWindowCocoa:(GHOST_SystemCocoa *)sysCocoa windowCocoa:(GHOST_WindowCocoa *)winCocoa;
+- (GHOST_SystemCocoa*)systemCocoa;
 @end
 @implementation CocoaWindow
 - (void)setSystemAndWindowCocoa:(GHOST_SystemCocoa *)sysCocoa windowCocoa:(GHOST_WindowCocoa *)winCocoa
@@ -150,6 +151,10 @@ extern "C" {
 systemCocoa = sysCocoa;
 associatedWindow = winCocoa;
 }
+- (GHOST_SystemCocoa*)systemCocoa
+{
+return systemCocoa;
+}
 
 -(BOOL)canBecomeKeyWindow
 {
@@ -777,6 +782,7 @@ GHOST_TSuccess GHOST_WindowCocoa::setState(GHOST_TWindowState state)
 [tmpWindow setReleasedWhenClosed:NO];
 [tmpWindow setAcceptsMouseMovedEvents:YES];
 [tmpWindow setDelegate:[m_window delegate]];
+[tmpWindow setSystemAndWindowCocoa:[m_window systemCocoa] windowCocoa:this];
 [tmpWindow registerForDraggedTypes:[NSArray arrayWithObjects:NSFilenamesPboardType,
 NSStringPboardType, NSTIFFPboardType, nil]];
 
@@ -837,6 +843,7 @@ GHOST_TSuccess GHOST_WindowCocoa::setState(GHOST_TWindowState state)
 [tmpWindow setReleasedWhenClosed:NO];
 [tmpWindow setAcceptsMouseMovedEvents:YES];
 [tmpWindow setDelegate:[m_window delegate]];
+[tmpWindow setSystemAndWindowCocoa:[m_window systemCocoa] windowCocoa:this];
 [tmpWindow registerForDraggedTypes:[NSArray arrayWithObjects:NSFilenamesPboardType,
 NSStringPboardType, NSTIFFPboardType, nil]];
 //Forbid to resize the window below the blender defined minimum one
@@ -1 +1 @@
-2.55-beta
+2.56-beta
Binary file not shown. Before: 13 KiB | After: 12 KiB
Binary file not shown. Before: 170 KiB | After: 171 KiB
@@ -57,7 +57,7 @@ for line in infile.readlines():
 infile.close()
 
 # Major was changed to float, but minor is still a string
-# Note: removed returning minor, this messes up with install path code in BLI module
+# Note: removed returning minor, messes up install path code in BLI module
 if major:
 print "%.2f" % major
 else:
@@ -56,6 +56,12 @@ init_data = True
 
 def register():
 ui.addProperties()
 
+import bpy
+scene = bpy.context.scene
+if scene:
+netsettings = scene.network_render
+ui.init_data(netsettings)
+
 
 def unregister():
@@ -25,6 +25,9 @@ class RatingRule:
 def __init__(self):
 self.enabled = True
 
+def id(self):
+return str(id(self))
+
 def rate(self, job):
 return 0
 
@@ -32,6 +35,9 @@ class ExclusionRule:
 def __init__(self):
 self.enabled = True
 
+def id(self):
+return str(id(self))
+
 def test(self, job):
 return False
 
@@ -39,6 +45,9 @@ class PriorityRule:
 def __init__(self):
 self.enabled = True
 
+def id(self):
+return str(id(self))
+
 def test(self, job):
 return False
 
@@ -50,13 +59,13 @@ class Balancer:
 
 def ruleByID(self, rule_id):
 for rule in self.rules:
-if id(rule) == rule_id:
+if rule.id() == rule_id:
 return rule
 for rule in self.priorities:
-if id(rule) == rule_id:
+if rule.id() == rule_id:
 return rule
 for rule in self.exceptions:
-if id(rule) == rule_id:
+if rule.id() == rule_id:
 return rule
 
 return None
@@ -221,9 +221,7 @@ def clientSendJobBlender(conn, scene, anim = False):
 elif modifier.type == "SOFT_BODY":
 addPointCache(job, object, modifier.point_cache, default_path)
 elif modifier.type == "SMOKE" and modifier.smoke_type == "TYPE_DOMAIN":
-addPointCache(job, object, modifier.domain_settings.point_cache_low, default_path)
-if modifier.domain_settings.use_high_resolution:
-addPointCache(job, object, modifier.domain_settings.point_cache_high, default_path)
+addPointCache(job, object, modifier.domain_settings.point_cache, default_path)
 elif modifier.type == "MULTIRES" and modifier.is_external:
 file_path = bpy.path.abspath(modifier.filepath)
 job.addFile(file_path)
@@ -27,6 +27,7 @@ from netrender.utils import *
 import netrender.model
 import netrender.balancing
 import netrender.master_html
+import netrender.thumbnail as thumbnail
 
 class MRenderFile(netrender.model.RenderFile):
 def __init__(self, filepath, index, start, end, signature):
@@ -203,6 +204,15 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
 # is extremely slow due to some timeout..
 sys.stderr.write("[%s] %s\n" % (self.log_date_time_string(), format%args))
 
+def getInfoMap(self):
+length = int(self.headers['content-length'])
+
+if length > 0:
+msg = str(self.rfile.read(length), encoding='utf8')
+return json.loads(msg)
+else:
+return {}
+
 def send_head(self, code = http.client.OK, headers = {}, content = "application/octet-stream"):
 self.send_response(code)
 self.send_header("Content-type", content)
@@ -299,7 +309,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
 elif frame.status == DONE:
 filename = os.path.join(job.save_path, "%06d.exr" % frame_number)
 
-thumbname = thumbnail(filename)
+thumbname = thumbnail.generate(filename)
 
 if thumbname:
 f = open(thumbname, 'rb')
@@ -518,8 +528,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
 job = self.server.getJobID(job_id)
 
 if job:
-length = int(self.headers['content-length'])
-info_map = eval(str(self.rfile.read(length), encoding='utf8'))
+info_map = self.getInfoMap()
 
 job.edit(info_map)
 self.send_head()
@@ -531,8 +540,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
 self.send_head(http.client.NO_CONTENT)
 # =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
 elif self.path == "/balance_limit":
-length = int(self.headers['content-length'])
-info_map = eval(str(self.rfile.read(length), encoding='utf8'))
+info_map = self.getInfoMap()
 for rule_id, limit in info_map.items():
 try:
 rule = self.server.balancer.ruleByID(rule_id)
@@ -544,8 +552,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
 self.send_head()
 # =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
 elif self.path == "/balance_enable":
-length = int(self.headers['content-length'])
-info_map = eval(str(self.rfile.read(length), encoding='utf8'))
+info_map = self.getInfoMap()
 for rule_id, enabled in info_map.items():
 rule = self.server.balancer.ruleByID(rule_id)
 if rule:
@@ -557,13 +564,8 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
 match = cancel_pattern.match(self.path)
 
 if match:
-length = int(self.headers['content-length'])
-
-if length > 0:
-info_map = eval(str(self.rfile.read(length), encoding='utf8'))
-clear = info_map.get("clear", False)
-else:
-clear = False
+info_map = self.getInfoMap()
+clear = info_map.get("clear", False)
 
 job_id = match.groups()[0]
 
@@ -584,13 +586,8 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
 match = pause_pattern.match(self.path)
 
 if match:
-length = int(self.headers['content-length'])
-
-if length > 0:
-info_map = eval(str(self.rfile.read(length), encoding='utf8'))
-status = info_map.get("status", None)
-else:
-status = None
+info_map = self.getInfoMap()
+status = info_map.get("status", None)
 
 job_id = match.groups()[0]
 
@@ -609,13 +606,8 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
 # =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
 elif self.path == "/clear":
 # cancel all jobs
-length = int(self.headers['content-length'])
-
-if length > 0:
-info_map = eval(str(self.rfile.read(length), encoding='utf8'))
-clear = info_map.get("clear", False)
-else:
-clear = False
+info_map = self.getInfoMap()
+clear = info_map.get("clear", False)
 
 self.server.stats("", "Clearing jobs")
 self.server.clear(clear)
@@ -183,28 +183,28 @@ def get(handler):
 for rule in handler.server.balancer.rules:
 rowTable(
 "rating",
-checkbox("", rule.enabled, "balance_enable('%i', '%s')" % (id(rule), str(not rule.enabled))),
+checkbox("", rule.enabled, "balance_enable('%s', '%s')" % (rule.id(), str(not rule.enabled).lower())),
 rule,
 rule.str_limit() +
-"""<button title="edit limit" onclick="balance_edit('%i', '%s');">edit</button>""" % (id(rule), str(rule.limit)) if hasattr(rule, "limit") else " "
+"""<button title="edit limit" onclick="balance_edit('%s', '%s');">edit</button>""" % (rule.id(), str(rule.limit)) if hasattr(rule, "limit") else " "
 )
 
 for rule in handler.server.balancer.priorities:
 rowTable(
 "priority",
-checkbox("", rule.enabled, "balance_enable('%i', '%s')" % (id(rule), str(not rule.enabled))),
+checkbox("", rule.enabled, "balance_enable('%s', '%s')" % (rule.id(), str(not rule.enabled).lower())),
 rule,
 rule.str_limit() +
-"""<button title="edit limit" onclick="balance_edit('%i', '%s');">edit</button>""" % (id(rule), str(rule.limit)) if hasattr(rule, "limit") else " "
+"""<button title="edit limit" onclick="balance_edit('%s', '%s');">edit</button>""" % (rule.id(), str(rule.limit)) if hasattr(rule, "limit") else " "
 )
 
 for rule in handler.server.balancer.exceptions:
 rowTable(
 "exception",
-checkbox("", rule.enabled, "balance_enable('%i', '%s')" % (id(rule), str(not rule.enabled))),
+checkbox("", rule.enabled, "balance_enable('%s', '%s')" % (rule.id(), str(not rule.enabled).lower())),
 rule,
 rule.str_limit() +
-"""<button title="edit limit" onclick="balance_edit('%i', '%s');">edit</button>""" % (id(rule), str(rule.limit)) if hasattr(rule, "limit") else " "
+"""<button title="edit limit" onclick="balance_edit('%s', '%s');">edit</button>""" % (rule.id(), str(rule.limit)) if hasattr(rule, "limit") else " "
 )
 
 endTable()
@@ -20,9 +20,9 @@ function clear_jobs()
 var r=confirm("Also delete files on master?");
 
 if (r==true) {
-request('/clear', "{'clear':True}");
+request('/clear', '{"clear":true}');
 } else {
-request('/clear', "{'clear':False}");
+request('/clear', '{"clear":false}');
 }
 }
 
@@ -31,9 +31,9 @@ function cancel_job(id)
 var r=confirm("Also delete files on master?");
 
 if (r==true) {
-request('/cancel_' + id, "{'clear':True}");
+request('/cancel_' + id, '{"clear":true}');
 } else {
-request('/cancel_' + id, "{'clear':False}");
+request('/cancel_' + id, '{"clear":false}');
 }
 }
 
@@ -41,13 +41,13 @@ function balance_edit(id, old_value)
 {
 var new_value = prompt("New limit", old_value);
 if (new_value != null && new_value != "") {
-request("/balance_limit", "{" + id + ":'" + new_value + "'}");
+request("/balance_limit", '{"' + id + '":"' + new_value + '"}');
 }
 }
 
 function balance_enable(id, value)
 {
-request("/balance_enable", "{" + id + ":" + value + "}");
+request("/balance_enable", '{"' + id + '":' + value + "}");
 }
 
 function showThumb(job, frame)
@@ -62,12 +62,9 @@ class RENDER_OT_netslave_bake(bpy.types.Operator):
 modifier.point_cache.use_disk_cache = True
 modifier.point_cache.use_external = False
 elif modifier.type == "SMOKE" and modifier.smoke_type == "TYPE_DOMAIN":
-modifier.domain_settings.point_cache_low.use_step = 1
-modifier.domain_settings.point_cache_low.use_disk_cache = True
-modifier.domain_settings.point_cache_low.use_external = False
-modifier.domain_settings.point_cache_high.use_step = 1
-modifier.domain_settings.point_cache_high.use_disk_cache = True
-modifier.domain_settings.point_cache_high.use_external = False
+modifier.domain_settings.point_cache.use_step = 1
+modifier.domain_settings.point_cache.use_disk_cache = True
+modifier.domain_settings.point_cache.use_external = False
 
 # particles modifier are stupid and don't contain data
 # we have to go through the object property
@@ -355,7 +352,7 @@ class RENDER_OT_netclientcancel(bpy.types.Operator):
 if conn:
 job = netrender.jobs[netsettings.active_job_index]
 
-conn.request("POST", cancelURL(job.id))
+conn.request("POST", cancelURL(job.id), json.dumps({'clear':False}))
 
 response = conn.getresponse()
 response.read()
@@ -382,7 +379,7 @@ class RENDER_OT_netclientcancelall(bpy.types.Operator):
 conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
 
 if conn:
-conn.request("POST", "/clear")
+conn.request("POST", "/clear", json.dumps({'clear':False}))
 
 response = conn.getresponse()
 response.read()
@@ -26,6 +26,7 @@ import bpy
 from netrender.utils import *
 import netrender.model
 import netrender.repath
+import netrender.thumbnail as thumbnail
 
 BLENDER_PATH = sys.argv[0]
 
@@ -304,7 +305,7 @@ def render_slave(engine, netsettings, threads):
 
 # thumbnail first
 if netsettings.use_slave_thumb:
-thumbname = thumbnail(filename)
+thumbname = thumbnail.generate(filename)
 
 if thumbname:
 f = open(thumbname, 'rb')
release/scripts/io/netrender/thumbnail.py (new file, 81 lines added)
@@ -0,0 +1,81 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+import sys, os
+import subprocess
+
+import bpy
+
+def generate(filename, external=True):
+if external:
+process = subprocess.Popen([sys.argv[0], "-b", "-noaudio", "-P", __file__, "--", filename], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+while process.poll() is None:
+process.stdout.read(1024) # empty buffer to be sure
+process.stdout.read()
+
+return _thumbname(filename)
+else:
+return _internal(filename)
+
+def _thumbname(filename):
+root = os.path.splitext(filename)[0]
+return root + ".jpg"
+
+def _internal(filename):
+imagename = os.path.split(filename)[1]
+thumbname = _thumbname(filename)
+
+if os.path.exists(thumbname):
+return thumbname
+
+if bpy:
+scene = bpy.data.scenes[0] # FIXME, this is dodgy!
+scene.render.file_format = "JPEG"
+scene.render.file_quality = 90
+
+# remove existing image, if there's a leftover (otherwise open changes the name)
+if imagename in bpy.data.images:
+img = bpy.data.images[imagename]
+bpy.data.images.remove(img)
+
+bpy.ops.image.open(filepath=filename)
+img = bpy.data.images[imagename]
+
+img.save_render(thumbname, scene=scene)
+
+img.user_clear()
+bpy.data.images.remove(img)
+
+try:
+process = subprocess.Popen(["convert", thumbname, "-resize", "300x300", thumbname])
+process.wait()
+return thumbname
+except Exception as exp:
+print("Error while generating thumbnail")
+print(exp)
+
+return None
+
+if __name__ == "__main__":
+import bpy
+try:
+start = sys.argv.index("--") + 1
+except ValueError:
+start = 0
+for filename in sys.argv[start:]:
+generate(filename, external=False)
@@ -276,8 +276,6 @@ class RENDER_PT_network_slaves(NeedValidAddress, NetRenderButtonsPanel, bpy.type
 sub.operator("render.netclientslaves", icon='FILE_REFRESH', text="")
 sub.operator("render.netclientblacklistslave", icon='ZOOMOUT', text="")
 
-init_data(netsettings)
-
 if netsettings.active_slave_index >= 0 and len(netsettings.slaves) > 0:
 layout.separator()
 
@@ -309,8 +307,6 @@ class RENDER_PT_network_slaves_blacklist(NeedValidAddress, NetRenderButtonsPanel
 sub = row.column(align=True)
 sub.operator("render.netclientwhitelistslave", icon='ZOOMOUT', text="")
 
-init_data(netsettings)
-
 if netsettings.active_blacklisted_slave_index >= 0 and len(netsettings.slaves_blacklist) > 0:
 layout.separator()
 
@@ -345,8 +341,6 @@ class RENDER_PT_network_jobs(NeedValidAddress, NetRenderButtonsPanel, bpy.types.
 sub.operator("render.netclientcancelall", icon='PANEL_CLOSE', text="")
 sub.operator("render.netclientdownload", icon='RENDER_ANIMATION', text="")
 
-init_data(netsettings)
-
 if netsettings.active_job_index >= 0 and len(netsettings.jobs) > 0:
 layout.separator()
 
@@ -219,7 +219,7 @@ def prefixPath(prefix_directory, file_path, prefix_path, force = False):
 else:
 full_path = os.path.join(prefix_directory, n)
 else:
-full_path = (prefix_directory, file_path)
+full_path = os.path.join(prefix_directory, file_path)
 
 return full_path
 
@@ -238,43 +238,11 @@ def getFileInfo(filepath, infos):
 
 return values
 
-def thumbnail(filename):
-root = os.path.splitext(filename)[0]
-imagename = os.path.split(filename)[1]
-thumbname = root + ".jpg"
-
-if os.path.exists(thumbname):
-return thumbname
-
-if bpy:
-scene = bpy.data.scenes[0] # FIXME, this is dodgy!
-scene.render.file_format = "JPEG"
-scene.render.file_quality = 90
-
-# remove existing image, if there's a leftover (otherwise open changes the name)
-if imagename in bpy.data.images:
-img = bpy.data.images[imagename]
-bpy.data.images.remove(img)
-
-bpy.ops.image.open(filepath=filename)
-img = bpy.data.images[imagename]
-
-img.save_render(thumbname, scene=scene)
-
-img.user_clear()
-bpy.data.images.remove(img)
-
-try:
-process = subprocess.Popen(["convert", thumbname, "-resize", "300x300", thumbname])
-process.wait()
-return thumbname
-except Exception as exp:
-print("Error while generating thumbnail")
-print(exp)
-
-return None
-
 if __name__ == "__main__":
 import bpy
-for info in sys.argv[7:]:
+try:
+start = sys.argv.index("--") + 1
+except ValueError:
+start = 0
+for info in sys.argv[start:]:
 print("$", eval(info))
@@ -60,7 +60,7 @@ class BUILTIN_KSI_LocRot(bpy.types.KeyingSetInfo):
 # iterator - use callback for selected bones/objects
 iterator = RKS_ITER_selected_item
 
-# generator - use callback for location
+# generator
 def generate(self, context, ks, data):
 # location
 RKS_GEN_location(self, context, ks, data)
@@ -77,7 +77,7 @@ class BUILTIN_KSI_LocScale(bpy.types.KeyingSetInfo):
 # iterator - use callback for selected bones/objects
 iterator = RKS_ITER_selected_item
 
-# generator - use callback for location
+# generator
 def generate(self, context, ks, data):
 # location
 RKS_GEN_location(self, context, ks, data)
@@ -94,7 +94,7 @@ class BUILTIN_KSI_LocRotScale(bpy.types.KeyingSetInfo):
 # iterator - use callback for selected bones/objects
 iterator = RKS_ITER_selected_item
 
-# generator - use callback for location
+# generator
 def generate(self, context, ks, data):
 # location
 RKS_GEN_location(self, context, ks, data)
@@ -113,7 +113,7 @@ class BUILTIN_KSI_RotScale(bpy.types.KeyingSetInfo):
 # iterator - use callback for selected bones/objects
 iterator = RKS_ITER_selected_item
 
-# generator - use callback for location
+# generator
 def generate(self, context, ks, data):
 # rotation
 RKS_GEN_rotation(self, context, ks, data)
@@ -126,7 +126,7 @@ class BUILTIN_KSI_RotScale(bpy.types.KeyingSetInfo):
 class BUILTIN_KSI_VisualLoc(bpy.types.KeyingSetInfo):
 bl_label = "Visual Location"
 
-insertkey_visual = True
+bl_options = {'INSERTKEY_VISUAL'}
 
 # poll - use predefined callback for selected bones/objects
 poll = RKS_POLL_selected_items
@@ -149,7 +149,7 @@ class BUILTIN_KSI_VisualRot(bpy.types.KeyingSetInfo):
 # iterator - use callback for selected bones/objects
 iterator = RKS_ITER_selected_item
 
-# generator - use callback for location
+# generator - use callback for rotation
 generate = RKS_GEN_rotation
 
 # VisualLocRot
@@ -164,7 +164,7 @@ class BUILTIN_KSI_VisualLocRot(bpy.types.KeyingSetInfo):
 # iterator - use callback for selected bones/objects
 iterator = RKS_ITER_selected_item
 
-# generator - use callback for location
+# generator
 def generate(self, context, ks, data):
 # location
 RKS_GEN_location(self, context, ks, data)
@@ -185,7 +185,7 @@ class BUILTIN_KSI_Available(bpy.types.KeyingSetInfo):
 # iterator - use callback for selected bones/objects
 iterator = RKS_ITER_selected_item
 
-# generator - use callback for location
+# generator - use callback for doing this
 generate = RKS_GEN_available
 
 ###############################
@@ -22,11 +22,12 @@
 This module has utility functions for renaming
 rna values in fcurves and drivers.
 
 The main function to use is: update_data_paths(...)
 """
 
 IS_TESTING = False
 
+
 class DataPathBuilder(object):
 __slots__ = ("data_path", )
 """ Dummy class used to parse fcurve and driver data paths.
@@ -37,7 +38,7 @@ class DataPathBuilder(object):
 def __getattr__(self, attr):
 str_value = ".%s" % attr
 return DataPathBuilder(self.data_path + (str_value, ))
 
 def __getitem__(self, key):
 str_value = '["%s"]' % key
 return DataPathBuilder(self.data_path + (str_value, ))
@@ -51,7 +52,7 @@ class DataPathBuilder(object):
 if base is not Ellipsis:
 try:
 # this only works when running with an old blender
 # where the old path will resolve
 base = eval("base" + item)
 except:
 base_new = Ellipsis
@@ -61,7 +62,7 @@ class DataPathBuilder(object):
 try:
 print("base." + item_new)
 base_new = eval("base." + item_new)
 break # found, dont keep looking
 except:
 pass
 
@@ -77,7 +78,7 @@ import bpy
 
 def id_iter():
 type_iter = type(bpy.data.objects)
 
 for attr in dir(bpy.data):
 data_iter = getattr(bpy.data, attr, None)
 if type(data_iter) == type_iter:
@@ -115,13 +116,13 @@ def find_path_new(id_data, data_path, rna_update_dict, rna_update_from_map):
|
|||||||
# ignore ID props for now
|
# ignore ID props for now
|
||||||
if data_path.startswith("["):
|
if data_path.startswith("["):
|
||||||
return data_path
|
return data_path
|
||||||
|
|
||||||
# recursive path fixing, likely will be one in most cases.
|
# recursive path fixing, likely will be one in most cases.
|
||||||
data_path_builder = eval("DataPathBuilder(tuple())." + data_path)
|
data_path_builder = eval("DataPathBuilder(tuple())." + data_path)
|
||||||
data_resolve = data_path_builder.resolve(id_data, rna_update_from_map)
|
data_resolve = data_path_builder.resolve(id_data, rna_update_from_map)
|
||||||
|
|
||||||
path_new = [pair[0] for pair in data_resolve]
|
path_new = [pair[0] for pair in data_resolve]
|
||||||
|
|
||||||
# print(data_resolve)
|
# print(data_resolve)
|
||||||
data_base = id_data
|
data_base = id_data
|
||||||
|
|
||||||
@@ -138,20 +139,20 @@ def find_path_new(id_data, data_path, rna_update_dict, rna_update_from_map):
|
|||||||
|
|
||||||
# set this as the base for further properties
|
# set this as the base for further properties
|
||||||
data_base = data
|
data_base = data
|
||||||
|
|
||||||
data_path_new = "".join(path_new)[1:] # skip the first "."
|
data_path_new = "".join(path_new)[1:] # skip the first "."
|
||||||
return data_path_new
|
return data_path_new
|
||||||
|
|
||||||
|
|
||||||
def update_data_paths(rna_update):
|
def update_data_paths(rna_update):
|
||||||
''' rna_update triple [(class_name, from, to), ...]
|
''' rna_update triple [(class_name, from, to), ...]
|
||||||
'''
|
'''
|
||||||
|
|
||||||
# make a faster lookup dict
|
# make a faster lookup dict
|
||||||
rna_update_dict = {}
|
rna_update_dict = {}
|
||||||
for ren_class, ren_from, ren_to in rna_update:
|
for ren_class, ren_from, ren_to in rna_update:
|
||||||
rna_update_dict.setdefault(ren_class, {})[ren_from] = ren_to
|
rna_update_dict.setdefault(ren_class, {})[ren_from] = ren_to
|
||||||
|
|
||||||
rna_update_from_map = {}
|
rna_update_from_map = {}
|
||||||
for ren_class, ren_from, ren_to in rna_update:
|
for ren_class, ren_from, ren_to in rna_update:
|
||||||
rna_update_from_map.setdefault(ren_from, []).append(ren_to)
|
rna_update_from_map.setdefault(ren_from, []).append(ren_to)
|
||||||
@@ -174,7 +175,7 @@ def update_data_paths(rna_update):
|
|||||||
for tar in var.targets:
|
for tar in var.targets:
|
||||||
id_data_other = tar.id
|
id_data_other = tar.id
|
||||||
data_path = tar.data_path
|
data_path = tar.data_path
|
||||||
|
|
||||||
if id_data_other and data_path:
|
if id_data_other and data_path:
|
||||||
data_path_new = find_path_new(id_data_other, data_path, rna_update_dict, rna_update_from_map)
|
data_path_new = find_path_new(id_data_other, data_path, rna_update_dict, rna_update_from_map)
|
||||||
# print(data_path_new)
|
# print(data_path_new)
|
||||||
@@ -182,9 +183,7 @@ def update_data_paths(rna_update):
|
|||||||
if not IS_TESTING:
|
if not IS_TESTING:
|
||||||
tar.data_path = data_path_new
|
tar.data_path = data_path_new
|
||||||
print("driver (%s): %s -> %s" % (id_data_other.name, data_path, data_path_new))
|
print("driver (%s): %s -> %s" % (id_data_other.name, data_path, data_path_new))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
for action in anim_data_actions(anim_data):
|
for action in anim_data_actions(anim_data):
|
||||||
for fcu in action.fcurves:
|
for fcu in action.fcurves:
|
||||||
data_path = fcu.data_path
|
data_path = fcu.data_path
|
||||||
|
|||||||
@@ -48,7 +48,7 @@ def _main():
|
|||||||
pydoc.getpager = lambda: pydoc.plainpager
|
pydoc.getpager = lambda: pydoc.plainpager
|
||||||
pydoc.Helper.getline = lambda self, prompt: None
|
pydoc.Helper.getline = lambda self, prompt: None
|
||||||
pydoc.TextDoc.use_bold = lambda self, text: text
|
pydoc.TextDoc.use_bold = lambda self, text: text
|
||||||
|
|
||||||
# Possibly temp. addons path
|
# Possibly temp. addons path
|
||||||
from os.path import join, dirname, normpath
|
from os.path import join, dirname, normpath
|
||||||
_sys.path.append(normpath(join(dirname(__file__), "..", "..", "addons", "modules")))
|
_sys.path.append(normpath(join(dirname(__file__), "..", "..", "addons", "modules")))
|
||||||
|
|||||||
@@ -137,13 +137,12 @@ class bpy_ops_submodule_op(object):
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def _scene_update(context):
|
def _scene_update(context):
|
||||||
scene = context.scene
|
scene = context.scene
|
||||||
if scene: # None in background mode
|
if scene: # None in background mode
|
||||||
scene.update()
|
scene.update()
|
||||||
else:
|
else:
|
||||||
import bpy
|
import bpy
|
||||||
for scene in bpy.data.scenes:
|
for scene in bpy.data.scenes:
|
||||||
scene.update()
|
scene.update()
|
||||||
|
|
||||||
|
|
||||||
__doc__ = property(_get_doc)
|
__doc__ = property(_get_doc)
|
||||||
|
|
||||||
@@ -196,7 +195,8 @@ class bpy_ops_submodule_op(object):
|
|||||||
as_string = op_as_string(idname)
|
as_string = op_as_string(idname)
|
||||||
op_class = getattr(bpy.types, idname)
|
op_class = getattr(bpy.types, idname)
|
||||||
descr = op_class.bl_rna.description
|
descr = op_class.bl_rna.description
|
||||||
# XXX, workaround for not registering every __doc__ to save time on load.
|
# XXX, workaround for not registering
|
||||||
|
# every __doc__ to save time on load.
|
||||||
if not descr:
|
if not descr:
|
||||||
descr = op_class.__doc__
|
descr = op_class.__doc__
|
||||||
if not descr:
|
if not descr:
|
||||||
|
|||||||
@@ -204,7 +204,7 @@ def module_names(path, recursive=False):
|
|||||||
|
|
||||||
for filename in sorted(_os.listdir(path)):
|
for filename in sorted(_os.listdir(path)):
|
||||||
if filename == "modules":
|
if filename == "modules":
|
||||||
pass # XXX, hard coded exception.
|
pass # XXX, hard coded exception.
|
||||||
elif filename.endswith(".py") and filename != "__init__.py":
|
elif filename.endswith(".py") and filename != "__init__.py":
|
||||||
fullpath = join(path, filename)
|
fullpath = join(path, filename)
|
||||||
modules.append((filename[0:-3], fullpath))
|
modules.append((filename[0:-3], fullpath))
|
||||||
|
|||||||
@@ -31,6 +31,7 @@ import bpy as _bpy
|
|||||||
import os as _os
|
import os as _os
|
||||||
import sys as _sys
|
import sys as _sys
|
||||||
|
|
||||||
|
|
||||||
def _test_import(module_name, loaded_modules):
|
def _test_import(module_name, loaded_modules):
|
||||||
import traceback
|
import traceback
|
||||||
import time
|
import time
|
||||||
@@ -203,13 +204,11 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
|
|||||||
# deal with addons separately
|
# deal with addons separately
|
||||||
addon_reset_all(reload_scripts)
|
addon_reset_all(reload_scripts)
|
||||||
|
|
||||||
|
|
||||||
# run the active integration preset
|
# run the active integration preset
|
||||||
filepath = preset_find(_bpy.context.user_preferences.inputs.active_keyconfig, "keyconfig")
|
filepath = preset_find(_bpy.context.user_preferences.inputs.active_keyconfig, "keyconfig")
|
||||||
if filepath:
|
if filepath:
|
||||||
keyconfig_set(filepath)
|
keyconfig_set(filepath)
|
||||||
|
|
||||||
|
|
||||||
if reload_scripts:
|
if reload_scripts:
|
||||||
import gc
|
import gc
|
||||||
print("gc.collect() -> %d" % gc.collect())
|
print("gc.collect() -> %d" % gc.collect())
|
||||||
@@ -368,7 +367,6 @@ def addon_enable(module_name, default_set=True):
|
|||||||
import bpy_types as _bpy_types
|
import bpy_types as _bpy_types
|
||||||
import imp
|
import imp
|
||||||
|
|
||||||
|
|
||||||
_bpy_types._register_immediate = False
|
_bpy_types._register_immediate = False
|
||||||
|
|
||||||
def handle_error():
|
def handle_error():
|
||||||
@@ -376,7 +374,6 @@ def addon_enable(module_name, default_set=True):
|
|||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
_bpy_types._register_immediate = True
|
_bpy_types._register_immediate = True
|
||||||
|
|
||||||
|
|
||||||
# reload if the mtime changes
|
# reload if the mtime changes
|
||||||
mod = sys.modules.get(module_name)
|
mod = sys.modules.get(module_name)
|
||||||
if mod:
|
if mod:
|
||||||
@@ -428,7 +425,7 @@ def addon_enable(module_name, default_set=True):
|
|||||||
if not ext:
|
if not ext:
|
||||||
ext = _bpy.context.user_preferences.addons.new()
|
ext = _bpy.context.user_preferences.addons.new()
|
||||||
ext.module = module_name
|
ext.module = module_name
|
||||||
|
|
||||||
_bpy_types._register_immediate = True
|
_bpy_types._register_immediate = True
|
||||||
|
|
||||||
mod.__addon_enabled__ = True
|
mod.__addon_enabled__ = True
|
||||||
@@ -471,7 +468,7 @@ def addon_disable(module_name, default_set=True):
|
|||||||
addon = addons.get(module_name)
|
addon = addons.get(module_name)
|
||||||
if addon:
|
if addon:
|
||||||
addons.remove(addon)
|
addons.remove(addon)
|
||||||
|
|
||||||
print("\tbpy.utils.addon_disable", module_name)
|
print("\tbpy.utils.addon_disable", module_name)
|
||||||
|
|
||||||
|
|
||||||
@@ -483,10 +480,10 @@ def addon_reset_all(reload_scripts=False):
|
|||||||
|
|
||||||
# RELEASE SCRIPTS: official scripts distributed in Blender releases
|
# RELEASE SCRIPTS: official scripts distributed in Blender releases
|
||||||
paths = script_paths("addons")
|
paths = script_paths("addons")
|
||||||
|
|
||||||
# CONTRIB SCRIPTS: good for testing but not official scripts yet
|
# CONTRIB SCRIPTS: good for testing but not official scripts yet
|
||||||
paths += script_paths("addons_contrib")
|
paths += script_paths("addons_contrib")
|
||||||
|
|
||||||
# EXTERN SCRIPTS: external projects scripts
|
# EXTERN SCRIPTS: external projects scripts
|
||||||
paths += script_paths("addons_extern")
|
paths += script_paths("addons_extern")
|
||||||
|
|
||||||
@@ -513,9 +510,9 @@ def addon_reset_all(reload_scripts=False):
|
|||||||
def preset_find(name, preset_path, display_name=False):
|
def preset_find(name, preset_path, display_name=False):
|
||||||
if not name:
|
if not name:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
for directory in preset_paths(preset_path):
|
for directory in preset_paths(preset_path):
|
||||||
|
|
||||||
if display_name:
|
if display_name:
|
||||||
filename = ""
|
filename = ""
|
||||||
for fn in _os.listdir(directory):
|
for fn in _os.listdir(directory):
|
||||||
@@ -558,7 +555,7 @@ def keyconfig_set(filepath):
|
|||||||
keyconfigs.remove(kc_dupe)
|
keyconfigs.remove(kc_dupe)
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
|
|
||||||
kc_new.name = name
|
kc_new.name = name
|
||||||
keyconfigs.active = kc_new
|
keyconfigs.active = kc_new
|
||||||
|
|
||||||
@@ -595,4 +592,3 @@ def user_resource(type, path="", create=False):
|
|||||||
target_path = ""
|
target_path = ""
|
||||||
|
|
||||||
return target_path
|
return target_path
|
||||||
|
|
||||||
|
|||||||
@@ -638,7 +638,7 @@ class OrderedMeta(RNAMeta):
|
|||||||
# with doc generation 'self.properties.bl_rna.properties' can fail
|
# with doc generation 'self.properties.bl_rna.properties' can fail
|
||||||
class Operator(StructRNA, metaclass=OrderedMeta):
|
class Operator(StructRNA, metaclass=OrderedMeta):
|
||||||
__slots__ = ()
|
__slots__ = ()
|
||||||
|
|
||||||
def __getattribute__(self, attr):
|
def __getattribute__(self, attr):
|
||||||
properties = StructRNA.path_resolve(self, "properties")
|
properties = StructRNA.path_resolve(self, "properties")
|
||||||
bl_rna = getattr(properties, "bl_rna", None)
|
bl_rna = getattr(properties, "bl_rna", None)
|
||||||
@@ -745,7 +745,7 @@ class Menu(StructRNA, _GenericUI, metaclass=RNAMeta):
|
|||||||
import bpy.utils
|
import bpy.utils
|
||||||
|
|
||||||
layout = self.layout
|
layout = self.layout
|
||||||
|
|
||||||
if not searchpaths:
|
if not searchpaths:
|
||||||
layout.label("* Missing Paths *")
|
layout.label("* Missing Paths *")
|
||||||
|
|
||||||
|
|||||||
@@ -60,7 +60,7 @@ def rna_idprop_ui_prop_clear(item, prop):
|
|||||||
|
|
||||||
def rna_idprop_context_value(context, context_member, property_type):
|
def rna_idprop_context_value(context, context_member, property_type):
|
||||||
space = context.space_data
|
space = context.space_data
|
||||||
|
|
||||||
if space is None or isinstance(space, bpy.types.SpaceProperties):
|
if space is None or isinstance(space, bpy.types.SpaceProperties):
|
||||||
pin_id = space.pin_id
|
pin_id = space.pin_id
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -25,83 +25,87 @@ import bgl
|
|||||||
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
def cutPoint(text, length):
|
|
||||||
"Returns position of the last space found before 'length' chars"
|
|
||||||
l = length
|
|
||||||
c = text[l]
|
|
||||||
while c != ' ':
|
|
||||||
l -= 1
|
|
||||||
if l == 0: return length # no space found
|
|
||||||
c = text[l]
|
|
||||||
return l
|
|
||||||
|
|
||||||
def textWrap(text, length = 70):
|
def cutPoint(text, length):
|
||||||
lines = []
|
"Returns position of the last space found before 'length' chars"
|
||||||
while len(text) > 70:
|
l = length
|
||||||
cpt = cutPoint(text, length)
|
c = text[l]
|
||||||
line, text = text[:cpt], text[cpt + 1:]
|
while c != ' ':
|
||||||
lines.append(line)
|
l -= 1
|
||||||
lines.append(text)
|
if l == 0:
|
||||||
return lines
|
return length # no space found
|
||||||
|
c = text[l]
|
||||||
|
return l
|
||||||
|
|
||||||
|
|
||||||
|
def textWrap(text, length=70):
|
||||||
|
lines = []
|
||||||
|
while len(text) > 70:
|
||||||
|
cpt = cutPoint(text, length)
|
||||||
|
line, text = text[:cpt], text[cpt + 1:]
|
||||||
|
lines.append(line)
|
||||||
|
lines.append(text)
|
||||||
|
return lines
|
||||||
|
|
||||||
|
|
||||||
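A quick standalone check of the wrapping helpers above (the extension names are fabricated):

ext_string = " ".join("GL_EXT_example_%d" % i for i in range(40))
for chunk in textWrap(ext_string, 70):
    print(len(chunk), chunk)  # chunks break at the last space before column 70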
def write_sysinfo(op):
|
def write_sysinfo(op):
|
||||||
output_filename = "system-info.txt"
|
output_filename = "system-info.txt"
|
||||||
warnings = 0
|
warnings = 0
|
||||||
notices = 0
|
notices = 0
|
||||||
|
|
||||||
if output_filename in bpy.data.texts.keys():
|
if output_filename in bpy.data.texts.keys():
|
||||||
output = bpy.data.texts[output_filename]
|
output = bpy.data.texts[output_filename]
|
||||||
output.clear()
|
output.clear()
|
||||||
else:
|
else:
|
||||||
output = bpy.data.texts.new(name=output_filename)
|
output = bpy.data.texts.new(name=output_filename)
|
||||||
|
|
||||||
header = '= Blender {} System Information =\n'.format(bpy.app.version_string)
|
header = '= Blender {} System Information =\n'.format(bpy.app.version_string)
|
||||||
lilies = '{}\n\n'.format(len(header)*'=')
|
lilies = '{}\n\n'.format(len(header) * '=')
|
||||||
firstlilies = '{}\n'.format(len(header)*'=')
|
firstlilies = '{}\n'.format(len(header) * '=')
|
||||||
output.write(firstlilies)
|
output.write(firstlilies)
|
||||||
output.write(header)
|
output.write(header)
|
||||||
output.write(lilies)
|
output.write(lilies)
|
||||||
|
|
||||||
# build info
|
# build info
|
||||||
output.write('\nBlender:\n')
|
output.write('\nBlender:\n')
|
||||||
output.write(lilies)
|
output.write(lilies)
|
||||||
output.write('version {}, revision {}. {}\n'.format(bpy.app.version_string, bpy.app.build_revision, bpy.app.build_type))
|
output.write('version {}, revision {}. {}\n'.format(bpy.app.version_string, bpy.app.build_revision, bpy.app.build_type))
|
||||||
output.write('build date: {}, {}\n'.format(bpy.app.build_date, bpy.app.build_time))
|
output.write('build date: {}, {}\n'.format(bpy.app.build_date, bpy.app.build_time))
|
||||||
output.write('platform: {}\n'.format(bpy.app.build_platform))
|
output.write('platform: {}\n'.format(bpy.app.build_platform))
|
||||||
output.write('binary path: {}\n'.format(bpy.app.binary_path))
|
output.write('binary path: {}\n'.format(bpy.app.binary_path))
|
||||||
output.write('build cflags: {}\n'.format(bpy.app.build_cflags))
|
output.write('build cflags: {}\n'.format(bpy.app.build_cflags))
|
||||||
output.write('build cxxflags: {}\n'.format(bpy.app.build_cxxflags))
|
output.write('build cxxflags: {}\n'.format(bpy.app.build_cxxflags))
|
||||||
output.write('build linkflags: {}\n'.format(bpy.app.build_linkflags))
|
output.write('build linkflags: {}\n'.format(bpy.app.build_linkflags))
|
||||||
output.write('build system: {}\n'.format(bpy.app.build_system))
|
output.write('build system: {}\n'.format(bpy.app.build_system))
|
||||||
|
|
||||||
# python info
|
# python info
|
||||||
output.write('\nPython:\n')
|
output.write('\nPython:\n')
|
||||||
output.write(lilies)
|
output.write(lilies)
|
||||||
output.write('version: {}\n'.format(sys.version))
|
output.write('version: {}\n'.format(sys.version))
|
||||||
output.write('paths:\n')
|
output.write('paths:\n')
|
||||||
for p in sys.path:
|
for p in sys.path:
|
||||||
output.write('\t{}\n'.format(p))
|
output.write('\t{}\n'.format(p))
|
||||||
|
|
||||||
output.write('\nDirectories:\n')
|
output.write('\nDirectories:\n')
|
||||||
output.write(lilies)
|
output.write(lilies)
|
||||||
output.write('scripts: {}\n'.format(bpy.utils.script_paths()))
|
output.write('scripts: {}\n'.format(bpy.utils.script_paths()))
|
||||||
output.write('user scripts: {}\n'.format(bpy.utils.user_script_path()))
|
output.write('user scripts: {}\n'.format(bpy.utils.user_script_path()))
|
||||||
output.write('datafiles: {}\n'.format(bpy.utils.user_resource('DATAFILES')))
|
output.write('datafiles: {}\n'.format(bpy.utils.user_resource('DATAFILES')))
|
||||||
output.write('config: {}\n'.format(bpy.utils.user_resource('CONFIG')))
|
output.write('config: {}\n'.format(bpy.utils.user_resource('CONFIG')))
|
||||||
output.write('scripts : {}\n'.format(bpy.utils.user_resource('SCRIPTS')))
|
output.write('scripts : {}\n'.format(bpy.utils.user_resource('SCRIPTS')))
|
||||||
output.write('autosave: {}\n'.format(bpy.utils.user_resource('AUTOSAVE')))
|
output.write('autosave: {}\n'.format(bpy.utils.user_resource('AUTOSAVE')))
|
||||||
output.write('tempdir: {}\n'.format(bpy.app.tempdir))
|
output.write('tempdir: {}\n'.format(bpy.app.tempdir))
|
||||||
|
|
||||||
output.write('\nOpenGL\n')
|
output.write('\nOpenGL\n')
|
||||||
output.write(lilies)
|
output.write(lilies)
|
||||||
output.write('renderer:\t{}\n'.format(bgl.glGetString(bgl.GL_RENDERER)))
|
output.write('renderer:\t{}\n'.format(bgl.glGetString(bgl.GL_RENDERER)))
|
||||||
output.write('vendor:\t\t{}\n'.format(bgl.glGetString(bgl.GL_VENDOR)))
|
output.write('vendor:\t\t{}\n'.format(bgl.glGetString(bgl.GL_VENDOR)))
|
||||||
output.write('version:\t{}\n'.format(bgl.glGetString(bgl.GL_VERSION)))
|
output.write('version:\t{}\n'.format(bgl.glGetString(bgl.GL_VERSION)))
|
||||||
output.write('extensions:\n')
|
output.write('extensions:\n')
|
||||||
|
|
||||||
glext = bgl.glGetString(bgl.GL_EXTENSIONS)
|
glext = bgl.glGetString(bgl.GL_EXTENSIONS)
|
||||||
glext = textWrap(glext, 70)
|
glext = textWrap(glext, 70)
|
||||||
for l in glext:
|
for l in glext:
|
||||||
output.write('\t\t{}\n'.format(l))
|
output.write('\t\t{}\n'.format(l))
|
||||||
|
|
||||||
op.report({'INFO'}, "System information generated in 'system-info.txt'")
|
op.report({'INFO'}, "System information generated in 'system-info.txt'")
|
||||||
|
|||||||
@@ -699,5 +699,6 @@ class UpdateAnimData(bpy.types.Operator):
|
|||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
bpy.ops.anim.update_data_paths()
|
bpy.ops.anim.update_data_paths()
|
||||||
|
|
||||||
|
|
||||||
def register():
|
def register():
|
||||||
pass
|
pass
|
||||||
|
|||||||
@@ -82,9 +82,9 @@ def get_console(console_id):
|
|||||||
namespace["__builtins__"] = sys.modules["builtins"]
|
namespace["__builtins__"] = sys.modules["builtins"]
|
||||||
namespace["bpy"] = bpy
|
namespace["bpy"] = bpy
|
||||||
namespace["C"] = bpy.context
|
namespace["C"] = bpy.context
|
||||||
|
|
||||||
namespace.update(__import__("mathutils").__dict__) # from mathutils import *
|
namespace.update(__import__("mathutils").__dict__) # from mathutils import *
|
||||||
namespace.update(__import__("math").__dict__) # from math import *
|
namespace.update(__import__("math").__dict__) # from math import *
|
||||||
|
|
||||||
console = InteractiveConsole(locals=namespace, filename="<blender_console>")
|
console = InteractiveConsole(locals=namespace, filename="<blender_console>")
|
||||||
|
|
||||||
@@ -186,7 +186,7 @@ def execute(context):
|
|||||||
|
|
||||||
# restore the stdin
|
# restore the stdin
|
||||||
sys.stdin = stdin_backup
|
sys.stdin = stdin_backup
|
||||||
|
|
||||||
# execute any hooks
|
# execute any hooks
|
||||||
for func, args in execute.hooks:
|
for func, args in execute.hooks:
|
||||||
func(*args)
|
func(*args)
|
||||||
|
|||||||
@@ -27,18 +27,18 @@ if "bpy" in locals():
|
|||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
from bpy.props import *
|
from bpy.props import *
|
||||||
from io_utils import ImportHelper
|
from io_utils import ImportHelper, ExportHelper
|
||||||
|
|
||||||
|
|
||||||
class BvhImporter(bpy.types.Operator, ImportHelper):
|
class BvhImporter(bpy.types.Operator, ImportHelper):
|
||||||
'''Load a OBJ Motion Capture File'''
|
'''Load a BVH motion capture file'''
|
||||||
bl_idname = "import_anim.bvh"
|
bl_idname = "import_anim.bvh"
|
||||||
bl_label = "Import BVH"
|
bl_label = "Import BVH"
|
||||||
|
|
||||||
filename_ext = ".bvh"
|
filename_ext = ".bvh"
|
||||||
filter_glob = StringProperty(default="*.bvh", options={'HIDDEN'})
|
filter_glob = StringProperty(default="*.bvh", options={'HIDDEN'})
|
||||||
|
|
||||||
scale = FloatProperty(name="Scale", description="Scale the BVH by this value", min=0.0001, max=1000000.0, soft_min=0.001, soft_max=100.0, default=0.1)
|
global_scale = FloatProperty(name="Scale", description="Scale the BVH by this value", min=0.0001, max=1000000.0, soft_min=0.001, soft_max=100.0, default=1.0)
|
||||||
frame_start = IntProperty(name="Start Frame", description="Starting frame for the animation", default=1)
|
frame_start = IntProperty(name="Start Frame", description="Starting frame for the animation", default=1)
|
||||||
use_cyclic = BoolProperty(name="Loop", description="Loop the animation playback", default=False)
|
use_cyclic = BoolProperty(name="Loop", description="Loop the animation playback", default=False)
|
||||||
rotate_mode = EnumProperty(items=(
|
rotate_mode = EnumProperty(items=(
|
||||||
@@ -60,16 +60,54 @@ class BvhImporter(bpy.types.Operator, ImportHelper):
|
|||||||
return import_bvh.load(self, context, **self.as_keywords(ignore=("filter_glob",)))
|
return import_bvh.load(self, context, **self.as_keywords(ignore=("filter_glob",)))
|
||||||
|
|
||||||
|
|
||||||
def menu_func(self, context):
|
class BvhExporter(bpy.types.Operator, ExportHelper):
|
||||||
|
'''Save a BVH motion capture file from an armature'''
|
||||||
|
bl_idname = "export_anim.bvh"
|
||||||
|
bl_label = "Export BVH"
|
||||||
|
|
||||||
|
filename_ext = ".bvh"
|
||||||
|
filter_glob = StringProperty(default="*.bvh", options={'HIDDEN'})
|
||||||
|
|
||||||
|
global_scale = FloatProperty(name="Scale", description="Scale the BVH by this value", min=0.0001, max=1000000.0, soft_min=0.001, soft_max=100.0, default=1.0)
|
||||||
|
frame_start = IntProperty(name="Start Frame", description="Starting frame to export", default=0)
|
||||||
|
frame_end = IntProperty(name="End Frame", description="End frame to export", default=0)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
obj = context.object
|
||||||
|
return obj and obj.type == 'ARMATURE'
|
||||||
|
|
||||||
|
def invoke(self, context, event):
|
||||||
|
self.frame_start = context.scene.frame_start
|
||||||
|
self.frame_end = context.scene.frame_end
|
||||||
|
|
||||||
|
return super().invoke(context, event)
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
if self.frame_start == 0 and self.frame_end == 0:
|
||||||
|
self.frame_start = context.scene.frame_start
|
||||||
|
self.frame_end = context.scene.frame_end
|
||||||
|
|
||||||
|
from . import export_bvh
|
||||||
|
return export_bvh.save(self, context, **self.as_keywords(ignore=("check_existing", "filter_glob")))
|
||||||
|
|
||||||
|
|
||||||
|
def menu_func_import(self, context):
|
||||||
self.layout.operator(BvhImporter.bl_idname, text="Motion Capture (.bvh)")
|
self.layout.operator(BvhImporter.bl_idname, text="Motion Capture (.bvh)")
|
||||||
|
|
||||||
|
|
||||||
|
def menu_func_export(self, context):
|
||||||
|
self.layout.operator(BvhExporter.bl_idname, text="Motion Capture (.bvh)")
|
||||||
|
|
||||||
|
|
||||||
def register():
|
def register():
|
||||||
bpy.types.INFO_MT_file_import.append(menu_func)
|
bpy.types.INFO_MT_file_import.append(menu_func_import)
|
||||||
|
bpy.types.INFO_MT_file_export.append(menu_func_export)
|
||||||
|
|
||||||
|
|
||||||
def unregister():
|
def unregister():
|
||||||
bpy.types.INFO_MT_file_import.remove(menu_func)
|
bpy.types.INFO_MT_file_import.remove(menu_func_import)
|
||||||
|
bpy.types.INFO_MT_file_export.remove(menu_func_export)
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
register()
|
register()
|
||||||
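Once registered, the new exporter can also be driven from the operator API; a hedged sketch, assuming an armature object is active and using an example output path:

import bpy
bpy.ops.export_anim.bvh(filepath="/tmp/walk.bvh",  # example path
                        frame_start=1, frame_end=100,
                        global_scale=1.0)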
|
|||||||
236
release/scripts/op/io_anim_bvh/export_bvh.py
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or
|
||||||
|
# modify it under the terms of the GNU General Public License
|
||||||
|
# as published by the Free Software Foundation; either version 2
|
||||||
|
# of the License, or (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program; if not, write to the Free Software Foundation,
|
||||||
|
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
# <pep8 compliant>
|
||||||
|
|
||||||
|
# Script copyright (C) Campbell Barton
|
||||||
|
# fixes from Andrea Rugliancich
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
def _read(context, filepath, frame_start, frame_end, global_scale=1.0):
|
||||||
|
|
||||||
|
from mathutils import Matrix, Vector, Euler
|
||||||
|
from math import degrees
|
||||||
|
|
||||||
|
file = open(filepath, "w")
|
||||||
|
|
||||||
|
obj = context.object
|
||||||
|
arm = obj.data
|
||||||
|
|
||||||
|
# Build a dictionary of bone children.
|
||||||
|
# None is for parentless bones
|
||||||
|
bone_children = {None: []}
|
||||||
|
|
||||||
|
# initialize with blank lists
|
||||||
|
for bone in arm.bones:
|
||||||
|
bone_children[bone.name] = []
|
||||||
|
|
||||||
|
for bone in arm.bones:
|
||||||
|
bone_children[getattr(bone.parent, "name", None)].append(bone.name)
|
||||||
|
|
||||||
|
# sort the children
|
||||||
|
for children_list in bone_children.values():
|
||||||
|
children_list.sort()
|
||||||
|
|
||||||
|
# bone name list in the order that the bones are written
|
||||||
|
bones_serialized_names = []
|
||||||
|
|
||||||
|
bone_locs = {}
|
||||||
|
|
||||||
|
file.write("HIERARCHY\n")
|
||||||
|
|
||||||
|
def write_bones_recursive(bone_name, indent):
|
||||||
|
my_bone_children = bone_children[bone_name]
|
||||||
|
|
||||||
|
indent_str = "\t" * indent
|
||||||
|
|
||||||
|
bone = arm.bones[bone_name]
|
||||||
|
loc = bone.head_local
|
||||||
|
bone_locs[bone_name] = loc
|
||||||
|
|
||||||
|
# make relative if we can
|
||||||
|
if bone.parent:
|
||||||
|
loc = loc - bone_locs[bone.parent.name]
|
||||||
|
|
||||||
|
if indent:
|
||||||
|
file.write("%sJOINT %s\n" % (indent_str, bone_name))
|
||||||
|
else:
|
||||||
|
file.write("%sROOT %s\n" % (indent_str, bone_name))
|
||||||
|
|
||||||
|
file.write("%s{\n" % indent_str)
|
||||||
|
file.write("%s\tOFFSET %.6f %.6f %.6f\n" % (indent_str, loc.x * global_scale, loc.y * global_scale, loc.z * global_scale))
|
||||||
|
file.write("%s\tCHANNELS 6 Xposition Yposition Zposition Xrotation Yrotation Zrotation\n" % indent_str)
|
||||||
|
|
||||||
|
if my_bone_children:
|
||||||
|
# store the location for the children
|
||||||
|
# to het their relative offset
|
||||||
|
|
||||||
|
# Write children
|
||||||
|
for child_bone in my_bone_children:
|
||||||
|
bones_serialized_names.append(child_bone)
|
||||||
|
write_bones_recursive(child_bone, indent + 1)
|
||||||
|
|
||||||
|
else:
|
||||||
|
# Write the bone end.
|
||||||
|
file.write("%s\tEnd Site\n" % indent_str)
|
||||||
|
file.write("%s\t{\n" % indent_str)
|
||||||
|
loc = bone.tail_local - bone_locs[bone_name]
|
||||||
|
file.write("%s\t\tOFFSET %.6f %.6f %.6f\n" % (indent_str, loc.x * global_scale, loc.y * global_scale, loc.z * global_scale))
|
||||||
|
file.write("%s\t}\n" % indent_str)
|
||||||
|
|
||||||
|
file.write("%s}\n" % indent_str)
|
||||||
|
|
||||||
|
if len(bone_children[None]) == 1:
|
||||||
|
key = bone_children[None][0]
|
||||||
|
bones_serialized_names.append(key)
|
||||||
|
indent = 0
|
||||||
|
|
||||||
|
write_bones_recursive(key, indent)
|
||||||
|
|
||||||
|
else:
|
||||||
|
# Write a dummy parent node
|
||||||
|
file.write("ROOT %s\n" % key)
|
||||||
|
file.write("{\n")
|
||||||
|
file.write("\tOFFSET 0.0 0.0 0.0\n")
|
||||||
|
file.write("\tCHANNELS 0\n") # Xposition Yposition Zposition Xrotation Yrotation Zrotation
|
||||||
|
key = None
|
||||||
|
indent = 1
|
||||||
|
|
||||||
|
write_bones_recursive(key, indent)
|
||||||
|
|
||||||
|
file.write("}\n")
|
||||||
|
|
||||||
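For reference, the HIERARCHY block written by the code above looks like this for a single hypothetical root bone with no children (the bone name and offsets are illustrative only):

HIERARCHY
ROOT Hips
{
    OFFSET 0.000000 0.000000 0.000000
    CHANNELS 6 Xposition Yposition Zposition Xrotation Yrotation Zrotation
    End Site
    {
        OFFSET 0.000000 1.000000 0.000000
    }
}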
|
# redefine bones as sorted by bones_serialized_names
|
||||||
|
# so we can write motion
|
||||||
|
|
||||||
|
class decorated_bone(object):
|
||||||
|
__slots__ = (\
|
||||||
|
"name", # bone name, used as key in many places
|
||||||
|
"parent", # decorated bone parent, set in a later loop
|
||||||
|
"rest_bone", # blender armature bone
|
||||||
|
"pose_bone", # blender pose bone
|
||||||
|
"pose_mat", # blender pose matrix
|
||||||
|
"rest_arm_mat", # blender rest matrix (armature space)
|
||||||
|
"rest_local_mat", # blender rest matrix (local space)
|
||||||
|
"pose_imat", # pose_mat inverted
|
||||||
|
"rest_arm_imat", # rest_arm_mat inverted
|
||||||
|
"rest_local_imat", # rest_local_mat inverted
|
||||||
|
"prev_euler", # last used euler, to preserve euler compatibility between keyframes
|
||||||
|
)
|
||||||
|
def __init__(self, bone_name):
|
||||||
|
self.name = bone_name
|
||||||
|
self.rest_bone = arm.bones[bone_name]
|
||||||
|
self.pose_bone = obj.pose.bones[bone_name]
|
||||||
|
|
||||||
|
self.pose_mat = self.pose_bone.matrix
|
||||||
|
|
||||||
|
mat = self.rest_bone.matrix
|
||||||
|
self.rest_arm_mat = self.rest_bone.matrix_local
|
||||||
|
self.rest_local_mat = self.rest_bone.matrix
|
||||||
|
|
||||||
|
# inverted mats
|
||||||
|
self.pose_imat = self.pose_mat.copy().invert()
|
||||||
|
self.rest_arm_imat = self.rest_arm_mat.copy().invert()
|
||||||
|
self.rest_local_imat = self.rest_local_mat.copy().invert()
|
||||||
|
|
||||||
|
self.parent = None
|
||||||
|
self.prev_euler = Euler((0.0, 0.0, 0.0))
|
||||||
|
|
||||||
|
def update_posedata(self):
|
||||||
|
self.pose_mat = self.pose_bone.matrix
|
||||||
|
self.pose_imat = self.pose_mat.copy().invert()
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
if self.parent:
|
||||||
|
return "[\"%s\" child on \"%s\"]\n" % (self.name, self.parent.name)
|
||||||
|
else:
|
||||||
|
return "[\"%s\" root bone]\n" % (self.name)
|
||||||
|
|
||||||
|
bones_decorated = [decorated_bone(bone_name) for bone_name in bones_serialized_names]
|
||||||
|
|
||||||
|
# Assign parents
|
||||||
|
bones_decorated_dict = {}
|
||||||
|
for dbone in bones_decorated:
|
||||||
|
bones_decorated_dict[dbone.name] = dbone
|
||||||
|
|
||||||
|
for dbone in bones_decorated:
|
||||||
|
parent = dbone.rest_bone.parent
|
||||||
|
if parent:
|
||||||
|
dbone.parent = bones_decorated_dict[parent.name]
|
||||||
|
del bones_decorated_dict
|
||||||
|
# finish assigning parents
|
||||||
|
|
||||||
|
scene = bpy.context.scene
|
||||||
|
|
||||||
|
file.write("MOTION\n")
|
||||||
|
file.write("Frames: %d\n" % (frame_end - frame_start + 1))
|
||||||
|
file.write("Frame Time: %.6f\n" % (1.0 / (scene.render.fps / scene.render.fps_base)))
|
||||||
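A worked example of the frame-time value written above, assuming a 24 fps scene with fps_base 1.0:

fps, fps_base = 24, 1.0
print("Frame Time: %.6f" % (1.0 / (fps / fps_base)))  # -> Frame Time: 0.041667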
|
|
||||||
|
for frame in range(frame_start, frame_end + 1):
|
||||||
|
scene.frame_set(frame)
|
||||||
|
|
||||||
|
for dbone in bones_decorated:
|
||||||
|
dbone.update_posedata()
|
||||||
|
|
||||||
|
for dbone in bones_decorated:
|
||||||
|
trans = Matrix.Translation(dbone.rest_bone.head_local)
|
||||||
|
itrans = Matrix.Translation(-dbone.rest_bone.head_local)
|
||||||
|
|
||||||
|
if dbone.parent:
|
||||||
|
mat_final = dbone.parent.rest_arm_mat * dbone.parent.pose_imat * dbone.pose_mat * dbone.rest_arm_imat
|
||||||
|
mat_final = itrans * mat_final * trans
|
||||||
|
loc = mat_final.translation_part() + (dbone.rest_bone.head_local - dbone.parent.rest_bone.head_local)
|
||||||
|
else:
|
||||||
|
mat_final = dbone.pose_mat * dbone.rest_arm_imat
|
||||||
|
mat_final = itrans * mat_final * trans
|
||||||
|
loc = mat_final.translation_part() + dbone.rest_bone.head
|
||||||
|
|
||||||
|
# keep eulers compatible, no jumping on interpolation.
|
||||||
|
rot = mat_final.rotation_part().invert().to_euler('XYZ', dbone.prev_euler)
|
||||||
|
|
||||||
|
file.write("%.6f %.6f %.6f " % (loc * global_scale)[:])
|
||||||
|
file.write("%.6f %.6f %.6f " % (-degrees(rot[0]), -degrees(rot[1]), -degrees(rot[2])))
|
||||||
|
|
||||||
|
dbone.prev_euler = rot
|
||||||
|
|
||||||
|
file.write("\n")
|
||||||
|
|
||||||
|
file.close()
|
||||||
|
|
||||||
|
print("BVH Exported: %s frames:%d\n" % (filepath, frame_end - frame_start + 1))
|
||||||
|
|
||||||
|
|
||||||
|
def save(operator, context, filepath="",
|
||||||
|
frame_start=-1,
|
||||||
|
frame_end=-1,
|
||||||
|
global_scale=1.0,
|
||||||
|
):
|
||||||
|
|
||||||
|
_read(context, filepath,
|
||||||
|
frame_start=frame_start,
|
||||||
|
frame_end=frame_end,
|
||||||
|
global_scale=global_scale,
|
||||||
|
)
|
||||||
|
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
scene = bpy.context.scene
|
||||||
|
_read(bpy.data.filepath.rstrip(".blend") + ".bvh", bpy.context.object, scene.frame_start, scene.frame_end, 1.0)
|
||||||
@@ -30,19 +30,28 @@ from mathutils import Vector, Euler, Matrix
|
|||||||
|
|
||||||
class bvh_node_class(object):
|
class bvh_node_class(object):
|
||||||
__slots__ = (
|
__slots__ = (
|
||||||
'name',# bvh joint name
|
'name', # bvh joint name
|
||||||
'parent',# bvh_node_class type or None for no parent
|
'parent', # bvh_node_class type or None for no parent
|
||||||
'children',# a list of children of this type.
|
'children', # a list of children of this type.
|
||||||
'rest_head_world',# worldspace rest location for the head of this node
|
'rest_head_world', # worldspace rest location for the head of this node
|
||||||
'rest_head_local',# localspace rest location for the head of this node
|
'rest_head_local', # localspace rest location for the head of this node
|
||||||
'rest_tail_world',# # worldspace rest location for the tail of this node
|
'rest_tail_world', # worldspace rest location for the tail of this node
|
||||||
'rest_tail_local',# # worldspace rest location for the tail of this node
|
'rest_tail_local', # worldspace rest location for the tail of this node
|
||||||
'channels',# list of 6 ints, -1 for an unused channel, otherwise an index for the BVH motion data lines, lock triple then rot triple
|
'channels', # list of 6 ints, -1 for an unused channel, otherwise an index for the BVH motion data lines, lock triple then rot triple
|
||||||
'rot_order',# a triple of indicies as to the order rotation is applied. [0,1,2] is x/y/z - [None, None, None] if no rotation.
|
'rot_order', # a triple of indices giving the order rotation is applied. [0,1,2] is x/y/z - [None, None, None] if no rotation.
|
||||||
'anim_data',# a list one tuple's one for each frame. (locx, locy, locz, rotx, roty, rotz)
|
'rot_order_str', # same as above but a string 'XYZ' format.
|
||||||
'has_loc',# Conveinience function, bool, same as (channels[0]!=-1 or channels[1]!=-1 channels[2]!=-1)
|
'anim_data', # a list of tuples, one per frame: (locx, locy, locz, rotx, roty, rotz); euler rotation is ALWAYS stored in xyz order, even when the native order is used.
|
||||||
'has_rot',# Conveinience function, bool, same as (channels[3]!=-1 or channels[4]!=-1 channels[5]!=-1)
|
'has_loc', # Convenience flag, bool, same as (channels[0]!=-1 or channels[1]!=-1 or channels[2]!=-1)
|
||||||
'temp')# use this for whatever you want
|
'has_rot', # Convenience flag, bool, same as (channels[3]!=-1 or channels[4]!=-1 or channels[5]!=-1)
|
||||||
|
'temp') # use this for whatever you want
|
||||||
|
|
||||||
|
_eul_order_lookup = {\
|
||||||
|
(0, 1, 2): 'XYZ',
|
||||||
|
(0, 2, 1): 'XZY',
|
||||||
|
(1, 0, 2): 'YXZ',
|
||||||
|
(1, 2, 0): 'YZX',
|
||||||
|
(2, 0, 1): 'ZXY',
|
||||||
|
(2, 1, 0): 'ZYX'}
|
||||||
|
|
||||||
def __init__(self, name, rest_head_world, rest_head_local, parent, channels, rot_order):
|
def __init__(self, name, rest_head_world, rest_head_local, parent, channels, rot_order):
|
||||||
self.name = name
|
self.name = name
|
||||||
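A small sketch of the lookup added above, mapping a channel-order triple to the euler order string stored in rot_order_str (the example triple is one that a joint listing Zrotation Xrotation Yrotation would typically produce):

_eul_order_lookup = {
    (0, 1, 2): 'XYZ', (0, 2, 1): 'XZY', (1, 0, 2): 'YXZ',
    (1, 2, 0): 'YZX', (2, 0, 1): 'ZXY', (2, 1, 0): 'ZYX'}
print(_eul_order_lookup[(2, 0, 1)])  # -> 'ZXY'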
@@ -52,13 +61,13 @@ class bvh_node_class(object):
|
|||||||
self.rest_tail_local = None
|
self.rest_tail_local = None
|
||||||
self.parent = parent
|
self.parent = parent
|
||||||
self.channels = channels
|
self.channels = channels
|
||||||
self.rot_order = rot_order
|
self.rot_order = tuple(rot_order)
|
||||||
|
self.rot_order_str = __class__._eul_order_lookup[self.rot_order]
|
||||||
|
|
||||||
# convenience functions
|
# convenience functions
|
||||||
self.has_loc = channels[0] != -1 or channels[1] != -1 or channels[2] != -1
|
self.has_loc = channels[0] != -1 or channels[1] != -1 or channels[2] != -1
|
||||||
self.has_rot = channels[3] != -1 or channels[4] != -1 or channels[5] != -1
|
self.has_rot = channels[3] != -1 or channels[4] != -1 or channels[5] != -1
|
||||||
|
|
||||||
|
|
||||||
self.children = []
|
self.children = []
|
||||||
|
|
||||||
# list of 6 length tuples: (lx,ly,lz, rx,ry,rz)
|
# list of 6 length tuples: (lx,ly,lz, rx,ry,rz)
|
||||||
@@ -73,25 +82,7 @@ class bvh_node_class(object):
|
|||||||
self.rest_head_world.x, self.rest_head_world.y, self.rest_head_world.z)
|
self.rest_head_world.x, self.rest_head_world.y, self.rest_head_world.z)
|
||||||
|
|
||||||
|
|
||||||
# Change the order rotation is applied.
|
def read_bvh(context, file_path, rotate_mode='XYZ', global_scale=1.0):
|
||||||
MATRIX_IDENTITY_3x3 = Matrix([1, 0, 0], [0, 1, 0], [0, 0, 1])
|
|
||||||
MATRIX_IDENTITY_4x4 = Matrix([1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1])
|
|
||||||
|
|
||||||
|
|
||||||
def eulerRotate(x, y, z, rot_order):
|
|
||||||
# Clamp all values between 0 and 360, values outside this raise an error.
|
|
||||||
mats = [Matrix.Rotation(x, 3, 'X'), Matrix.Rotation(y, 3, 'Y'), Matrix.Rotation(z, 3, 'Z')]
|
|
||||||
return (MATRIX_IDENTITY_3x3 * mats[rot_order[0]] * (mats[rot_order[1]] * (mats[rot_order[2]]))).to_euler()
|
|
||||||
|
|
||||||
# Should work but doesnt!
|
|
||||||
'''
|
|
||||||
eul = Euler((x, y, z))
|
|
||||||
eul.order = "XYZ"[rot_order[0]] + "XYZ"[rot_order[1]] + "XYZ"[rot_order[2]]
|
|
||||||
return tuple(eul.to_matrix().to_euler())
|
|
||||||
'''
|
|
||||||
|
|
||||||
|
|
||||||
def read_bvh(context, file_path, ROT_MODE='XYZ', GLOBAL_SCALE=1.0):
|
|
||||||
# File loading stuff
|
# File loading stuff
|
||||||
# Open the file for importing
|
# Open the file for importing
|
||||||
file = open(file_path, 'rU')
|
file = open(file_path, 'rU')
|
||||||
@@ -105,9 +96,7 @@ def read_bvh(context, file_path, ROT_MODE='XYZ', GLOBAL_SCALE=1.0):
|
|||||||
# Split by whitespace.
|
# Split by whitespace.
|
||||||
file_lines = [ll for ll in [l.split() for l in file_lines] if ll]
|
file_lines = [ll for ll in [l.split() for l in file_lines] if ll]
|
||||||
|
|
||||||
|
|
||||||
# Create Hierarchy as empties
|
# Create Hierarchy as empties
|
||||||
|
|
||||||
if file_lines[0][0].lower() == 'hierarchy':
|
if file_lines[0][0].lower() == 'hierarchy':
|
||||||
#print 'Importing the BVH Hierarchy for:', file_path
|
#print 'Importing the BVH Hierarchy for:', file_path
|
||||||
pass
|
pass
|
||||||
@@ -119,9 +108,8 @@ def read_bvh(context, file_path, ROT_MODE='XYZ', GLOBAL_SCALE=1.0):
|
|||||||
|
|
||||||
channelIndex = -1
|
channelIndex = -1
|
||||||
|
|
||||||
|
lineIdx = 0 # An index for the file.
|
||||||
lineIdx = 0 # An index for the file.
|
while lineIdx < len(file_lines) - 1:
|
||||||
while lineIdx < len(file_lines) -1:
|
|
||||||
#...
|
#...
|
||||||
if file_lines[lineIdx][0].lower() == 'root' or file_lines[lineIdx][0].lower() == 'joint':
|
if file_lines[lineIdx][0].lower() == 'root' or file_lines[lineIdx][0].lower() == 'joint':
|
||||||
|
|
||||||
@@ -137,9 +125,9 @@ def read_bvh(context, file_path, ROT_MODE='XYZ', GLOBAL_SCALE=1.0):
|
|||||||
|
|
||||||
#print '%snode: %s, parent: %s' % (len(bvh_nodes_serial) * ' ', name, bvh_nodes_serial[-1])
|
#print '%snode: %s, parent: %s' % (len(bvh_nodes_serial) * ' ', name, bvh_nodes_serial[-1])
|
||||||
|
|
||||||
lineIdx += 2 # Incriment to the next line (Offset)
|
lineIdx += 2 # Increment to the next line (Offset)
|
||||||
rest_head_local = Vector((float(file_lines[lineIdx][1]), float(file_lines[lineIdx][2]), float(file_lines[lineIdx][3]))) * GLOBAL_SCALE
|
rest_head_local = Vector((float(file_lines[lineIdx][1]), float(file_lines[lineIdx][2]), float(file_lines[lineIdx][3]))) * global_scale
|
||||||
lineIdx += 1 # Incriment to the next line (Channels)
|
lineIdx += 1 # Increment to the next line (Channels)
|
||||||
|
|
||||||
# newChannel[Xposition, Yposition, Zposition, Xrotation, Yrotation, Zrotation]
|
# newChannel[Xposition, Yposition, Zposition, Xrotation, Yrotation, Zrotation]
|
||||||
# newChannel references indices into the motion data,
|
# newChannel references indices into the motion data,
|
||||||
@@ -150,7 +138,7 @@ def read_bvh(context, file_path, ROT_MODE='XYZ', GLOBAL_SCALE=1.0):
|
|||||||
rot_count = 0
|
rot_count = 0
|
||||||
for channel in file_lines[lineIdx][2:]:
|
for channel in file_lines[lineIdx][2:]:
|
||||||
channel = channel.lower()
|
channel = channel.lower()
|
||||||
channelIndex += 1 # So the index points to the right channel
|
channelIndex += 1 # So the index points to the right channel
|
||||||
if channel == 'xposition':
|
if channel == 'xposition':
|
||||||
my_channel[0] = channelIndex
|
my_channel[0] = channelIndex
|
||||||
elif channel == 'yposition':
|
elif channel == 'yposition':
|
||||||
@@ -173,10 +161,9 @@ def read_bvh(context, file_path, ROT_MODE='XYZ', GLOBAL_SCALE=1.0):
|
|||||||
|
|
||||||
channels = file_lines[lineIdx][2:]
|
channels = file_lines[lineIdx][2:]
|
||||||
|
|
||||||
my_parent = bvh_nodes_serial[-1] # account for none
|
my_parent = bvh_nodes_serial[-1] # account for none
|
||||||
|
|
||||||
|
# Apply the parents offset accumulatively
|
||||||
# Apply the parents offset accumletivly
|
|
||||||
if my_parent is None:
|
if my_parent is None:
|
||||||
rest_head_world = Vector(rest_head_local)
|
rest_head_world = Vector(rest_head_local)
|
||||||
else:
|
else:
|
||||||
@@ -188,24 +175,23 @@ def read_bvh(context, file_path, ROT_MODE='XYZ', GLOBAL_SCALE=1.0):
|
|||||||
bvh_nodes_serial.append(bvh_node)
|
bvh_nodes_serial.append(bvh_node)
|
||||||
|
|
||||||
# Account for an end node
|
# Account for an end node
|
||||||
if file_lines[lineIdx][0].lower() == 'end' and file_lines[lineIdx][1].lower() == 'site': # There is somtimes a name after 'End Site' but we will ignore it.
|
if file_lines[lineIdx][0].lower() == 'end' and file_lines[lineIdx][1].lower() == 'site': # There is sometimes a name after 'End Site' but we will ignore it.
|
||||||
lineIdx += 2 # Incriment to the next line (Offset)
|
lineIdx += 2 # Increment to the next line (Offset)
|
||||||
rest_tail = Vector((float(file_lines[lineIdx][1]), float(file_lines[lineIdx][2]), float(file_lines[lineIdx][3]))) * GLOBAL_SCALE
|
rest_tail = Vector((float(file_lines[lineIdx][1]), float(file_lines[lineIdx][2]), float(file_lines[lineIdx][3]))) * global_scale
|
||||||
|
|
||||||
bvh_nodes_serial[-1].rest_tail_world = bvh_nodes_serial[-1].rest_head_world + rest_tail
|
bvh_nodes_serial[-1].rest_tail_world = bvh_nodes_serial[-1].rest_head_world + rest_tail
|
||||||
bvh_nodes_serial[-1].rest_tail_local = bvh_nodes_serial[-1].rest_head_local + rest_tail
|
bvh_nodes_serial[-1].rest_tail_local = bvh_nodes_serial[-1].rest_head_local + rest_tail
|
||||||
|
|
||||||
|
# Just so we can remove the Parents in a uniform way- End has kids
|
||||||
# Just so we can remove the Parents in a uniform way- End end never has kids
|
|
||||||
# so this is a placeholder
|
# so this is a placeholder
|
||||||
bvh_nodes_serial.append(None)
|
bvh_nodes_serial.append(None)
|
||||||
|
|
||||||
if len(file_lines[lineIdx]) == 1 and file_lines[lineIdx][0] == '}': # == ['}']
|
if len(file_lines[lineIdx]) == 1 and file_lines[lineIdx][0] == '}': # == ['}']
|
||||||
bvh_nodes_serial.pop() # Remove the last item
|
bvh_nodes_serial.pop() # Remove the last item
|
||||||
|
|
||||||
if len(file_lines[lineIdx]) == 1 and file_lines[lineIdx][0].lower() == 'motion':
|
if len(file_lines[lineIdx]) == 1 and file_lines[lineIdx][0].lower() == 'motion':
|
||||||
#print '\nImporting motion data'
|
#print '\nImporting motion data'
|
||||||
lineIdx += 3 # Set the cursor to the first frame
|
lineIdx += 3 # Set the cursor to the first frame
|
||||||
break
|
break
|
||||||
|
|
||||||
lineIdx += 1
|
lineIdx += 1
|
||||||
@@ -226,21 +212,23 @@ def read_bvh(context, file_path, ROT_MODE='XYZ', GLOBAL_SCALE=1.0):
|
|||||||
channels = bvh_node.channels
|
channels = bvh_node.channels
|
||||||
anim_data = bvh_node.anim_data
|
anim_data = bvh_node.anim_data
|
||||||
if channels[0] != -1:
|
if channels[0] != -1:
|
||||||
lx = GLOBAL_SCALE * float(line[channels[0]])
|
lx = global_scale * float(line[channels[0]])
|
||||||
|
|
||||||
if channels[1] != -1:
|
if channels[1] != -1:
|
||||||
ly = GLOBAL_SCALE * float(line[channels[1]])
|
ly = global_scale * float(line[channels[1]])
|
||||||
|
|
||||||
if channels[2] != -1:
|
if channels[2] != -1:
|
||||||
lz = GLOBAL_SCALE * float(line[channels[2]])
|
lz = global_scale * float(line[channels[2]])
|
||||||
|
|
||||||
if channels[3] != -1 or channels[4] != -1 or channels[5] != -1:
|
if channels[3] != -1 or channels[4] != -1 or channels[5] != -1:
|
||||||
rx, ry, rz = float(line[channels[3]]), float(line[channels[4]]), float(line[channels[5]])
|
|
||||||
|
|
||||||
if ROT_MODE != 'NATIVE':
|
rot = radians(float(line[channels[3]])), \
|
||||||
rx, ry, rz = eulerRotate(radians(rx), radians(ry), radians(rz), bvh_node.rot_order)
|
radians(float(line[channels[4]])), \
|
||||||
else:
|
radians(float(line[channels[5]])),
|
||||||
rx, ry, rz = radians(rx), radians(ry), radians(rz)
|
|
||||||
|
# apply rotation order and convert to XYZ
|
||||||
|
# note that the rot_order_str is reversed.
|
||||||
|
rx, ry, rz = Euler(rot, bvh_node.rot_order_str[::-1]).to_matrix().to_euler('XYZ')
|
||||||
|
|
||||||
# Done importing motion data #
|
# Done importing motion data #
|
||||||
anim_data.append((lx, ly, lz, rx, ry, rz))
|
anim_data.append((lx, ly, lz, rx, ry, rz))
|
||||||
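The native-order conversion above can be tried in isolation; a minimal sketch assuming mathutils is available (i.e. running inside Blender), with arbitrary angles and 'ZXY' standing in for rot_order_str:

from math import radians
from mathutils import Euler
rot = (radians(10.0), radians(20.0), radians(30.0))  # values as read from the file
rx, ry, rz = Euler(rot, 'ZXY'[::-1]).to_matrix().to_euler('XYZ')  # note the reversed order string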
@@ -279,9 +267,9 @@ def read_bvh(context, file_path, ROT_MODE='XYZ', GLOBAL_SCALE=1.0):
|
|||||||
bvh_node.rest_tail_local = rest_tail_local * (1.0 / len(bvh_node.children))
|
bvh_node.rest_tail_local = rest_tail_local * (1.0 / len(bvh_node.children))
|
||||||
|
|
||||||
# Make sure tail isnt the same location as the head.
|
# Make sure tail isnt the same location as the head.
|
||||||
if (bvh_node.rest_tail_local - bvh_node.rest_head_local).length <= 0.001 * GLOBAL_SCALE:
|
if (bvh_node.rest_tail_local - bvh_node.rest_head_local).length <= 0.001 * global_scale:
|
||||||
bvh_node.rest_tail_local.y = bvh_node.rest_tail_local.y + GLOBAL_SCALE / 10
|
bvh_node.rest_tail_local.y = bvh_node.rest_tail_local.y + global_scale / 10
|
||||||
bvh_node.rest_tail_world.y = bvh_node.rest_tail_world.y + GLOBAL_SCALE / 10
|
bvh_node.rest_tail_world.y = bvh_node.rest_tail_world.y + global_scale / 10
|
||||||
|
|
||||||
return bvh_nodes
|
return bvh_nodes
|
||||||
|
|
||||||
@@ -307,7 +295,7 @@ def bvh_node_dict2objects(context, bvh_nodes, IMPORT_START_FRAME=1, IMPORT_LOOP=
|
|||||||
|
|
||||||
# Parent the objects
|
# Parent the objects
|
||||||
for bvh_node in bvh_nodes.values():
|
for bvh_node in bvh_nodes.values():
|
||||||
bvh_node.temp.makeParent([bvh_node_child.temp for bvh_node_child in bvh_node.children], 1, 0) # objs, noninverse, 1 = not fast.
|
bvh_node.temp.makeParent([bvh_node_child.temp for bvh_node_child in bvh_node.children], 1, 0) # objs, noninverse, 1 = not fast.
|
||||||
|
|
||||||
# Offset
|
# Offset
|
||||||
for bvh_node in bvh_nodes.values():
|
for bvh_node in bvh_nodes.values():
|
||||||
@@ -318,7 +306,7 @@ def bvh_node_dict2objects(context, bvh_nodes, IMPORT_START_FRAME=1, IMPORT_LOOP=
|
|||||||
for name, bvh_node in bvh_nodes.items():
|
for name, bvh_node in bvh_nodes.items():
|
||||||
if not bvh_node.children:
|
if not bvh_node.children:
|
||||||
ob_end = add_ob(name + '_end')
|
ob_end = add_ob(name + '_end')
|
||||||
bvh_node.temp.makeParent([ob_end], 1, 0) # objs, noninverse, 1 = not fast.
|
bvh_node.temp.makeParent([ob_end], 1, 0) # objs, noninverse, 1 = not fast.
|
||||||
ob_end.loc = bvh_node.rest_tail_local
|
ob_end.loc = bvh_node.rest_tail_local
|
||||||
|
|
||||||
|
|
||||||
@@ -334,13 +322,13 @@ def bvh_node_dict2objects(context, bvh_nodes, IMPORT_START_FRAME=1, IMPORT_LOOP=
|
|||||||
|
|
||||||
bvh_node.temp.rot = rx, ry, rz
|
bvh_node.temp.rot = rx, ry, rz
|
||||||
|
|
||||||
bvh_node.temp.insertIpoKey(Blender.Object.IpoKeyTypes.LOCROT) # XXX invalid
|
bvh_node.temp.insertIpoKey(Blender.Object.IpoKeyTypes.LOCROT) # XXX invalid
|
||||||
|
|
||||||
scn.update(1)
|
scn.update(1)
|
||||||
return objects
|
return objects
|
||||||
|
|
||||||
|
|
||||||
def bvh_node_dict2armature(context, bvh_nodes, ROT_MODE='XYZ', IMPORT_START_FRAME=1, IMPORT_LOOP=False):
|
def bvh_node_dict2armature(context, bvh_nodes, rotate_mode='XYZ', IMPORT_START_FRAME=1, IMPORT_LOOP=False):
|
||||||
|
|
||||||
if IMPORT_START_FRAME < 1:
|
if IMPORT_START_FRAME < 1:
|
||||||
IMPORT_START_FRAME = 1
|
IMPORT_START_FRAME = 1
|
||||||
@@ -396,7 +384,7 @@ def bvh_node_dict2armature(context, bvh_nodes, ROT_MODE='XYZ', IMPORT_START_FRAM
|
|||||||
if (bone.head - bone.tail).length < 0.001:
|
if (bone.head - bone.tail).length < 0.001:
|
||||||
if bvh_node.parent:
|
if bvh_node.parent:
|
||||||
ofs = bvh_node.parent.rest_head_local - bvh_node.parent.rest_tail_local
|
ofs = bvh_node.parent.rest_head_local - bvh_node.parent.rest_tail_local
|
||||||
if ofs.length: # is our parent zero length also?? unlikely
|
if ofs.length: # is our parent zero length also?? unlikely
|
||||||
bone.tail = bone.tail + ofs
|
bone.tail = bone.tail + ofs
|
||||||
else:
|
else:
|
||||||
bone.tail.y = bone.tail.y + average_bone_length
|
bone.tail.y = bone.tail.y + average_bone_length
|
||||||
@@ -436,31 +424,23 @@ def bvh_node_dict2armature(context, bvh_nodes, ROT_MODE='XYZ', IMPORT_START_FRAM
|
|||||||
pose = arm_ob.pose
|
pose = arm_ob.pose
|
||||||
pose_bones = pose.bones
|
pose_bones = pose.bones
|
||||||
|
|
||||||
if ROT_MODE == 'NATIVE':
|
if rotate_mode == 'NATIVE':
|
||||||
eul_order_lookup = {\
|
|
||||||
(0, 1, 2): 'XYZ',
|
|
||||||
(0, 2, 1): 'XZY',
|
|
||||||
(1, 0, 2): 'YXZ',
|
|
||||||
(1, 2, 0): 'YZX',
|
|
||||||
(2, 0, 1): 'ZXY',
|
|
||||||
(2, 1, 0): 'ZYX'}
|
|
||||||
|
|
||||||
for bvh_node in bvh_nodes.values():
|
for bvh_node in bvh_nodes.values():
|
||||||
bone_name = bvh_node.temp # may not be the same name as the bvh_node, could have been shortened.
|
bone_name = bvh_node.temp # may not be the same name as the bvh_node, could have been shortened.
|
||||||
pose_bone = pose_bones[bone_name]
|
pose_bone = pose_bones[bone_name]
|
||||||
pose_bone.rotation_mode = eul_order_lookup[tuple(bvh_node.rot_order)]
|
pose_bone.rotation_mode = bvh_node.rot_order_str
|
||||||
|
|
||||||
elif ROT_MODE != 'QUATERNION':
|
elif rotate_mode != 'QUATERNION':
|
||||||
for pose_bone in pose_bones:
|
for pose_bone in pose_bones:
|
||||||
pose_bone.rotation_mode = ROT_MODE
|
pose_bone.rotation_mode = rotate_mode
|
||||||
else:
|
else:
|
||||||
# Quats default
|
# Quats default
|
||||||
pass
|
pass
|
||||||
|
|
||||||
context.scene.update()
|
context.scene.update()
|
||||||
|
|
||||||
bpy.ops.pose.select_all() # set
|
bpy.ops.pose.select_all() # set
|
||||||
bpy.ops.anim.keyframe_insert_menu(type=-4) # XXX - -4 ???
|
bpy.ops.anim.keyframe_insert_menu(type=-4) # XXX - -4 ???
|
||||||
|
|
||||||
|
|
||||||
#XXX action = Blender.Armature.NLA.NewAction("Action")
|
#XXX action = Blender.Armature.NLA.NewAction("Action")
|
||||||
@@ -475,7 +455,7 @@ def bvh_node_dict2armature(context, bvh_nodes, ROT_MODE='XYZ', IMPORT_START_FRAM
|
|||||||
# Replace the bvh_node.temp (currently an editbone)
|
# Replace the bvh_node.temp (currently an editbone)
|
||||||
# With a tuple (pose_bone, armature_bone, bone_rest_matrix, bone_rest_matrix_inv)
|
# With a tuple (pose_bone, armature_bone, bone_rest_matrix, bone_rest_matrix_inv)
|
||||||
for bvh_node in bvh_nodes.values():
|
for bvh_node in bvh_nodes.values():
|
||||||
bone_name = bvh_node.temp # may not be the same name as the bvh_node, could have been shortened.
|
bone_name = bvh_node.temp # may not be the same name as the bvh_node, could have been shortened.
|
||||||
pose_bone = pose_bones[bone_name]
|
pose_bone = pose_bones[bone_name]
|
||||||
rest_bone = arm_data.bones[bone_name]
|
rest_bone = arm_data.bones[bone_name]
|
||||||
bone_rest_matrix = rest_bone.matrix_local.rotation_part()
|
bone_rest_matrix = rest_bone.matrix_local.rotation_part()
|
||||||
@@ -494,11 +474,11 @@ def bvh_node_dict2armature(context, bvh_nodes, ROT_MODE='XYZ', IMPORT_START_FRAM
|
|||||||
# KEYFRAME METHOD, SLOW, USE IPOS DIRECT
|
# KEYFRAME METHOD, SLOW, USE IPOS DIRECT
|
||||||
# TODO: use f-point samples instead (Aligorith)
|
# TODO: use f-point samples instead (Aligorith)
|
||||||
|
|
||||||
if ROT_MODE != 'QUATERNION':
|
if rotate_mode != 'QUATERNION':
|
||||||
prev_euler = [Euler() for i in range(len(bvh_nodes))]
|
prev_euler = [Euler() for i in range(len(bvh_nodes))]
|
||||||
|
|
||||||
# Animate the data, the last used bvh_node will do since they all have the same number of frames
|
# Animate the data, the last used bvh_node will do since they all have the same number of frames
|
||||||
for frame_current in range(len(bvh_node.anim_data)-1): # skip the first frame (rest frame)
|
for frame_current in range(len(bvh_node.anim_data) - 1): # skip the first frame (rest frame)
|
||||||
# print frame_current
|
# print frame_current
|
||||||
|
|
||||||
# if frame_current==40: # debugging
|
# if frame_current==40: # debugging
|
||||||
@@ -513,7 +493,7 @@ def bvh_node_dict2armature(context, bvh_nodes, ROT_MODE='XYZ', IMPORT_START_FRAM
|
|||||||
bone_rotation_matrix = Euler((rx, ry, rz)).to_matrix().resize4x4()
|
bone_rotation_matrix = Euler((rx, ry, rz)).to_matrix().resize4x4()
|
||||||
bone_rotation_matrix = bone_rest_matrix_inv * bone_rotation_matrix * bone_rest_matrix
|
bone_rotation_matrix = bone_rest_matrix_inv * bone_rotation_matrix * bone_rest_matrix
|
||||||
|
|
||||||
if ROT_MODE == 'QUATERNION':
|
if rotate_mode == 'QUATERNION':
|
||||||
pose_bone.rotation_quaternion = bone_rotation_matrix.to_quat()
|
pose_bone.rotation_quaternion = bone_rotation_matrix.to_quat()
|
||||||
else:
|
else:
|
||||||
euler = bone_rotation_matrix.to_euler(pose_bone.rotation_mode, prev_euler[i])
|
euler = bone_rotation_matrix.to_euler(pose_bone.rotation_mode, prev_euler[i])
|
||||||
@@ -526,7 +506,7 @@ def bvh_node_dict2armature(context, bvh_nodes, ROT_MODE='XYZ', IMPORT_START_FRAM
|
|||||||
if bvh_node.has_loc:
|
if bvh_node.has_loc:
|
||||||
pose_bone.keyframe_insert("location")
|
pose_bone.keyframe_insert("location")
|
||||||
if bvh_node.has_rot:
|
if bvh_node.has_rot:
|
||||||
if ROT_MODE == 'QUATERNION':
|
if rotate_mode == 'QUATERNION':
|
||||||
pose_bone.keyframe_insert("rotation_quaternion")
|
pose_bone.keyframe_insert("rotation_quaternion")
|
||||||
else:
|
else:
|
||||||
pose_bone.keyframe_insert("rotation_euler")
|
pose_bone.keyframe_insert("rotation_euler")
|
||||||
@@ -537,7 +517,7 @@ def bvh_node_dict2armature(context, bvh_nodes, ROT_MODE='XYZ', IMPORT_START_FRAM
|
|||||||
|
|
||||||
for cu in action.fcurves:
|
for cu in action.fcurves:
|
||||||
if IMPORT_LOOP:
|
if IMPORT_LOOP:
|
||||||
pass # 2.5 doesn't have cyclic now?
|
pass # 2.5 doesn't have cyclic now?
|
||||||
|
|
||||||
for bez in cu.keyframe_points:
|
for bez in cu.keyframe_points:
|
||||||
bez.interpolation = 'LINEAR'
|
bez.interpolation = 'LINEAR'
|
||||||
@@ -545,24 +525,24 @@ def bvh_node_dict2armature(context, bvh_nodes, ROT_MODE='XYZ', IMPORT_START_FRAM
|
|||||||
return arm_ob
|
return arm_ob
|
||||||
|
|
||||||
|
|
||||||
def load(operator, context, filepath="", rotate_mode='NATIVE', scale=1.0, use_cyclic=False, frame_start=1):
|
def load(operator, context, filepath="", rotate_mode='NATIVE', global_scale=1.0, use_cyclic=False, frame_start=1):
|
||||||
import time
|
import time
|
||||||
t1 = time.time()
|
t1 = time.time()
|
||||||
print('\tparsing bvh %r...' % filepath, end="")
|
print('\tparsing bvh %r...' % filepath, end="")
|
||||||
|
|
||||||
bvh_nodes = read_bvh(context, filepath,
|
bvh_nodes = read_bvh(context, filepath,
|
||||||
ROT_MODE=rotate_mode,
|
rotate_mode=rotate_mode,
|
||||||
GLOBAL_SCALE=scale)
|
global_scale=global_scale)
|
||||||
|
|
||||||
print('%.4f' % (time.time() - t1))
|
print('%.4f' % (time.time() - t1))
|
||||||
t1 = time.time()
|
t1 = time.time()
|
||||||
print('\timporting to blender...', end="")
|
print('\timporting to blender...', end="")
|
||||||
|
|
||||||
bvh_node_dict2armature(context, bvh_nodes,
|
bvh_node_dict2armature(context, bvh_nodes,
|
||||||
ROT_MODE=rotate_mode,
|
rotate_mode=rotate_mode,
|
||||||
IMPORT_START_FRAME=frame_start,
|
IMPORT_START_FRAME=frame_start,
|
||||||
IMPORT_LOOP=use_cyclic)
|
IMPORT_LOOP=use_cyclic)
|
||||||
|
|
||||||
print('Done in %.4f\n' % (time.time() - t1))
|
print('Done in %.4f\n' % (time.time() - t1))
|
||||||
|
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|||||||
@@ -32,14 +32,13 @@ import os


def save(operator, context, filepath="", use_modifiers=True, use_normals=True, use_uv_coords=True, use_colors=True):

def rvec3d(v):
return round(v[0], 6), round(v[1], 6), round(v[2], 6)


def rvec2d(v):
return round(v[0], 6), round(v[1], 6)

scene = context.scene
obj = context.object

@@ -94,15 +93,14 @@ def save(operator, context, filepath="", use_modifiers=True, use_normals=True, u
# incase
color = uvcoord = uvcoord_key = normal = normal_key = None

mesh_verts = mesh.vertices # save a lookup
ply_verts = [] # list of dictionaries
# vdict = {} # (index, normal, uv) -> new index
vdict = [{} for i in range(len(mesh_verts))]
ply_faces = [[] for f in range(len(mesh.faces))]
vert_count = 0
for i, f in enumerate(mesh.faces):


smooth = f.use_smooth
if not smooth:
normal = tuple(f.normal)
@@ -110,7 +108,7 @@ def save(operator, context, filepath="", use_modifiers=True, use_normals=True, u

if faceUV:
uv = active_uv_layer[i]
uv = uv.uv1, uv.uv2, uv.uv3, uv.uv4 # XXX - crufty :/
if vertexColors:
col = active_col_layer[i]
col = col.color1[:], col.color2[:], col.color3[:], col.color4[:]
@@ -136,13 +134,12 @@ def save(operator, context, filepath="", use_modifiers=True, use_normals=True, u
color = col[j]
color = int(color[0] * 255.0), int(color[1] * 255.0), int(color[2] * 255.0)


key = normal_key, uvcoord_key, color

vdict_local = vdict[vidx]
pf_vidx = vdict_local.get(key) # Will be None initially

if pf_vidx is None: # same as vdict_local.has_key(key)
pf_vidx = vdict_local[key] = vert_count
ply_verts.append((vidx, normal, uvcoord, color))
vert_count += 1
@@ -176,13 +173,13 @@ def save(operator, context, filepath="", use_modifiers=True, use_normals=True, u
file.write('end_header\n')

for i, v in enumerate(ply_verts):
file.write('%.6f %.6f %.6f ' % mesh_verts[v[0]].co[:]) # co
if use_normals:
file.write('%.6f %.6f %.6f ' % v[1]) # no
if use_uv_coords:
file.write('%.6f %.6f ' % v[2]) # uv
if use_colors:
file.write('%u %u %u' % v[3]) # col
file.write('\n')

for pf in ply_faces:
@@ -202,5 +199,5 @@ def save(operator, context, filepath="", use_modifiers=True, use_normals=True, u
if is_editmode:
Blender.Window.EditMode(1, '', 0)
"""

return {'FINISHED'}

@@ -66,9 +66,11 @@ class Export3DS(bpy.types.Operator, ExportHelper):
def menu_func_export(self, context):
self.layout.operator(Export3DS.bl_idname, text="3D Studio (.3ds)")


def menu_func_import(self, context):
self.layout.operator(Import3DS.bl_idname, text="3D Studio (.3ds)")


def register():
bpy.types.INFO_MT_file_import.append(menu_func_import)
bpy.types.INFO_MT_file_export.append(menu_func_export)
@@ -84,4 +86,3 @@ def unregister():

if __name__ == "__main__":
register()


@@ -110,7 +110,6 @@ def sane_name(name):

def uv_key(uv):
return round(uv[0], 6), round(uv[1], 6)
-# return round(uv.x, 6), round(uv.y, 6)

# size defines:
SZ_SHORT = 2

@@ -542,7 +542,7 @@ def save(operator, context, filepath="",
print('\nFBX export starting... %r' % filepath)
start_time = time.clock()
try:
-file = open(filepath, 'w')
+file = open(filepath, 'w', encoding='utf8')
except:
return False

@@ -1636,13 +1636,13 @@ def save(operator, context, filepath="",
# workaround, since uf.uv iteration is wrong atm
for uv in uf.uv:
if i==-1:
-file.write('%.6f,%.6f' % tuple(uv))
+file.write('%.6f,%.6f' % uv[:])
i=0
else:
if i==7:
file.write('\n\t\t\t ')
i=0
-file.write(',%.6f,%.6f' % tuple(uv))
+file.write(',%.6f,%.6f' % uv[:])
i+=1
ii+=1 # One more UV

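The FBX hunk above (and the OBJ/X3D exporters further down) switch bare open(filepath, 'w') calls to an explicit text encoding. A minimal sketch of the pattern under Python 3, with a made-up path for illustration:

    # Without an explicit encoding, Python 3 falls back to the platform's
    # locale encoding, so non-ASCII object or material names may not round-trip;
    # passing encoding='utf8' makes the exporter output deterministic.
    filepath = "/tmp/example_export.fbx"  # hypothetical path, not from the scripts
    file = open(filepath, 'w', encoding='utf8')
    file.write("; exported text with a non-ASCII name: Würfel\n")
    file.close()
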
@@ -40,19 +40,18 @@ class ImportOBJ(bpy.types.Operator, ImportHelper):
filename_ext = ".obj"
filter_glob = StringProperty(default="*.obj;*.mtl", options={'HIDDEN'})

-CREATE_SMOOTH_GROUPS = BoolProperty(name="Smooth Groups", description="Surround smooth groups by sharp edges", default= True)
+CREATE_SMOOTH_GROUPS = BoolProperty(name="Smooth Groups", description="Surround smooth groups by sharp edges", default=True)
-CREATE_FGONS = BoolProperty(name="NGons as FGons", description="Import faces with more then 4 verts as fgons", default= True)
+CREATE_FGONS = BoolProperty(name="NGons as FGons", description="Import faces with more then 4 verts as fgons", default=True)
-CREATE_EDGES = BoolProperty(name="Lines as Edges", description="Import lines and faces with 2 verts as edge", default= True)
+CREATE_EDGES = BoolProperty(name="Lines as Edges", description="Import lines and faces with 2 verts as edge", default=True)
-SPLIT_OBJECTS = BoolProperty(name="Object", description="Import OBJ Objects into Blender Objects", default= True)
+SPLIT_OBJECTS = BoolProperty(name="Object", description="Import OBJ Objects into Blender Objects", default=True)
-SPLIT_GROUPS = BoolProperty(name="Group", description="Import OBJ Groups into Blender Objects", default= True)
+SPLIT_GROUPS = BoolProperty(name="Group", description="Import OBJ Groups into Blender Objects", default=True)
# old comment: only used for user feedback
# disabled this option because in old code a handler for it disabled SPLIT* params, it's not passed to load_obj
# KEEP_VERT_ORDER = BoolProperty(name="Keep Vert Order", description="Keep vert and face order, disables split options, enable for morph targets", default= True)
-ROTATE_X90 = BoolProperty(name="-X90", description="Rotate X 90.", default= True)
+ROTATE_X90 = BoolProperty(name="-X90", description="Rotate X 90.", default=True)
CLAMP_SIZE = FloatProperty(name="Clamp Scale", description="Clamp the size to this maximum (Zero to Disable)", min=0.0, max=1000.0, soft_min=0.0, soft_max=1000.0, default=0.0)
-POLYGROUPS = BoolProperty(name="Poly Groups", description="Import OBJ groups as vertex groups.", default= True)
+POLYGROUPS = BoolProperty(name="Poly Groups", description="Import OBJ groups as vertex groups.", default=True)
-IMAGE_SEARCH = BoolProperty(name="Image Search", description="Search subdirs for any assosiated images (Warning, may be slow)", default= True)
+IMAGE_SEARCH = BoolProperty(name="Image Search", description="Search subdirs for any assosiated images (Warning, may be slow)", default=True)


def execute(self, context):
# print("Selected: " + context.active_object.name)
@@ -74,19 +73,19 @@ class ExportOBJ(bpy.types.Operator, ExportHelper):
# to the class instance from the operator settings before calling.

# context group
-use_selection = BoolProperty(name="Selection Only", description="Export selected objects only", default= False)
+use_selection = BoolProperty(name="Selection Only", description="Export selected objects only", default=False)
-use_all_scenes = BoolProperty(name="All Scenes", description="", default= False)
+use_all_scenes = BoolProperty(name="All Scenes", description="", default=False)
-use_animation = BoolProperty(name="Animation", description="", default= False)
+use_animation = BoolProperty(name="Animation", description="", default=False)

# object group
-use_modifiers = BoolProperty(name="Apply Modifiers", description="Apply modifiers (preview resolution)", default= True)
+use_modifiers = BoolProperty(name="Apply Modifiers", description="Apply modifiers (preview resolution)", default=True)
-use_rotate_x90 = BoolProperty(name="Rotate X90", description="", default= True)
+use_rotate_x90 = BoolProperty(name="Rotate X90", description="", default=True)

# extra data group
use_edges = BoolProperty(name="Edges", description="", default=True)
use_normals = BoolProperty(name="Normals", description="", default=False)
use_hq_normals = BoolProperty(name="High Quality Normals", description="", default=True)
-use_uvs = BoolProperty(name="UVs", description="", default= True)
+use_uvs = BoolProperty(name="UVs", description="", default=True)
use_materials = BoolProperty(name="Materials", description="", default=True)
copy_images = BoolProperty(name="Copy Images", description="", default=False)
use_triangles = BoolProperty(name="Triangulate", description="", default=False)
@@ -94,11 +93,10 @@ class ExportOBJ(bpy.types.Operator, ExportHelper):
use_nurbs = BoolProperty(name="Nurbs", description="", default=False)

# grouping group
-use_blen_objects = BoolProperty(name="Objects as OBJ Objects", description="", default= True)
+use_blen_objects = BoolProperty(name="Objects as OBJ Objects", description="", default=True)
-group_by_object = BoolProperty(name="Objects as OBJ Groups ", description="", default= False)
+group_by_object = BoolProperty(name="Objects as OBJ Groups ", description="", default=False)
-group_by_material = BoolProperty(name="Material Groups", description="", default= False)
+group_by_material = BoolProperty(name="Material Groups", description="", default=False)
-keep_vertex_order = BoolProperty(name="Keep Vertex Order", description="", default= False)
+keep_vertex_order = BoolProperty(name="Keep Vertex Order", description="", default=False)


def execute(self, context):
from . import export_obj
@@ -117,6 +115,7 @@ def register():
bpy.types.INFO_MT_file_import.append(menu_func_import)
bpy.types.INFO_MT_file_export.append(menu_func_export)


def unregister():
bpy.types.INFO_MT_file_import.remove(menu_func_import)
bpy.types.INFO_MT_file_export.remove(menu_func_export)

@@ -59,9 +59,8 @@ def write_mtl(scene, filepath, copy_images, mtl_dict):
return rel


-file = open(filepath, "w")
+file = open(filepath, "w", encoding='utf8')
-# XXX
+file.write('# Blender MTL File: %r\n' % os.path.basename(bpy.data.filepath))
-# file.write('# Blender MTL File: %s\n' % Blender.Get('filepath').split('\\')[-1].split('/')[-1])
file.write('# Material Count: %i\n' % len(mtl_dict))
# Write material/image combinations we have used.
for key, (mtl_mat_name, mat, img) in mtl_dict.items():
@@ -137,8 +136,6 @@ def copy_file(source, dest):
def copy_images(dest_dir):
if dest_dir[-1] != os.sep:
dest_dir += os.sep
-# if dest_dir[-1] != sys.sep:
-# dest_dir += sys.sep

# Get unique image names
uniqueImages = {}
@@ -284,7 +281,6 @@ def write_file(filepath, objects, scene,

def veckey2d(v):
return round(v[0], 6), round(v[1], 6)
-# return round(v.x, 6), round(v.y, 6)

def findVertexGroupName(face, vWeightMap):
"""
@@ -503,7 +499,7 @@ def write_file(filepath, objects, scene,
uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
except:
uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
-file.write('vt %.6f %.6f\n' % tuple(uv))
+file.write('vt %.6f %.6f\n' % uv[:])

uv_unique_count = len(uv_dict)
# del uv, uvkey, uv_dict, f_index, uv_index
@@ -546,18 +542,10 @@ def write_file(filepath, objects, scene,
for f, f_index in face_index_pairs:
f_smooth= f.use_smooth
f_mat = min(f.material_index, len(materialNames)-1)
-# f_mat = min(f.mat, len(materialNames)-1)
if faceuv:

tface = uv_layer[f_index]

f_image = tface.image
-f_uv = tface.uv
-# f_uv= [tface.uv1, tface.uv2, tface.uv3]
-# if len(f.vertices) == 4:
-# f_uv.append(tface.uv4)
-# f_image = f.image
-# f_uv= f.uv

# MAKE KEY
if faceuv and f_image: # Object is always true.
@@ -768,7 +756,7 @@ def _write(context, filepath,
else:
objects = scene.objects

-full_path= ''.join(context_name)
+full_path = ''.join(context_name)

# erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
# EXPORT THE FILE.
@@ -789,7 +777,6 @@ def _write(context, filepath,
EXPORT_POLYGROUPS,
EXPORT_CURVE_AS_NURBS)


scene.frame_set(orig_frame, 0.0)

# Restore old active scene.
@@ -825,7 +812,7 @@ def save(operator, context, filepath="",
use_animation=False,
):

_write(context, filepath,
EXPORT_TRI=use_triangles,
EXPORT_EDGES=use_edges,
EXPORT_NORMALS=use_normals,

@@ -35,7 +35,7 @@ import os
import time
import bpy
import mathutils
-from mathutils.geometry import PolyFill
+from mathutils.geometry import tesselate_polygon
from io_utils import load_image, unpack_list, unpack_face_list


@@ -84,7 +84,7 @@ def BPyMesh_ngon(from_data, indices, PREF_FIX_LOOPS= True):
if verts[i][1]==verts[i-1][0]:
verts.pop(i-1)

-fill= PolyFill([verts])
+fill= fill_polygon([verts])

else:
'''
@@ -192,7 +192,7 @@ def BPyMesh_ngon(from_data, indices, PREF_FIX_LOOPS= True):
vert_map[i+ii]= vert[2]
ii+=len(verts)

-fill= PolyFill([ [v[0] for v in loop] for loop in loop_list ])
+fill= tesselate_polygon([ [v[0] for v in loop] for loop in loop_list ])
#draw_loops(loop_list)
#raise 'done loop'
# map to original indicies

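The OBJ importer above now pulls tesselate_polygon from mathutils.geometry instead of the old PolyFill name. A minimal usage sketch, assuming the 2.5x mathutils API; the square outline is invented for illustration:

    from mathutils import Vector
    from mathutils.geometry import tesselate_polygon

    # tesselate_polygon takes a list of point loops and returns triangles as
    # index triples into the concatenated list of loop points.
    square = [Vector((0.0, 0.0, 0.0)), Vector((1.0, 0.0, 0.0)),
              Vector((1.0, 1.0, 0.0)), Vector((0.0, 1.0, 0.0))]
    triangles = tesselate_polygon([square])
    print(triangles)  # two index triples covering the square
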
@@ -95,7 +95,7 @@ class x3d_class:
self.filepath = filepath[:-1] # remove trailing z

if self.file is None:
-self.file = open(self.filepath, "w")
+self.file = open(self.filepath, "w", encoding='utf8')

self.bNav=0
self.nodeID=0
@@ -537,37 +537,28 @@ class x3d_class:
self.writeIndented("\n", -1)

def writeTextureCoordinates(self, mesh):
-texCoordList=[]
-texIndexList=[]
-j=0

-for face in mesh.uv_textures.active.data:
-# for face in mesh.faces:
-# workaround, since tface.uv iteration is wrong atm
-uvs = face.uv
-# uvs = [face.uv1, face.uv2, face.uv3, face.uv4] if face.vertices[3] else [face.uv1, face.uv2, face.uv3]

-for uv in uvs:
-# for uv in face.uv:
-texIndexList.append(j)
-texCoordList.append(uv)
-j=j+1
-texIndexList.append(-1)

if self.writingtexture == 0:
self.file.write("\n\t\t\ttexCoordIndex=\"")
-texIndxStr=""
-for i in range(len(texIndexList)):
+fw = self.file.write
-texIndxStr = texIndxStr + "%d, " % texIndexList[i]
+j = 0
-if texIndexList[i]==-1:
+for face in mesh.uv_textures.active.data:
-self.file.write(texIndxStr)
+if len(face.uv) == 4:
-texIndxStr=""
+fw("%d %d %d %d -1, " % (j, j + 1, j + 2, j + 3))
-self.file.write("\"\n\t\t\t")
+j += 4
+else:
+fw("%d %d %d -1, " % (j, j + 1, j + 2))
+j += 3

+fw("\"\n\t\t\t")
else:
+texCoordList = (uv for fuv in mesh.uv_textures.active.data for uv in fuv.uv)

self.writeIndented("<TextureCoordinate point=\"", 1)
-for i in range(len(texCoordList)):
+fw = self.file.write
-self.file.write("%s %s, " % (round(texCoordList[i][0],self.tp), round(texCoordList[i][1],self.tp)))
+for uv in texCoordList:
-self.file.write("\" />")
+fw("%.4f %.4f, " % uv[:])
+fw("\" />")
self.writeIndented("\n", -1)

def writeFaceColors(self, mesh):

@@ -58,11 +58,12 @@ class ImportMDD(bpy.types.Operator, ImportHelper):
from . import import_mdd
return import_mdd.load(self, context, **self.as_keywords(ignore=("filter_glob",)))


class ExportMDD(bpy.types.Operator, ExportHelper):
'''Animated mesh to MDD vertex keyframe file'''
bl_idname = "export_shape.mdd"
bl_label = "Export MDD"

filename_ext = ".mdd"
filter_glob = StringProperty(default="*.mdd", options={'HIDDEN'})


@@ -39,7 +39,7 @@ def zero_file(filepath):
If a file fails, this replaces it with 1 char, better not remove it?
'''
file = open(filepath, 'w')
file.write('\n') # apparently macosx needs some data in a blank file?
file.close()


@@ -84,13 +84,13 @@ def save(operator, context, filepath="", frame_start=1, frame_end=300, fps=25):

numframes = frame_end - frame_start + 1
fps = float(fps)
-f = open(filepath, 'wb') #no Errors yet:Safe to create file
+f = open(filepath, 'wb') # no Errors yet:Safe to create file

# Write the header
f.write(pack(">2i", numframes, numverts))

# Write the frame times (should we use the time IPO??)
f.write(pack(">%df" % (numframes), *[frame / fps for frame in range(numframes)])) # seconds

#rest frame needed to keep frames in sync
"""
@@ -102,7 +102,7 @@ def save(operator, context, filepath="", frame_start=1, frame_end=300, fps=25):
me.transform(mat_flip * obj.matrix_world)
f.write(pack(">%df" % (numverts * 3), *[axis for v in me.vertices for axis in v.co]))

-for frame in range(frame_start, frame_end + 1):#in order to start at desired frame
+for frame in range(frame_start, frame_end + 1): # in order to start at desired frame
"""
Blender.Set('curframe', frame)
me_tmp.getFromObject(obj.name)
@@ -127,5 +127,5 @@ def save(operator, context, filepath="", frame_start=1, frame_end=300, fps=25):
Blender.Set('curframe', orig_frame)
"""
scene.frame_set(orig_frame)

return {'FINISHED'}

@@ -36,10 +36,10 @@ from struct import unpack


def load(operator, context, filepath, frame_start=0, frame_step=1):

scene = context.scene
obj = context.object

print('\n\nimporting mdd %r' % filepath)

if bpy.ops.object.mode_set.poll():
@@ -68,37 +68,34 @@ def load(operator, context, filepath, frame_start=0, frame_step=1):
new_shapekey.name = ("frame_%.4d" % fr)
new_shapekey_name = new_shapekey.name

-obj.active_shape_key_index = len(obj.data.shape_keys.keys)-1
+obj.active_shape_key_index = len(obj.data.shape_keys.keys) - 1
-index = len(obj.data.shape_keys.keys)-1
+index = len(obj.data.shape_keys.keys) - 1
obj.show_only_shape_key = True

-verts = obj.data.shape_keys.keys[len(obj.data.shape_keys.keys)-1].data
+verts = obj.data.shape_keys.keys[len(obj.data.shape_keys.keys) - 1].data

-for v in verts: # 12 is the size of 3 floats
+for v in verts:  # 12 is the size of 3 floats
v.co[:] = unpack('>3f', file.read(12))
#me.update()
obj.show_only_shape_key = False


# insert keyframes
shape_keys = obj.data.shape_keys

scene.frame_current -= 1
obj.data.shape_keys.keys[index].value = 0.0
-shape_keys.keys[len(obj.data.shape_keys.keys)-1].keyframe_insert("value")
+shape_keys.keys[len(obj.data.shape_keys.keys) - 1].keyframe_insert("value")

scene.frame_current += 1
obj.data.shape_keys.keys[index].value = 1.0
-shape_keys.keys[len(obj.data.shape_keys.keys)-1].keyframe_insert("value")
+shape_keys.keys[len(obj.data.shape_keys.keys) - 1].keyframe_insert("value")

scene.frame_current += 1
obj.data.shape_keys.keys[index].value = 0.0
-shape_keys.keys[len(obj.data.shape_keys.keys)-1].keyframe_insert("value")
+shape_keys.keys[len(obj.data.shape_keys.keys) - 1].keyframe_insert("value")

obj.data.update()


for i in range(frames):
UpdateMesh(obj, i)

@@ -257,7 +257,7 @@ class ShapeTransfer(bpy.types.Operator):
ob.active_shape_key_index = len(me.shape_keys.keys) - 1
ob.show_only_shape_key = True

-from mathutils.geometry import BarycentricTransform
+from mathutils.geometry import barycentric_transform
from mathutils import Vector

if use_clamp and mode == 'OFFSET':
@@ -300,38 +300,38 @@ class ShapeTransfer(bpy.types.Operator):
for face in me.faces:
i1, i2, i3, i4 = face.vertices_raw
if i4 != 0:
-pt = BarycentricTransform(orig_shape_coords[i1],
+pt = barycentric_transform(orig_shape_coords[i1],
orig_coords[i4], orig_coords[i1], orig_coords[i2],
target_coords[i4], target_coords[i1], target_coords[i2])
median_coords[i1].append(pt)

-pt = BarycentricTransform(orig_shape_coords[i2],
+pt = barycentric_transform(orig_shape_coords[i2],
orig_coords[i1], orig_coords[i2], orig_coords[i3],
target_coords[i1], target_coords[i2], target_coords[i3])
median_coords[i2].append(pt)

-pt = BarycentricTransform(orig_shape_coords[i3],
+pt = barycentric_transform(orig_shape_coords[i3],
orig_coords[i2], orig_coords[i3], orig_coords[i4],
target_coords[i2], target_coords[i3], target_coords[i4])
median_coords[i3].append(pt)

-pt = BarycentricTransform(orig_shape_coords[i4],
+pt = barycentric_transform(orig_shape_coords[i4],
orig_coords[i3], orig_coords[i4], orig_coords[i1],
target_coords[i3], target_coords[i4], target_coords[i1])
median_coords[i4].append(pt)

else:
-pt = BarycentricTransform(orig_shape_coords[i1],
+pt = barycentric_transform(orig_shape_coords[i1],
orig_coords[i3], orig_coords[i1], orig_coords[i2],
target_coords[i3], target_coords[i1], target_coords[i2])
median_coords[i1].append(pt)

-pt = BarycentricTransform(orig_shape_coords[i2],
+pt = barycentric_transform(orig_shape_coords[i2],
orig_coords[i1], orig_coords[i2], orig_coords[i3],
target_coords[i1], target_coords[i2], target_coords[i3])
median_coords[i2].append(pt)

-pt = BarycentricTransform(orig_shape_coords[i3],
+pt = barycentric_transform(orig_shape_coords[i3],
orig_coords[i2], orig_coords[i3], orig_coords[i1],
target_coords[i2], target_coords[i3], target_coords[i1])
median_coords[i3].append(pt)
@@ -350,12 +350,12 @@ class ShapeTransfer(bpy.types.Operator):
n1loc_to = v1_to + target_normals[i1] * edlen_to
n2loc_to = v2_to + target_normals[i2] * edlen_to

-pt = BarycentricTransform(orig_shape_coords[i1],
+pt = barycentric_transform(orig_shape_coords[i1],
v2, v1, n1loc,
v2_to, v1_to, n1loc_to)
median_coords[i1].append(pt)

-pt = BarycentricTransform(orig_shape_coords[i2],
+pt = barycentric_transform(orig_shape_coords[i2],
v1, v2, n2loc,
v1_to, v2_to, n2loc_to)
median_coords[i2].append(pt)
@@ -552,7 +552,6 @@ class IsolateTypeRender(bpy.types.Operator):
return {'FINISHED'}



class ClearAllRestrictRender(bpy.types.Operator):
'''Reveal all render objects by setting the hide render flag'''
bl_idname = "object.hide_render_clear_all"

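The ShapeTransfer hunks above switch to the lower-case barycentric_transform name. A minimal sketch of the call, assuming the 2.5x mathutils.geometry API; the triangles and point are invented for illustration:

    from mathutils import Vector
    from mathutils.geometry import barycentric_transform

    # Re-express a point given relative to a source triangle onto a target
    # triangle, which is what the operator does once per face corner.
    src_a, src_b, src_c = Vector((0, 0, 0)), Vector((1, 0, 0)), Vector((0, 1, 0))
    dst_a, dst_b, dst_c = Vector((0, 0, 1)), Vector((2, 0, 1)), Vector((0, 2, 1))
    point = Vector((0.25, 0.25, 0.0))
    mapped = barycentric_transform(point, src_a, src_b, src_c, dst_a, dst_b, dst_c)
    print(mapped)  # the point mapped into the target triangle's plane
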
@@ -123,21 +123,21 @@ def align_objects(align_x, align_y, align_z, align_mode, relative_to):

# Align Mode

if relative_to == 'OPT_4': # Active relative
if align_mode == 'OPT_1':
obj_x = obj_loc[0] - negative_x - size_active_x

elif align_mode == 'OPT_3':
obj_x = obj_loc[0] - positive_x + size_active_x

else: # Everything else relative
if align_mode == 'OPT_1':
obj_x = obj_loc[0] - negative_x

elif align_mode == 'OPT_3':
obj_x = obj_loc[0] - positive_x

if align_mode == 'OPT_2': # All relative
obj_x = obj_loc[0] - center_x

# Relative To
@@ -156,26 +156,24 @@ def align_objects(align_x, align_y, align_z, align_mode, relative_to):

obj.location[0] = loc_x


if align_y:

# Align Mode

if relative_to == 'OPT_4': # Active relative
if align_mode == 'OPT_1':
obj_y = obj_loc[1] - negative_y - size_active_y

elif align_mode == 'OPT_3':
obj_y = obj_loc[1] - positive_y + size_active_y

else: # Everything else relative
if align_mode == 'OPT_1':
obj_y = obj_loc[1] - negative_y

elif align_mode == 'OPT_3':
obj_y = obj_loc[1] - positive_y

if align_mode == 'OPT_2': # All relative
obj_y = obj_loc[1] - center_y

# Relative To
@@ -194,26 +192,23 @@ def align_objects(align_x, align_y, align_z, align_mode, relative_to):

obj.location[1] = loc_y


if align_z:

# Align Mode
if relative_to == 'OPT_4': # Active relative
if align_mode == 'OPT_1':
obj_z = obj_loc[2] - negative_z - size_active_z

elif align_mode == 'OPT_3':
obj_z = obj_loc[2] - positive_z + size_active_z

else: # Everything else relative
if align_mode == 'OPT_1':
obj_z = obj_loc[2] - negative_z

elif align_mode == 'OPT_3':
obj_z = obj_loc[2] - positive_z

if align_mode == 'OPT_2': # All relative
obj_z = obj_loc[2] - center_z

# Relative To

@@ -86,6 +86,7 @@ def randomize_selected(seed, delta, loc, rot, scale, scale_even):

from bpy.props import *


class RandomizeLocRotSize(bpy.types.Operator):
'''Randomize objects loc/rot/scale'''
bl_idname = "object.randomize_transform"

@@ -29,7 +29,7 @@ class AddPresetBase():
- preset_subdir '''
# bl_idname = "script.preset_base_add"
# bl_label = "Add a Python Preset"
bl_options = {'REGISTER'} # only because invoke_props_popup requires.

name = bpy.props.StringProperty(name="Name", description="Name of the preset, used to make the path name", maxlen=64, default="")
remove_active = bpy.props.BoolProperty(default=False, options={'HIDDEN'})
@@ -42,13 +42,13 @@ class AddPresetBase():

def execute(self, context):
import os

if hasattr(self, "pre_cb"):
self.pre_cb(context)

preset_menu_class = getattr(bpy.types, self.preset_menu)

if not self.remove_active:

if not self.name:
return {'FINISHED'}
@@ -62,7 +62,7 @@ class AddPresetBase():
return {'CANCELLED'}

filepath = os.path.join(target_path, filename) + ".py"

if hasattr(self, "add"):
self.add(context, filepath)
else:
@@ -352,6 +352,7 @@ class WM_MT_operator_presets(bpy.types.Menu):

preset_operator = "script.execute_preset"


def register():
pass

@@ -21,11 +21,12 @@
import bpy
from bpy.props import *


def write_svg(fw, mesh, image_width, image_height, face_iter):
# for making an XML compatible string
from xml.sax.saxutils import escape
from os.path import basename

fw('<?xml version="1.0" standalone="no"?>\n')
fw('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" \n')
fw(' "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">\n')
@@ -126,7 +127,6 @@ def write_png(fw, mesh_source, image_width, image_height, face_iter):
for f in mesh_source.faces:
tot_verts += len(f.vertices)


faces_source = mesh_source.faces

# get unique UV's incase there are many overlapping which slow down filling.
@@ -145,7 +145,6 @@ def write_png(fw, mesh_source, image_width, image_height, face_iter):
mesh_new_materials = []
mesh_new_face_vertices = []


current_vert = 0

for face_data in face_hash_3:
@@ -167,7 +166,7 @@ def write_png(fw, mesh_source, image_width, image_height, face_iter):
mesh.faces.foreach_set("material_index", mesh_new_materials)

mesh.update(calc_edges=True)

obj_solid = bpy.data.objects.new("uv_temp_solid", mesh)
obj_wire = bpy.data.objects.new("uv_temp_wire", mesh)
base_solid = scene.objects.link(obj_solid)
@@ -177,11 +176,10 @@ def write_png(fw, mesh_source, image_width, image_height, face_iter):

# place behind the wire
obj_solid.location = 0, 0, -1

obj_wire.material_slots[0].link = 'OBJECT'
obj_wire.material_slots[0].material = material_wire


# setup the camera
cam = bpy.data.cameras.new("uv_temp")
cam.type = 'ORTHO'
@@ -204,7 +202,6 @@ def write_png(fw, mesh_source, image_width, image_height, face_iter):
material_wire.use_shadeless = True
material_wire.diffuse_color = 0, 0, 0


# scene render settings
scene.render.use_raytrace = False
scene.render.alpha_mode = 'STRAIGHT'
@@ -217,11 +214,11 @@ def write_png(fw, mesh_source, image_width, image_height, face_iter):
if image_width > image_height:
scene.render.pixel_aspect_y = image_width / image_height
elif image_width < image_height:
-scene.render.pixel_aspect_x = image_height /image_width
+scene.render.pixel_aspect_x = image_height / image_width

scene.frame_start = 1
scene.frame_end = 1

scene.render.file_format = 'PNG'
scene.render.filepath = filepath

@@ -236,13 +233,12 @@ def write_png(fw, mesh_source, image_width, image_height, face_iter):

bpy.data.cameras.remove(cam)
bpy.data.meshes.remove(mesh)

bpy.data.materials.remove(material_wire)
for mat_solid in material_solids:
bpy.data.materials.remove(mat_solid)



class ExportUVLayout(bpy.types.Operator):
"""Export UV layout to file"""

@@ -328,7 +324,6 @@ class ExportUVLayout(bpy.types.Operator):
if is_editmode:
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)


mesh = obj.data

mode = self.mode

@@ -225,7 +225,7 @@ def islandIntersectUvIsland(source, target, SourceOffset):
# Edge intersect test
for ed in edgeLoopsSource:
for seg in edgeLoopsTarget:
-i = geometry.LineIntersect2D(\
+i = geometry.intersect_line_line_2d(\
seg[0], seg[1], SourceOffset+ed[0], SourceOffset+ed[1])
if i:
return 1 # LINE INTERSECTION
@@ -739,7 +739,7 @@ def packIslands(islandList):
#XXX Window.DrawProgressBar(0.7, 'Packing %i UV Islands...' % len(packBoxes) )

time1 = time.time()
-packWidth, packHeight = geometry.BoxPack2D(packBoxes)
+packWidth, packHeight = geometry.box_pack_2d(packBoxes)

# print 'Box Packing Time:', time.time() - time1

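The smart-project hunks above adopt the renamed 2D helpers. A minimal sketch, assuming the 2.5x mathutils.geometry API; the coordinates and boxes here are illustrative only:

    from mathutils import Vector
    from mathutils.geometry import intersect_line_line_2d, box_pack_2d

    # Segment/segment intersection as used by the island overlap test;
    # returns the intersection point, or None when the segments miss.
    hit = intersect_line_line_2d(Vector((0.0, 0.0)), Vector((1.0, 1.0)),
                                 Vector((0.0, 1.0)), Vector((1.0, 0.0)))

    # Box packing as used by packIslands(); each box is [x, y, width, height],
    # is repositioned in place, and the packed bounds are returned.
    boxes = [[0.0, 0.0, 1.0, 1.0], [0.0, 0.0, 2.0, 0.5]]
    pack_width, pack_height = box_pack_2d(boxes)
    print(hit, pack_width, pack_height)
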
@@ -85,10 +85,10 @@ class BRUSH_OT_set_active_number(bpy.types.Operator):
number = IntProperty(name="number",
description="Brush number")

-_attr_dict = {"sculpt" : "use_paint_sculpt",
+_attr_dict = {"sculpt": "use_paint_sculpt",
"vertex_paint": "use_paint_vertex",
"weight_paint": "use_paint_weight",
-"image_paint" : "use_paint_texture"}
+"image_paint": "use_paint_texture"}

def execute(self, context):
attr = self._attr_dict.get(self.mode)
@@ -102,6 +102,7 @@ class BRUSH_OT_set_active_number(bpy.types.Operator):

return {'CANCELLED'}


class WM_OT_context_set_boolean(bpy.types.Operator):
'''Set a context value.'''
bl_idname = "wm.context_set_boolean"
@@ -668,7 +669,6 @@ class WM_OT_doc_edit(bpy.types.Operator):
return wm.invoke_props_dialog(self, width=600)



from bpy.props import *


@@ -689,7 +689,7 @@ class WM_OT_properties_edit(bpy.types.Operator):
'''Internal use (edit a property data_path)'''
bl_idname = "wm.properties_edit"
bl_label = "Edit Property"
bl_options = {'REGISTER'} # only because invoke_props_popup requires.

data_path = rna_path
property = rna_property
@@ -803,6 +803,7 @@ class WM_OT_keyconfig_activate(bpy.types.Operator):
bpy.utils.keyconfig_set(self.filepath)
return {'FINISHED'}


class WM_OT_sysinfo(bpy.types.Operator):
'''Generate System Info'''
bl_idname = "wm.sysinfo"
@@ -813,6 +814,7 @@ class WM_OT_sysinfo(bpy.types.Operator):
sys_info.write_sysinfo(self)
return {'FINISHED'}


def register():
pass

@@ -43,9 +43,9 @@ class DATA_PT_context_arm(ArmatureButtonsPanel, bpy.types.Panel):
|
|||||||
space = context.space_data
|
space = context.space_data
|
||||||
|
|
||||||
if ob:
|
if ob:
|
||||||
layout.template_ID(ob, "data", unlink="None")
|
layout.template_ID(ob, "data")
|
||||||
elif arm:
|
elif arm:
|
||||||
layout.template_ID(space, "pin_id", unlink="None")
|
layout.template_ID(space, "pin_id")
|
||||||
|
|
||||||
|
|
||||||
class DATA_PT_skeleton(ArmatureButtonsPanel, bpy.types.Panel):
|
class DATA_PT_skeleton(ArmatureButtonsPanel, bpy.types.Panel):
|
||||||
|
|||||||
@@ -54,7 +54,7 @@ class BONE_PT_transform(BoneButtonsPanel, bpy.types.Panel):
|
|||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
if context.edit_bone:
|
if context.edit_bone:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
ob = context.object
|
ob = context.object
|
||||||
return ob and ob.mode == 'POSE' and context.bone
|
return ob and ob.mode == 'POSE' and context.bone
|
||||||
|
|
||||||
|
|||||||
@@ -60,9 +60,9 @@ class DATA_PT_context_curve(CurveButtonsPanel, bpy.types.Panel):
|
|||||||
space = context.space_data
|
space = context.space_data
|
||||||
|
|
||||||
if ob:
|
if ob:
|
||||||
layout.template_ID(ob, "data", unlink="None")
|
layout.template_ID(ob, "data")
|
||||||
elif curve:
|
elif curve:
|
||||||
layout.template_ID(space, "pin_id", unlink="None") # XXX: broken
|
layout.template_ID(space, "pin_id") # XXX: broken
|
||||||
|
|
||||||
|
|
||||||
class DATA_PT_shape_curve(CurveButtonsPanel, bpy.types.Panel):
|
class DATA_PT_shape_curve(CurveButtonsPanel, bpy.types.Panel):
|
||||||
|
|||||||
@@ -71,9 +71,9 @@ class DATA_PT_context_mesh(MeshButtonsPanel, bpy.types.Panel):
|
|||||||
space = context.space_data
|
space = context.space_data
|
||||||
|
|
||||||
if ob:
|
if ob:
|
||||||
layout.template_ID(ob, "data", unlink="None")
|
layout.template_ID(ob, "data")
|
||||||
elif mesh:
|
elif mesh:
|
||||||
layout.template_ID(space, "pin_id", unlink="None")
|
layout.template_ID(space, "pin_id")
|
||||||
|
|
||||||
|
|
||||||
class DATA_PT_normals(MeshButtonsPanel, bpy.types.Panel):
|
class DATA_PT_normals(MeshButtonsPanel, bpy.types.Panel):
|
||||||
|
|||||||
@@ -43,9 +43,9 @@ class DATA_PT_context_metaball(DataButtonsPanel, bpy.types.Panel):
|
|||||||
space = context.space_data
|
space = context.space_data
|
||||||
|
|
||||||
if ob:
|
if ob:
|
||||||
layout.template_ID(ob, "data", unlink="None")
|
layout.template_ID(ob, "data")
|
||||||
elif mball:
|
elif mball:
|
||||||
layout.template_ID(space, "pin_id", unlink="None")
|
layout.template_ID(space, "pin_id")
|
||||||
|
|
||||||
|
|
||||||
class DATA_PT_metaball(DataButtonsPanel, bpy.types.Panel):
|
class DATA_PT_metaball(DataButtonsPanel, bpy.types.Panel):
|
||||||
|
|||||||
@@ -60,15 +60,15 @@ class DATA_PT_modifiers(ModifierButtonsPanel, bpy.types.Panel):
|
|||||||
col.prop(md, "use_bone_envelopes", text="Bone Envelopes")
|
col.prop(md, "use_bone_envelopes", text="Bone Envelopes")
|
||||||
|
|
||||||
split = layout.split()
|
split = layout.split()
|
||||||
|
|
||||||
col = split.column()
|
col = split.split()
|
||||||
col.prop(md, "use_multi_modifier")
|
col.prop_search(md, "vertex_group", ob, "vertex_groups", text="")
|
||||||
col = col.split()
|
|
||||||
col.active = md.use_multi_modifier
|
|
||||||
col.prop_search(md, "vertex_group_multi_modifier", ob, "vertex_groups", text="")
|
|
||||||
sub = col.column()
|
sub = col.column()
|
||||||
sub.active = bool(md.vertex_group_multi_modifier)
|
sub.active = bool(md.vertex_group)
|
||||||
sub.prop(md, "invert_vertex_group_multi_modifier")
|
sub.prop(md, "invert_vertex_group")
|
||||||
|
|
||||||
|
col = layout.column()
|
||||||
|
col.prop(md, "use_multi_modifier")
|
||||||
|
|
||||||
def ARRAY(self, layout, ob, md):
|
def ARRAY(self, layout, ob, md):
|
||||||
layout.prop(md, "fit_type")
|
layout.prop(md, "fit_type")
|
||||||
|
|||||||
@@ -37,7 +37,7 @@ class OBJECT_PT_context_object(ObjectButtonsPanel, bpy.types.Panel):
        ob = context.object

        if space.use_pin_id:
-            layout.template_ID(space, "pin_id", unlink="None")
+            layout.template_ID(space, "pin_id")
        else:
            row = layout.row()
            row.label(text="", icon='OBJECT_DATA')

@@ -68,7 +68,8 @@ class OBJECT_PT_transform(ObjectButtonsPanel, bpy.types.Panel):
        row.column().prop(ob, "scale")

        layout.prop(ob, "rotation_mode")
+

class OBJECT_PT_delta_transform(ObjectButtonsPanel, bpy.types.Panel):
    bl_label = "Delta Transform"
    bl_options = {'DEFAULT_CLOSED'}
@@ -61,7 +61,7 @@ class ConstraintButtonsPanel():
            if con.target.type == 'ARMATURE':
                layout.prop_search(con, "subtarget", con.target.data, "bones", text="Bone")

-                if con.type in ('COPY_LOCATION', 'STRETCH_TO', 'TRACK_TO', 'PIVOT'):
+                if hasattr(con, "head_tail"):
                    row = layout.row()
                    row.label(text="Head/Tail:")
                    row.prop(con, "head_tail", text="")
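
The hunk above replaces a hard-coded list of constraint types with a hasattr() check, so any constraint that exposes a head_tail property gets the Head/Tail row automatically. A short sketch of the same pattern (the helper name is hypothetical):

    def draw_head_tail(layout, con):  # hypothetical helper, mirrors the change above
        # capability check instead of maintaining a type whitelist
        if hasattr(con, "head_tail"):
            row = layout.row()
            row.label(text="Head/Tail:")
            row.prop(con, "head_tail", text="")
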
@@ -550,17 +550,16 @@ class ConstraintButtonsPanel():
            col.prop(con, "axis_x", text="X")
            col.prop(con, "axis_y", text="Y")
            col.prop(con, "axis_z", text="Z")
-

        if con.pivot_type == 'CONE_TWIST':
            layout.label(text="Limits:")
            split = layout.split()

            col = split.column(align=True)
            col.prop(con, "use_angular_limit_x", text="Angular X")
            col.prop(con, "use_angular_limit_y", text="Angular Y")
            col.prop(con, "use_angular_limit_z", text="Angular Z")

            col = split.column()
            col.prop(con, "limit_cone_min", text="")
            col = split.column()

@@ -569,7 +568,7 @@ class ConstraintButtonsPanel():
        elif con.pivot_type == 'GENERIC_6_DOF':
            layout.label(text="Limits:")
            split = layout.split()

            col = split.column(align=True)
            col.prop(con, "use_limit_x", text="X")
            col.prop(con, "use_limit_y", text="Y")

@@ -577,12 +576,12 @@ class ConstraintButtonsPanel():
            col.prop(con, "use_angular_limit_x", text="Angular X")
            col.prop(con, "use_angular_limit_y", text="Angular Y")
            col.prop(con, "use_angular_limit_z", text="Angular Z")

            col = split.column()
            col.prop(con, "limit_generic_min", text="")
            col = split.column()
            col.prop(con, "limit_generic_max", text="")

    def CLAMP_TO(self, context, layout, con):
        self.target_template(layout, con)

@@ -69,22 +69,26 @@ def point_cache_ui(self, context, cache, enabled, cachetype):
            row.prop(cache, "frame_end")
        if cachetype not in ('SMOKE', 'CLOTH'):
            row.prop(cache, "frame_step")
+            row.prop(cache, "use_quick_cache")
    if cachetype != 'SMOKE':
        layout.label(text=cache.info)

    if cachetype != 'SMOKE':
        split = layout.split()
+        split.enabled = enabled and (not bpy.data.is_dirty)

        col = split.column()
-        col.enabled = enabled
-        col.prop(cache, "use_quick_cache")

-        col = split.column()
-        col.enabled = (not bpy.data.is_dirty)
        col.prop(cache, "use_disk_cache")
-        sub = col.column()
-        sub.enabled = cache.use_disk_cache
-        sub.prop(cache, "use_library_path", "Use Lib Path")
+        col = split.column()
+        col.active = cache.use_disk_cache
+        col.prop(cache, "use_library_path", "Use Lib Path")
+
+        row = layout.row()
+        row.enabled = enabled and (not bpy.data.is_dirty)
+        row.active = cache.use_disk_cache
+        row.label(text="Compression:")
+        row.prop(cache, "compression", expand=True)

        layout.separator()

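
The point_cache_ui() rewrite above replaces the per-column enabled flags with one split.enabled gate plus active-based greying, and moves the compression controls onto their own row. A rough sketch of that layout pattern under the same assumptions (the function name is hypothetical):

    import bpy

    def draw_cache_storage(layout, cache, enabled):  # hypothetical sketch of the new layout
        split = layout.split()
        split.enabled = enabled and (not bpy.data.is_dirty)

        col = split.column()
        col.prop(cache, "use_disk_cache")

        col = split.column()
        col.active = cache.use_disk_cache          # greyed until disk cache is enabled
        col.prop(cache, "use_library_path", text="Use Lib Path")

        row = layout.row()
        row.enabled = enabled and (not bpy.data.is_dirty)
        row.active = cache.use_disk_cache
        row.label(text="Compression:")
        row.prop(cache, "compression", expand=True)
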
@@ -629,7 +629,7 @@ class RENDER_PT_output(RenderButtonsPanel, bpy.types.Panel):
            col.prop(rd, "jpeg2k_ycc")

        elif file_format in ('CINEON', 'DPX'):

            split = layout.split()
            split.label("FIXME: hard coded Non-Linear, Gamma:1.0")
            '''

@@ -783,7 +783,7 @@ class RENDER_PT_motion_blur(RenderButtonsPanel, bpy.types.Panel):
    bl_label = "Sampled Motion Blur"
    bl_options = {'DEFAULT_CLOSED'}
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        rd = context.scene.render

@@ -819,7 +819,7 @@ class RENDER_PT_dimensions(RenderButtonsPanel, bpy.types.Panel):
        row.menu("RENDER_MT_presets", text=bpy.types.RENDER_MT_presets.bl_label)
        row.operator("render.preset_add", text="", icon="ZOOMIN")
        row.operator("render.preset_add", text="", icon="ZOOMOUT").remove_active = True

        split = layout.split()

        col = split.column()
@@ -229,7 +229,7 @@ class ANIM_OT_keying_set_export(bpy.types.Operator):
        f.write("# Keying Set: %s\n" % ks.name)

        f.write("import bpy\n\n")
        f.write("scene= bpy.data.scenes[0]\n\n") # XXX, why not use the current scene?

        # Add KeyingSet and set general settings
        f.write("# Keying Set Level declarations\n")

@@ -238,7 +238,7 @@ class ANIM_OT_keying_set_export(bpy.types.Operator):
        if not ks.is_path_absolute:
            f.write("ks.is_path_absolute = False\n")
        f.write("\n")

        f.write("ks.bl_options = %r\n" % ks.bl_options)
        f.write("\n")

@@ -103,7 +103,7 @@ class TEXTURE_PT_context_texture(TextureButtonsPanel, bpy.types.Panel):

        if tex_collection:
            row = layout.row()

            row.template_list(idblock, "texture_slots", idblock, "active_texture_index", rows=2)

            col = row.column(align=True)

@@ -143,7 +143,6 @@ class TEXTURE_PT_context_texture(TextureButtonsPanel, bpy.types.Panel):
            split.prop(tex, "type", text="")

-

class TEXTURE_PT_preview(TextureButtonsPanel, bpy.types.Panel):
    bl_label = "Preview"
    COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
@@ -395,8 +394,9 @@ class TEXTURE_PT_image_sampling(TextureTypePanel, bpy.types.Panel):
    def draw(self, context):
        layout = self.layout

+        idblock = context_tex_datablock(context)
        tex = context.texture
-        # slot = context.texture_slot
+        slot = context.texture_slot

        split = layout.split()

@@ -410,10 +410,12 @@ class TEXTURE_PT_image_sampling(TextureTypePanel, bpy.types.Panel):

        col = split.column()

-        col.prop(tex, "use_normal_map")
-        row = col.row()
-        row.active = tex.use_normal_map
-        row.prop(tex, "normal_space", text="")
+        #Only for Material based textures, not for Lamp/World...
+        if isinstance(idblock, bpy.types.Material):
+            col.prop(tex, "use_normal_map")
+            row = col.row()
+            row.active = tex.use_normal_map
+            row.prop(slot, "normal_map_space", text="")

        col.prop(tex, "use_mipmap")
        row = col.row()
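
The sampling hunks above fetch the owning ID block via context_tex_datablock(), gate the normal-map controls on that block being a Material, and read the space from the texture slot rather than the texture. The gating pattern, sketched under the same assumptions (the helper name is hypothetical):

    import bpy

    def draw_normal_map(col, idblock, tex, slot):  # hypothetical helper
        # normal-map space only applies to Material textures, not Lamp/World ones
        if isinstance(idblock, bpy.types.Material):
            col.prop(tex, "use_normal_map")
            row = col.row()
            row.active = tex.use_normal_map
            row.prop(slot, "normal_map_space", text="")
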
@@ -897,7 +899,7 @@ class TEXTURE_PT_influence(TextureSlotPanel, bpy.types.Panel):
            sub = row.row()
            sub.active = getattr(tex, toggle)
            sub.prop(tex, factor, text=name, slider=True)
            return sub # XXX, temp. use_map_normal needs to override.

        if isinstance(idblock, bpy.types.Material):
            if idblock.type in ('SURFACE', 'WIRE'):

@@ -936,13 +938,13 @@ class TEXTURE_PT_influence(TextureSlotPanel, bpy.types.Panel):
                #sub.prop(tex, "default_value", text="Amount", slider=True)
            elif idblock.type == 'HALO':
                layout.label(text="Halo:")

                split = layout.split()

                col = split.column()
                factor_but(col, "use_map_color_diffuse", "diffuse_color_factor", "Color")
                factor_but(col, "use_map_alpha", "alpha_factor", "Alpha")

                col = split.column()
                factor_but(col, "use_map_raymir", "raymir_factor", "Size")
                factor_but(col, "use_map_hardness", "hardness_factor", "Hardness")
@@ -152,7 +152,7 @@ class WORLD_PT_indirect_lighting(WorldButtonsPanel, bpy.types.Panel):
        split = layout.split()
        split.prop(light, "indirect_factor", text="Factor")
        split.prop(light, "indirect_bounces", text="Bounces")

        if light.gather_method == 'RAYTRACE':
            layout.label(text="Only works with Approximate gather method")

@@ -20,6 +20,7 @@

import bpy

+
# used for DopeSheet, NLA, and Graph Editors
def dopesheet_filter(layout, context):
    dopesheet = context.space_data.dopesheet

@@ -102,7 +103,7 @@ class DOPESHEET_HT_header(bpy.types.Header):
        if st.mode == 'DOPESHEET':
            dopesheet_filter(layout, context)

-        elif st.mode in ('ACTION','SHAPEKEY'):
+        elif st.mode in ('ACTION', 'SHAPEKEY'):
            layout.template_ID(st, "action", new="action.new")

        if st.mode != 'GPENCIL':

@@ -57,9 +57,9 @@ class FILEBROWSER_HT_header(bpy.types.Header):

        row = layout.row(align=True)
        row.active = params.use_filter

        row.prop(params, "use_filter_folder", text="")

        if params.filter_glob:
            #if st.operator and hasattr(st.operator, "filter_glob"):
            # row.prop(params, "filter_glob", text="")
@@ -188,7 +188,7 @@ class GRAPH_MT_key(bpy.types.Menu):
        layout.separator()
        layout.operator_menu_enum("graph.handle_type", "type", text="Handle Type")
        layout.operator_menu_enum("graph.interpolation_type", "type", text="Interpolation Mode")

        layout.separator()
        layout.operator("graph.clean")
        layout.operator("graph.sample")

@@ -250,7 +250,8 @@ class IMAGE_MT_uvs(bpy.types.Menu):
        layout.separator()

        layout.menu("IMAGE_MT_uvs_showhide")

+
class IMAGE_MT_uvs_select_mode(bpy.types.Menu):
    bl_label = "UV Select Mode"

@@ -259,9 +260,9 @@ class IMAGE_MT_uvs_select_mode(bpy.types.Menu):

        layout.operator_context = 'INVOKE_REGION_WIN'
        toolsettings = context.tool_settings

        # do smart things depending on whether uv_select_sync is on

        if toolsettings.use_uv_select_sync:
            prop = layout.operator("wm.context_set_value", text="Vertex", icon='VERTEXSEL')
            prop.value = "(True, False, False)"

@@ -287,7 +288,7 @@ class IMAGE_MT_uvs_select_mode(bpy.types.Menu):
            prop = layout.operator("wm.context_set_string", text="Face", icon='UV_FACESEL')
            prop.value = "FACE"
            prop.data_path = "tool_settings.uv_select_mode"

            prop = layout.operator("wm.context_set_string", text="Island", icon='UV_ISLANDSEL')
            prop.value = "ISLAND"
            prop.data_path = "tool_settings.uv_select_mode"
@@ -69,7 +69,6 @@ class INFO_HT_header(bpy.types.Header):
        # XXX: this should be right-aligned to the RHS of the region
        layout.operator("wm.window_fullscreen_toggle", icon='FULLSCREEN_ENTER', text="")

-
        # XXX: BEFORE RELEASE, MOVE FILE MENU OUT OF INFO!!!
        """
        row = layout.row(align=True)

@@ -82,7 +81,7 @@ class INFO_HT_header(bpy.types.Header):
        row = layout.row()
        row.enabled = sinfo.show_report_operator
        row.operator("info.report_replay")

        row.menu("INFO_MT_report")
        """


@@ -220,6 +219,7 @@ class INFO_MT_curve_add(bpy.types.Menu):
        layout.operator("curve.primitive_nurbs_circle_add", icon='CURVE_NCIRCLE', text="Nurbs Circle")
        layout.operator("curve.primitive_nurbs_path_add", icon='CURVE_PATH', text="Path")

+
class INFO_MT_edit_curve_add(bpy.types.Menu):
    bl_idname = "INFO_MT_edit_curve_add"
    bl_label = "Add"

@@ -231,9 +231,9 @@ class INFO_MT_edit_curve_add(bpy.types.Menu):
        layout.operator_context = 'INVOKE_REGION_WIN'

        if is_surf:
            INFO_MT_surface_add.draw(self, context)
        else:
            INFO_MT_curve_add.draw(self, context)


class INFO_MT_surface_add(bpy.types.Menu):

@@ -353,7 +353,7 @@ class INFO_MT_help(bpy.types.Menu):
        layout = self.layout

        layout.operator("wm.url_open", text="Manual", icon='HELP').url = 'http://wiki.blender.org/index.php/Doc:Manual'
-        layout.operator("wm.url_open", text="Release Log", icon='URL').url = 'http://www.blender.org/development/release-logs/blender-254-beta/'
+        layout.operator("wm.url_open", text="Release Log", icon='URL').url = 'http://www.blender.org/development/release-logs/blender-256-beta/'

        layout.separator()

@@ -134,6 +134,7 @@ class SEQUENCER_MT_view(bpy.types.Menu):
        if (st.view_type == 'PREVIEW') or (st.view_type == 'SEQUENCER_PREVIEW'):
            layout.operator_context = 'INVOKE_REGION_PREVIEW'
            layout.operator("sequencer.view_all_preview", text='Fit preview in window')
+            layout.operator("sequencer.view_zoom_ratio", text='Show preview 1:1').ratio = 1.0
            layout.operator_context = 'INVOKE_DEFAULT'

            # # XXX, invokes in the header view

@@ -389,7 +390,7 @@ class SEQUENCER_PT_edit(SequencerButtonsPanel, bpy.types.Panel):
            col.label(text="Frame Still %d:%d" % (strip.frame_still_start, strip.frame_still_end))

        elem = False

        if strip.type == 'IMAGE':
            elem = strip.getStripElem(frame_current)
        elif strip.type == 'MOVIE':

@@ -607,7 +608,12 @@ class SEQUENCER_PT_input(SequencerButtonsPanel, bpy.types.Panel):
            col.prop(strip.crop, "max_x")

        col = layout.column(align=True)
-        col.label(text="Trim Duration:")
+        col.label(text="Trim Duration (hard):")
+        col.prop(strip, "animation_offset_start", text="Start")
+        col.prop(strip, "animation_offset_end", text="End")
+
+        col = layout.column(align=True)
+        col.label(text="Trim Duration (soft):")
        col.prop(strip, "frame_offset_start", text="Start")
        col.prop(strip, "frame_offset_end", text="End")

@@ -678,7 +684,7 @@ class SEQUENCER_PT_scene(SequencerButtonsPanel, bpy.types.Panel):
        layout.template_ID(strip, "scene_camera")

        sce = strip.scene
-        layout.label(text="Original frame range: "+ str(sce.frame_start) +" - "+ str(sce.frame_end) + " (" + str(sce.frame_end-sce.frame_start+1) + ")")
+        layout.label(text="Original frame range: %d-%d (%d)" % (sce.frame_start, sce.frame_end, sce.frame_end - sce.frame_start + 1))


class SEQUENCER_PT_filter(SequencerButtonsPanel, bpy.types.Panel):

@@ -787,7 +793,7 @@ class SEQUENCER_PT_preview(SequencerButtonsPanel_Output, bpy.types.Panel):
        render = context.scene.render

        col = layout.column()
-        col.active = False #Currently only opengl preview works!
+        col.active = False  # Currently only opengl preview works!
        col.prop(render, "use_sequencer_gl_preview", text="Open GL Preview")
        col = layout.column()
        #col.active = render.use_sequencer_gl_preview
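
The SEQUENCER_PT_scene hunk above swaps string concatenation for a single %-format when building the frame-range label. For example:

    frame_start, frame_end = 10, 250
    text = "Original frame range: %d-%d (%d)" % (frame_start, frame_end, frame_end - frame_start + 1)
    # -> "Original frame range: 10-250 (241)"
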
@@ -74,16 +74,16 @@ class TIME_HT_header(bpy.types.Header):
        row.operator("screen.keyframe_jump", text="", icon='NEXT_KEYFRAME').next = True
        row.operator("screen.frame_jump", text="", icon='FF').end = True

+        layout.prop(scene, "sync_mode", text="")
+
+        layout.separator()
+
        row = layout.row(align=True)
        row.prop(tools, "use_keyframe_insert_auto", text="", toggle=True)
        if screen.is_animation_playing and tools.use_keyframe_insert_auto:
            subsub = row.row()
            subsub.prop(tools, "use_record_with_nla", toggle=True)

-        layout.prop(scene, "sync_mode", text="")
-
-        layout.separator()
-
        row = layout.row(align=True)
        row.prop_search(scene.keying_sets_all, "active", scene, "keying_sets_all", text="")
        row.operator("anim.keyframe_insert", text="", icon='KEY_HLT')
@@ -815,6 +815,7 @@ class USERPREF_PT_input(InputKeyMapPanel):

        #print("runtime", time.time() - start)

+
class USERPREF_MT_addons_dev_guides(bpy.types.Menu):
    bl_label = "Addons develoment guides"

@@ -855,21 +856,21 @@ class USERPREF_PT_addons(bpy.types.Panel):

        modules = []
        loaded_modules = set()

        # RELEASE SCRIPTS: official scripts distributed in Blender releases
        paths = bpy.utils.script_paths("addons")

        # CONTRIB SCRIPTS: good for testing but not official scripts yet
        # if folder addons_contrib/ exists, scripts in there will be loaded too
        paths += bpy.utils.script_paths("addons_contrib")

        # EXTERN SCRIPTS: external projects scripts
        # if folder addons_extern/ exists, scripts in there will be loaded too
        paths += bpy.utils.script_paths("addons_extern")

        if bpy.app.debug:
            t_main = time.time()

        # fake module importing
        def fake_module(mod_name, mod_path, speedy=True):
            if bpy.app.debug:

@@ -960,11 +961,11 @@ class USERPREF_PT_addons(bpy.types.Panel):
        col = split.column()
        col.prop(context.window_manager, "addon_search", text="", icon='VIEWZOOM')
        col.prop(context.window_manager, "addon_filter", text="Filter", expand=True)

        # menu to open webpages with addons development guides
        col.separator()
-        col.label(text = ' Online Documentation', icon = 'INFO')
-        col.menu('USERPREF_MT_addons_dev_guides', text='Addons Developer Guides')
+        col.label(text=" Online Documentation", icon='INFO')
+        col.menu("USERPREF_MT_addons_dev_guides", text="Addons Developer Guides")

        col = split.column()

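
The add-on listing above collects its search directories with bpy.utils.script_paths(). A minimal illustration of the same lookup (the printed paths depend on the installation):

    import bpy

    paths = bpy.utils.script_paths("addons")
    paths += bpy.utils.script_paths("addons_contrib")
    paths += bpy.utils.script_paths("addons_extern")
    for path in paths:
        print(path)
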
@@ -128,6 +128,7 @@ class USERPREF_MT_keyconfigs(bpy.types.Menu):
    bl_label = "KeyPresets"
    preset_subdir = "keyconfig"
    preset_operator = "wm.keyconfig_activate"
+
    def draw(self, context):
        props = self.layout.operator("wm.context_set_value", text="Blender (default)")
        props.data_path = "window_manager.keyconfigs.active"

@@ -379,7 +380,7 @@ class InputKeyMapPanel(bpy.types.Panel):
        subcol = subsplit.column()

        row = subcol.row(align=True)

        #row.prop_search(wm.keyconfigs, "active", wm, "keyconfigs", text="Key Config:")
        text = bpy.path.display_name(context.window_manager.keyconfigs.active.name)
        if not text:

@@ -387,7 +388,7 @@ class InputKeyMapPanel(bpy.types.Panel):
        row.menu("USERPREF_MT_keyconfigs", text=text)
        row.operator("wm.keyconfig_preset_add", text="", icon="ZOOMIN")
        row.operator("wm.keyconfig_preset_add", text="", icon="ZOOMOUT").remove_active = True

        # layout.context_pointer_set("keyconfig", wm.keyconfigs.active)
        # row.operator("wm.keyconfig_remove", text="", icon='X')

@@ -605,7 +606,7 @@ class WM_OT_keyconfig_export(bpy.types.Operator):
        f.write("import bpy\n")
        f.write("import os\n\n")
        f.write("wm = bpy.context.window_manager\n")
        f.write("kc = wm.keyconfigs.new(os.path.splitext(os.path.basename(__file__))[0])\n\n") # keymap must be created by caller

        # Generate a list of keymaps to export:
        #
@@ -647,6 +647,7 @@ class VIEW3D_MT_select_face(bpy.types.Menu): # XXX no matching enum

# ********** Object menu **********

+
class VIEW3D_MT_object(bpy.types.Menu):
    bl_context = "objectmode"
    bl_label = "Object"

@@ -1418,7 +1419,7 @@ class VIEW3D_MT_edit_mesh_extrude(bpy.types.Menu):
        if mesh.total_edge_sel and (select_mode[0] or select_mode[1]):
            menu += ["EDGE"]
        if mesh.total_vert_sel and select_mode[0]:
            menu += ["VERT"]

        # should never get here
        return menu

@@ -1077,9 +1077,9 @@ class VIEW3D_PT_tools_weightpaint(View3DPanel, bpy.types.Panel):

    def draw(self, context):
        layout = self.layout

        ob = context.active_object

        col = layout.column()
        col.active = ob.vertex_groups.active != None
        col.operator("object.vertex_group_normalize_all", text="Normalize All")
@@ -48,7 +48,7 @@ def file_list_py(path):

def is_pep8(path):
    print(path)
-    f = open(path, 'r')
+    f = open(path, 'r', encoding="utf8")
    for i in range(PEP8_SEEK_COMMENT):
        line = f.readline()
        if line.startswith("# <pep8"):
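
The pep8-checker hunk above opens scripts with an explicit UTF-8 encoding so the scan does not depend on the system locale. A minimal illustration (the file name is hypothetical):

    with open("some_module.py", "r", encoding="utf8") as f:
        for i, line in enumerate(f):
            if line.startswith("# <pep8"):
                print("pep8 marker on line", i + 1)
                break
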
@@ -15,12 +15,12 @@
<p class="title"><b>Blender 2.5 Beta</b></p>
<p><br></p>
<p class="header"><b>About</b></p>
-<p class="body">Welcome to Blender, the free, open source 3D application for modeling, animation, rendering, compositing, video editing and game creation. Blender is available for Linux, Mac OS X, Windows, Solaris and Irix and has a large world-wide community.</p>
+<p class="body">Welcome to Blender, the free, open source 3D application for modeling, animation, rendering, compositing, video editing and game creation. Blender is available for Linux, Mac OS X, Windows, Solaris, FreeBSD and Irix and has a large world-wide community.</p>
<p class="body">Blender can be used freely for any purpose, including commercial use and distribution. It's free and open-source software, released under the GNU GPL licence. The entire source code is available on our website.</p>
<p class="body">For more information, visit <a href="http://www.blender.org">blender.org</a>.</p>
<p><br></p>
<p class="header"><b>2.5 Beta</b></p>
-<p class="body">The Blender Foundation and online developer community is proud to present Blender 2.5 Beta. This release is the third official beta release of the Blender 2.5 series, and represents the culmination of many years of redesign and development work. <a href="http://www.blender.org/development/release-logs/blender-254-beta/">More information about this release</a>.</p>
+<p class="body">The Blender Foundation and online developer community is proud to present Blender 2.5 Beta. This release is the fourth official beta release of the Blender 2.5 series, and represents the culmination of many years of redesign and development work. <a href="http://www.blender.org/development/release-logs/blender-254-beta/">More information about this release</a>.</p>
<p class="body">What to Expect:</p>
<p class="body"> • Big improvements - This is our most exciting version to date, already a significant improvement in many ways over 2.49</p>
<p class="body"> • Missing/Incomplete Features - Although most of it is there, not all functionality from pre-2.5 versions has been restored yet. Some functionality may be re-implemented a different way.</p>

@@ -53,7 +53,7 @@ Install scripts by putting them in the .blender/scripts inside your home folder.
<p class="header"><b>Links</b></p>
<p class="body">Users:</p>
<p class="body"> General information <a href="http://www.blender.org">www.blender.org</a> <br>
-Full release log <a href="http://www.blender.org/development/release-logs/blender-254-beta/">www.blender.org/development/release-logs/blender-254-beta/</a><br>
+Full release log <a href="http://www.blender.org/development/release-logs/blender-256-beta/">www.blender.org/development/release-logs/blender-256-beta/</a><br>
Tutorials <a href="http://www.blender.org/education-help/">www.blender.org/education-help/</a> <br>
Manual <a href="http://wiki.blender.org/index.php/Doc:Manual">wiki.blender.org/index.php/Doc:Manual</a><br>
User Forum <a href="http://www.blenderartists.org">www.blenderartists.org</a><br>
@@ -13,6 +13,7 @@
!include "nsDialogs.nsh"
!include "x64.nsh"

+RequestExecutionLevel user

SetCompressor /SOLID lzma

@@ -69,7 +70,10 @@ UninstallIcon "[RELDIR]\00.installer.ico"

Caption "Blender [VERSION] Installer"
OutFile "[DISTDIR]\..\blender-[VERSION]-windows[BITNESS].exe"
-InstallDir "$PROGRAMFILES[BITNESS]\Blender Foundation\Blender"
+;InstallDir "$PROGRAMFILES[BITNESS]\Blender Foundation\Blender"
+; Install to user profile dir. While it is non-standard, it allows
+; users to install without having to have the installer run in elevated mode.
+InstallDir "$PROFILE\Blender Foundation\Blender"

BrandingText "Blender Foundation | http://www.blender.org"
ComponentText "This will install Blender [VERSION] on your computer."
@@ -117,6 +117,8 @@ void action_groups_remove_channel(struct bAction *act, struct FCurve *fcu);
/* Find a group with the given name */
struct bActionGroup *action_groups_find_named(struct bAction *act, const char name[]);

+/* Clear all 'temp' flags on all groups */
+void action_groups_clear_tempflags(struct bAction *act);

/* Pose API ----------------- */

@@ -100,6 +100,14 @@ void BKE_animdata_fix_paths_rename(struct ID *owner_id, struct AnimData *adt, co
/* Fix all the paths for the entire database... */
void BKE_all_animdata_fix_paths_rename(char *prefix, char *oldName, char *newName);

+/* -------------------------------------- */
+
+/* Move animation data from src to destination if it's paths are based on basepaths */
+void BKE_animdata_separate_by_basepath(struct ID *srcID, struct ID *dstID, struct ListBase *basepaths);
+
+/* Move F-Curves from src to destination if it's path is based on basepath */
+void action_move_fcurves_by_basepath(struct bAction *srcAct, struct bAction *dstAct, const char basepath[]);
+
/* ************************************* */
/* Batch AnimData API */

@@ -44,8 +44,8 @@ struct ReportList;
struct Scene;
struct Main;

-#define BLENDER_VERSION 255
-#define BLENDER_SUBVERSION 1
+#define BLENDER_VERSION 256
+#define BLENDER_SUBVERSION 0

#define BLENDER_MINVERSION 250
#define BLENDER_MINSUBVERSION 0
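
With BLENDER_VERSION bumped to 256 and BLENDER_SUBVERSION reset to 0, Python add-ons can branch on bpy.app.version, which exposes these numbers as a (major, minor, subversion) tuple:

    import bpy

    if bpy.app.version >= (2, 56, 0):
        print("Blender 2.56 or newer")
    else:
        print("older Blender, version tuple:", bpy.app.version)
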
@@ -144,6 +144,7 @@ void CustomData_em_copy_data(const struct CustomData *source,
void CustomData_bmesh_copy_data(const struct CustomData *source,
                                struct CustomData *dest,void *src_block,
                                void **dest_block);
+void CustomData_em_validate_data(struct CustomData *data, void *block, int sub_elements);

/* frees data in a CustomData object
 * return 1 on success, 0 on failure

@@ -52,7 +52,7 @@ void defgroup_unique_name(struct bDeformGroup *dg, struct Object *ob);
struct MDeformWeight *defvert_find_index(const struct MDeformVert *dv, const int defgroup);
struct MDeformWeight *defvert_verify_index(struct MDeformVert *dv, const int defgroup);

-float defvert_find_weight(const struct MDeformVert *dvert, int group_num);
+float defvert_find_weight(const struct MDeformVert *dvert, const int group_num);
float defvert_array_find_weight_safe(const struct MDeformVert *dvert, int index, int group_num);

void defvert_copy(struct MDeformVert *dvert_r, const struct MDeformVert *dvert);

@@ -51,6 +51,7 @@ typedef struct Main {
    char name[240];
    short versionfile, subversionfile;
    short minversionfile, minsubversionfile;
+    int revision; /* svn revision of binary that saved file */

    struct Library *curlib;
    ListBase scene;

@@ -81,5 +81,13 @@ void multires_mdisp_smooth_bounds(struct MDisps *disps);
/* update multires data after topology changing */
void multires_topology_changed(struct Object *ob);

+/**** interpolation stuff ****/
+void old_mdisps_bilinear(float out[3], float (*disps)[3], int st, float u, float v);
+void mdisp_rot_crn_to_face(int S, int corners, int face_side, float x, float y, float *u, float *v);
+int mdisp_rot_face_to_crn(int corners, int face_side, float u, float v, float *x, float *y);
+void mdisp_apply_weight(int S, int corners, int x, int y, int face_side, float crn_weight[4][2], float *u_r, float *v_r);
+void mdisp_flip_disp(int S, int corners, float axis_x[2], float axis_y[2], float disp[3]);
+void mdisp_join_tris(struct MDisps *dst, struct MDisps *tri1, struct MDisps *tri2);
+
#endif

@@ -63,6 +63,13 @@
#define PTCACHE_TYPE_SMOKE_DOMAIN 3
#define PTCACHE_TYPE_SMOKE_HIGHRES 4

+/* high bits reserved for flags that need to be stored in file */
+#define PTCACHE_TYPEFLAG_COMPRESS (1<<16)
+#define PTCACHE_TYPEFLAG_EXTRADATA (1<<17)
+
+#define PTCACHE_TYPEFLAG_TYPEMASK 0x0000FFFF
+#define PTCACHE_TYPEFLAG_FLAGMASK 0xFFFF0000
+
/* PTCache read return code */
#define PTCACHE_READ_EXACT 1
#define PTCACHE_READ_INTERPOLATED 2
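
The new PTCACHE_TYPEFLAG_* defines pack per-file flags into the high 16 bits of the same integer that stores the cache type, and the two masks split them apart again. An illustration of the arithmetic (plain Python, not Blender source):

    PTCACHE_TYPE_SMOKE_HIGHRES = 4
    PTCACHE_TYPEFLAG_COMPRESS = 1 << 16
    PTCACHE_TYPEFLAG_TYPEMASK = 0x0000FFFF
    PTCACHE_TYPEFLAG_FLAGMASK = 0xFFFF0000

    typeflag = PTCACHE_TYPE_SMOKE_HIGHRES | PTCACHE_TYPEFLAG_COMPRESS
    assert typeflag & PTCACHE_TYPEFLAG_TYPEMASK == PTCACHE_TYPE_SMOKE_HIGHRES
    assert typeflag & PTCACHE_TYPEFLAG_FLAGMASK == PTCACHE_TYPEFLAG_COMPRESS
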
@@ -96,7 +103,7 @@ typedef struct PTCacheFile {
    FILE *fp;

    int totpoint, type, frame, old_format;
-    unsigned int data_types;
+    unsigned int data_types, flag;

    struct PTCacheData data;
    void *cur[BPHYS_TOT_DATA];

@@ -118,15 +125,23 @@ typedef struct PTCacheID {
    unsigned int data_types, info_types;

    /* copies point data to cache data */
-    int (*write_elem)(int index, void *calldata, void **data, int cfra);
+    int (*write_point)(int index, void *calldata, void **data, int cfra);
+    /* copies cache cata to point data */
+    void (*read_point)(int index, void *calldata, void **data, float cfra, float *old_data);
+    /* interpolated between previously read point data and cache data */
+    void (*interpolate_point)(int index, void *calldata, void **data, float cfra, float cfra1, float cfra2, float *old_data);
+
    /* copies point data to cache data */
    int (*write_stream)(PTCacheFile *pf, void *calldata);
    /* copies cache cata to point data */
-    void (*read_elem)(int index, void *calldata, void **data, float frs_sec, float cfra, float *old_data);
-    /* copies cache cata to point data */
    void (*read_stream)(PTCacheFile *pf, void *calldata);
-    /* interpolated between previously read point data and cache data */
-    void (*interpolate_elem)(int index, void *calldata, void **data, float frs_sec, float cfra, float cfra1, float cfra2, float *old_data);
+    /* copies custom extradata to cache data */
+    int (*write_extra_data)(void *calldata, struct PTCacheMem *pm, int cfra);
+    /* copies custom extradata to cache data */
+    int (*read_extra_data)(void *calldata, struct PTCacheMem *pm, float cfra);
+    /* copies custom extradata to cache data */
+    int (*interpolate_extra_data)(void *calldata, struct PTCacheMem *pm, float cfra, float cfra1, float cfra2);

    /* total number of simulated points (the cfra parameter is just for using same function pointer with totwrite) */
    int (*totpoint)(void *calldata, int cfra);

@@ -267,7 +282,7 @@ void BKE_ptcache_data_get(void **data, int type, int index, void *to);
void BKE_ptcache_data_set(void **data, int type, void *from);

/* Main cache reading call. */
-int BKE_ptcache_read(PTCacheID *pid, float cfra, float frs_sec);
+int BKE_ptcache_read(PTCacheID *pid, float cfra);

/* Main cache writing call. */
int BKE_ptcache_write(PTCacheID *pid, int cfra);
@@ -96,7 +96,6 @@ void make_local_action(bAction *act)
    if (act->id.us==1) {
        act->id.lib= 0;
        act->id.flag= LIB_LOCAL;
-        //make_local_action_channels(act);
        new_id(0, (ID *)act, 0);
        return;
    }

@@ -376,6 +375,20 @@ bActionGroup *action_groups_find_named (bAction *act, const char name[])
    return BLI_findstring(&act->groups, name, offsetof(bActionGroup, name));
}

+/* Clear all 'temp' flags on all groups */
+void action_groups_clear_tempflags (bAction *act)
+{
+    bActionGroup *agrp;
+
+    /* sanity checks */
+    if (ELEM(NULL, act, act->groups.first))
+        return;
+
+    /* flag clearing loop */
+    for (agrp = act->groups.first; agrp; agrp = agrp->next)
+        agrp->flag &= ~AGRP_TEMP;
+}
+
/* *************** Pose channels *************** */

/* usually used within a loop, so we got a N^2 slowdown */
@@ -277,6 +277,176 @@ void BKE_animdata_make_local(AnimData *adt)
        make_local_strips(&nlt->strips);
}

+/* Sub-ID Regrouping ------------------------------------------- */
+
+/* helper heuristic for determining if a path is compatible with the basepath
+ * < path: (str) full RNA-path from some data (usually an F-Curve) to compare
+ * < basepath: (str) shorter path fragment to look for
+ * > returns (bool) whether there is a match
+ */
+static short animpath_matches_basepath (const char path[], const char basepath[])
+{
+    /* we need start of path to be basepath */
+    return (path && basepath) && (strstr(path, basepath) == path);
+}
+
+/* Move F-Curves in src action to dst action, setting up all the necessary groups
+ * for this to happen, but only if the F-Curves being moved have the appropriate
+ * "base path".
+ * - This is used when data moves from one datablock to another, causing the
+ *   F-Curves to need to be moved over too
+ */
+void action_move_fcurves_by_basepath (bAction *srcAct, bAction *dstAct, const char basepath[])
+{
+    FCurve *fcu, *fcn=NULL;
+
+    /* sanity checks */
+    if ELEM3(NULL, srcAct, dstAct, basepath) {
+        if (G.f & G_DEBUG) {
+            printf("ERROR: action_partition_fcurves_by_basepath(%p, %p, %p) has insufficient info to work with\n",
+                srcAct, dstAct, basepath);
+        }
+        return;
+    }
+
+    /* clear 'temp' flags on all groups in src, as we'll be needing them later
+     * to identify groups that we've managed to empty out here
+     */
+    action_groups_clear_tempflags(srcAct);
+
+    /* iterate over all src F-Curves, moving over the ones that need to be moved */
+    for (fcu = srcAct->curves.first; fcu; fcu = fcn) {
+        /* store next pointer in case we move stuff */
+        fcn = fcu->next;
+
+        /* should F-Curve be moved over?
+         * - we only need the start of the path to match basepath
+         */
+        if (animpath_matches_basepath(fcu->rna_path, basepath)) {
+            bActionGroup *agrp = NULL;
+
+            /* if grouped... */
+            if (fcu->grp) {
+                /* make sure there will be a matching group on the other side for the migrants */
+                agrp = action_groups_find_named(dstAct, fcu->grp->name);
+
+                if (agrp == NULL) {
+                    /* add a new one with a similar name (usually will be the same though) */
+                    agrp = action_groups_add_new(dstAct, fcu->grp->name);
+                }
+
+                /* old groups should be tagged with 'temp' flags so they can be removed later
+                 * if we remove everything from them
+                 */
+                fcu->grp->flag |= AGRP_TEMP;
+            }
+
+            /* perform the migration now */
+            action_groups_remove_channel(srcAct, fcu);
+
+            if (agrp)
+                action_groups_add_channel(dstAct, agrp, fcu);
+            else
+                BLI_addtail(&dstAct->curves, fcu);
+        }
+    }
+
+    /* cleanup groups (if present) */
+    if (srcAct->groups.first) {
+        bActionGroup *agrp, *grp=NULL;
+
+        for (agrp = srcAct->groups.first; agrp; agrp = grp) {
+            grp = agrp->next;
+
+            /* only tagged groups need to be considered - clearing these tags or removing them */
+            if (agrp->flag & AGRP_TEMP) {
+                /* if group is empty and tagged, then we can remove as this operation
+                 * moved out all the channels that were formerly here
+                 */
+                if (agrp->channels.first == NULL)
+                    BLI_freelinkN(&srcAct->groups, agrp);
+                else
+                    agrp->flag &= ~AGRP_TEMP;
+            }
+        }
+    }
+}
+
+/* Transfer the animation data from srcID to dstID where the srcID
+ * animation data is based off "basepath", creating new AnimData and
+ * associated data as necessary
+ */
+void BKE_animdata_separate_by_basepath (ID *srcID, ID *dstID, ListBase *basepaths)
+{
+    AnimData *srcAdt=NULL, *dstAdt=NULL;
+    LinkData *ld;
+
+    /* sanity checks */
+    if ELEM(NULL, srcID, dstID) {
+        if (G.f & G_DEBUG)
+            printf("ERROR: no source or destination ID to separate AnimData with\n");
+        return;
+    }
+
+    /* get animdata from src, and create for destination (if needed) */
+    srcAdt = BKE_animdata_from_id(srcID);
+    dstAdt = BKE_id_add_animdata(dstID);
+
+    if ELEM(NULL, srcAdt, dstAdt) {
+        if (G.f & G_DEBUG)
+            printf("ERROR: no AnimData for this pair of ID's\n");
+        return;
+    }
+
+    /* active action */
+    if (srcAdt->action) {
+        /* set up an action if necessary, and name it in a similar way so that it can be easily found again */
+        if (dstAdt->action == NULL) {
+            dstAdt->action = add_empty_action(srcAdt->action->id.name+2);
+        }
+        else if (dstAdt->action == srcAdt->action) {
+            printf("Argh! Source and Destination share animation! ('%s' and '%s' both use '%s') Making new empty action\n",
+                srcID->name, dstID->name, srcAdt->action->id.name);
+
+            // TODO: review this...
+            id_us_min(&dstAdt->action->id);
+            dstAdt->action = add_empty_action(dstAdt->action->id.name+2);
+        }
+
+        /* loop over base paths, trying to fix for each one... */
+        for (ld = basepaths->first; ld; ld = ld->next) {
+            const char *basepath = (const char *)ld->data;
+            action_move_fcurves_by_basepath(srcAdt->action, dstAdt->action, basepath);
+        }
+    }
+
+    /* drivers */
+    if (srcAdt->drivers.first) {
+        FCurve *fcu, *fcn=NULL;
+
+        /* check each driver against all the base paths to see if any should go */
+        for (fcu = srcAdt->drivers.first; fcu; fcu = fcn) {
+            fcn = fcu->next;
+
+            /* try each basepath in turn, but stop on the first one which works */
+            for (ld = basepaths->first; ld; ld = ld->next) {
+                const char *basepath = (const char *)ld->data;
+
+                if (animpath_matches_basepath(fcu->rna_path, basepath)) {
+                    /* just need to change lists */
+                    BLI_remlink(&srcAdt->drivers, fcu);
+                    BLI_addtail(&dstAdt->drivers, fcu);
+
+                    // TODO: add depsgraph flushing calls?
+
+                    /* can stop now, as moved already */
+                    break;
+                }
+            }
+        }
+    }
+}
+
/* Path Validation -------------------------------------------- */

/* Check if a given RNA Path is valid, by tracing it from the given ID, and seeing if we can resolve it */
@@ -466,46 +636,77 @@ void BKE_animdata_fix_paths_rename (ID *owner_id, AnimData *adt, const char *pre
|
|||||||
/* Whole Database Ops -------------------------------------------- */
|
/* Whole Database Ops -------------------------------------------- */
|
||||||
|
|
||||||
/* apply the given callback function on all data in main database */
|
/* apply the given callback function on all data in main database */
|
||||||
void BKE_animdata_main_cb (Main *main, ID_AnimData_Edit_Callback func, void *user_data)
|
void BKE_animdata_main_cb (Main *mainptr, ID_AnimData_Edit_Callback func, void *user_data)
|
||||||
{
|
{
|
||||||
ID *id;
|
ID *id;
|
||||||
|
|
||||||
|
/* standard data version */
|
||||||
#define ANIMDATA_IDS_CB(first) \
|
#define ANIMDATA_IDS_CB(first) \
|
||||||
for (id= first; id; id= id->next) { \
|
for (id= first; id; id= id->next) { \
|
||||||
AnimData *adt= BKE_animdata_from_id(id); \
|
AnimData *adt= BKE_animdata_from_id(id); \
|
||||||
if (adt) func(id, adt, user_data); \
|
if (adt) func(id, adt, user_data); \
|
||||||
}
|
}
|
||||||
|
|
||||||
ANIMDATA_IDS_CB(main->nodetree.first); /* nodes */
|
/* "embedded" nodetree cases (i.e. scene/material/texture->nodetree) */
|
||||||
ANIMDATA_IDS_CB(main->tex.first); /* textures */
|
#define ANIMDATA_NODETREE_IDS_CB(first, NtId_Type) \
|
||||||
ANIMDATA_IDS_CB(main->lamp.first); /* lamps */
|
for (id= first; id; id= id->next) { \
|
||||||
ANIMDATA_IDS_CB(main->mat.first); /* materials */
|
AnimData *adt= BKE_animdata_from_id(id); \
|
||||||
ANIMDATA_IDS_CB(main->camera.first); /* cameras */
|
NtId_Type *ntp= (NtId_Type *)id; \
|
||||||
ANIMDATA_IDS_CB(main->key.first); /* shapekeys */
|
if (ntp->nodetree) { \
|
||||||
ANIMDATA_IDS_CB(main->mball.first); /* metaballs */
|
AnimData *adt2= BKE_animdata_from_id((ID *)ntp); \
|
||||||
ANIMDATA_IDS_CB(main->curve.first); /* curves */
|
if (adt2) func(id, adt2, user_data); \
|
||||||
ANIMDATA_IDS_CB(main->armature.first); /* armatures */
|
} \
|
||||||
ANIMDATA_IDS_CB(main->mesh.first); /* meshes */
|
if (adt) func(id, adt, user_data); \
|
||||||
ANIMDATA_IDS_CB(main->particle.first); /* particles */
|
}
|
||||||
ANIMDATA_IDS_CB(main->object.first); /* objects */
|
|
||||||
ANIMDATA_IDS_CB(main->world.first); /* worlds */
|
/* nodes */
|
||||||
ANIMDATA_IDS_CB(main->linestyle.first); /* linestyles */
|
ANIMDATA_IDS_CB(mainptr->nodetree.first);
|
||||||
|
|
||||||
/* scenes */
|
/* textures */
|
||||||
for (id= main->scene.first; id; id= id->next) {
|
ANIMDATA_NODETREE_IDS_CB(mainptr->tex.first, Tex);
|
||||||
AnimData *adt= BKE_animdata_from_id(id);
|
|
||||||
Scene *scene= (Scene *)id;
|
/* lamps */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->lamp.first);
|
||||||
/* do compositing nodes first (since these aren't included in main tree) */
|
|
||||||
if (scene->nodetree) {
|
/* materials */
|
||||||
AnimData *adt2= BKE_animdata_from_id((ID *)scene->nodetree);
|
ANIMDATA_NODETREE_IDS_CB(mainptr->mat.first, Material);
|
||||||
if (adt2) func(id, adt2, user_data);
|
|
||||||
}
|
/* cameras */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->camera.first);
|
||||||
/* now fix scene animation data as per normal */
|
|
||||||
if (adt) func((ID *)id, adt, user_data);
|
/* shapekeys */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->key.first);
|
||||||
|
|
||||||
|
/* metaballs */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->mball.first);
|
||||||
|
|
||||||
|
/* curves */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->curve.first);
|
||||||
|
|
||||||
|
/* armatures */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->armature.first);
|
||||||
|
|
||||||
|
/* lattices */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->latt.first);
|
||||||
|
|
||||||
|
/* meshes */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->mesh.first);
|
||||||
|
|
||||||
|
/* particles */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->particle.first);
|
||||||
|
|
||||||
|
/* objects */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->object.first);
|
||||||
|
|
||||||
|
/* worlds */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->world.first);
|
||||||
|
|
||||||
|
/* scenes */
|
||||||
|
ANIMDATA_NODETREE_IDS_CB(mainptr->scene.first, Scene);
|
||||||
|
|
||||||
|
/* line styles */
|
||||||
|
ANIMDATA_IDS_CB(mainptr->linestyle.first);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
/* Fix all RNA-Paths throughout the database (directly access the Global.main version)
|
/* Fix all RNA-Paths throughout the database (directly access the Global.main version)
|
||||||
* NOTE: it is assumed that the structure we're replacing is <prefix><["><name><"]>
|
* NOTE: it is assumed that the structure we're replacing is <prefix><["><name><"]>
|
||||||
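Editorial note: the nodetree-aware macro is easiest to read expanded once by hand. For materials, ANIMDATA_NODETREE_IDS_CB(mainptr->mat.first, Material) expands to roughly the following, where `id`, `func` and `user_data` are the enclosing function's variables; this expansion is shown for readability only and adds no code to the patch.

/* Hand expansion of ANIMDATA_NODETREE_IDS_CB(mainptr->mat.first, Material). */
for (id= mainptr->mat.first; id; id= id->next) {
	AnimData *adt= BKE_animdata_from_id(id);
	Material *ntp= (Material *)id;

	/* when the material owns a node tree, the callback is run an extra time */
	if (ntp->nodetree) {
		AnimData *adt2= BKE_animdata_from_id((ID *)ntp);
		if (adt2) func(id, adt2, user_data);
	}

	/* then the ID's own AnimData, exactly like the plain ANIMDATA_IDS_CB */
	if (adt) func(id, adt, user_data);
}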
@@ -527,17 +728,29 @@ void BKE_all_animdata_fix_paths_rename (char *prefix, char *oldName, char *newNa
 		BKE_animdata_fix_paths_rename(id, adt, prefix, oldName, newName, 0, 0, 1);\
 	}
 
+/* another version of this macro for nodetrees */
+#define RENAMEFIX_ANIM_NODETREE_IDS(first, NtId_Type) \
+	for (id= first; id; id= id->next) { \
+		AnimData *adt= BKE_animdata_from_id(id); \
+		NtId_Type *ntp= (NtId_Type *)id; \
+		if (ntp->nodetree) { \
+			AnimData *adt2= BKE_animdata_from_id((ID *)ntp); \
+			BKE_animdata_fix_paths_rename((ID *)ntp, adt2, prefix, oldName, newName, 0, 0, 1);\
+		} \
+		BKE_animdata_fix_paths_rename(id, adt, prefix, oldName, newName, 0, 0, 1);\
+	}
+
 	/* nodes */
 	RENAMEFIX_ANIM_IDS(mainptr->nodetree.first);
 
 	/* textures */
-	RENAMEFIX_ANIM_IDS(mainptr->tex.first);
+	RENAMEFIX_ANIM_NODETREE_IDS(mainptr->tex.first, Tex);
 
 	/* lamps */
 	RENAMEFIX_ANIM_IDS(mainptr->lamp.first);
 
 	/* materials */
-	RENAMEFIX_ANIM_IDS(mainptr->mat.first);
+	RENAMEFIX_ANIM_NODETREE_IDS(mainptr->mat.first, Material);
 
 	/* cameras */
 	RENAMEFIX_ANIM_IDS(mainptr->camera.first);
@@ -554,8 +767,11 @@ void BKE_all_animdata_fix_paths_rename (char *prefix, char *oldName, char *newNa
 	/* armatures */
 	RENAMEFIX_ANIM_IDS(mainptr->armature.first);
 
+	/* lattices */
+	RENAMEFIX_ANIM_IDS(mainptr->latt.first);
+
 	/* meshes */
-	// TODO...
+	RENAMEFIX_ANIM_IDS(mainptr->mesh.first);
 
 	/* particles */
 	RENAMEFIX_ANIM_IDS(mainptr->particle.first);
@@ -570,21 +786,9 @@ void BKE_all_animdata_fix_paths_rename (char *prefix, char *oldName, char *newNa
 	RENAMEFIX_ANIM_IDS(mainptr->linestyle.first);
 
 	/* scenes */
-	for (id= mainptr->scene.first; id; id= id->next) {
-		AnimData *adt= BKE_animdata_from_id(id);
-		Scene *scene= (Scene *)id;
-
-		/* do compositing nodes first (since these aren't included in main tree) */
-		if (scene->nodetree) {
-			AnimData *adt2= BKE_animdata_from_id((ID *)scene->nodetree);
-			BKE_animdata_fix_paths_rename((ID *)scene->nodetree, adt2, prefix, oldName, newName, 0, 0, 1);
-		}
-
-		/* now fix scene animation data as per normal */
-		BKE_animdata_fix_paths_rename((ID *)id, adt, prefix, oldName, newName, 0, 0, 1);
-	}
+	RENAMEFIX_ANIM_NODETREE_IDS(mainptr->scene.first, Scene);
 }
 
 /* *********************************** */
 /* KeyingSet API */
 
@@ -689,7 +893,7 @@ KS_Path *BKE_keyingset_add_path (KeyingSet *ks, ID *id, const char group_name[],
 	/* just store absolute info */
 	ksp->id= id;
 	if (group_name)
-		BLI_snprintf(ksp->group, 64, group_name);
+		BLI_strncpy(ksp->group, group_name, sizeof(ksp->group));
 	else
 		ksp->group[0]= '\0';
 
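Editorial note: the hunk above is the usual fix for passing externally supplied text where a format string is expected. A generic libc illustration (not Blender code; the function name is hypothetical) of the difference:

/* Generic illustration only -- plain libc, not part of the patch. */
#include <stdio.h>
#include <string.h>

void copy_group_name(char dst[64], const char *group_name)
{
	/* The old pattern used the name as the *format* argument:
	 *     snprintf(dst, 64, group_name);
	 * If group_name happens to contain '%' sequences, snprintf tries to
	 * read variadic arguments that were never passed. */

	/* The replacement treats the name as plain data, bounds the copy to
	 * the destination and guarantees NUL-termination, which is roughly
	 * what BLI_strncpy() provides in one call. */
	strncpy(dst, group_name, 64 - 1);
	dst[64 - 1] = '\0';
}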
@@ -1906,7 +2110,6 @@ void BKE_animsys_evaluate_all_animation (Main *main, float ctime)
 	EVAL_ANIM_IDS(main->camera.first, ADT_RECALC_ANIM);
 
 	/* shapekeys */
-	// TODO: we probably need the same hack as for curves (ctime-hack)
 	EVAL_ANIM_IDS(main->key.first, ADT_RECALC_ANIM);
 
 	/* metaballs */
@@ -936,19 +936,15 @@ void armature_deform_verts(Object *armOb, Object *target, DerivedMesh *dm,
 			dvert = NULL;
 
 		if(armature_def_nr >= 0 && dvert) {
-			armature_weight = 0.0f; /* a def group was given, so default to 0 */
-			for(j = 0; j < dvert->totweight; j++) {
-				if(dvert->dw[j].def_nr == armature_def_nr) {
-					armature_weight = dvert->dw[j].weight;
-					break;
-				}
+			armature_weight= defvert_find_weight(dvert, armature_def_nr);
+
+			if(invert_vgroup) {
+				armature_weight= 1.0f-armature_weight;
 			}
 
 			/* hackish: the blending factor can be used for blending with prevCos too */
 			if(prevCos) {
-				if(invert_vgroup)
-					prevco_weight= 1.0f-armature_weight;
-				else
-					prevco_weight= armature_weight;
+				prevco_weight= armature_weight;
 				armature_weight= 1.0f;
 			}
 		}
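Editorial note: one way to read the hunk above is that the vertex-group lookup now goes through defvert_find_weight() (which, per the defvert change later in this diff, returns 0.0f when the vertex is not in the group) and the invert_vgroup flip is applied once, right after the lookup, rather than only where prevco_weight was computed. A standalone sketch of the new shape, with a hypothetical helper name that is not Blender API:

/* Illustrative sketch of the new weight handling; not part of the patch. */
static float lookup_armature_weight(const struct MDeformVert *dvert, int def_nr, int invert_vgroup)
{
	float w = defvert_find_weight(dvert, def_nr);   /* 0.0f if the group is absent */

	if (invert_vgroup)
		w = 1.0f - w;

	return w;
}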
@@ -504,7 +504,7 @@ DerivedMesh *clothModifier_do(ClothModifierData *clmd, Scene *scene, Object *ob,
 	}
 
 	/* try to read from cache */
-	cache_result = BKE_ptcache_read(&pid, (float)framenr+scene->r.subframe, scene->r.frs_sec);
+	cache_result = BKE_ptcache_read(&pid, (float)framenr+scene->r.subframe);
 
 	if(cache_result == PTCACHE_READ_EXACT || cache_result == PTCACHE_READ_INTERPOLATED) {
 		implicit_set_positions(clmd);
@@ -104,6 +104,11 @@ typedef struct LayerTypeInfo {
 
 	/* a function to determine file size */
 	size_t (*filesize)(CDataFile *cdf, void *data, int count);
 
+	/* a function to validate layer contents depending on
+	 * sub-elements count
+	 */
+	void (*validate)(void *source, int sub_elements);
+
 } LayerTypeInfo;
 
 static void layerCopy_mdeformvert(const void *source, void *dest,
@@ -396,52 +401,6 @@ static void layerDefault_origspace_face(void *data, int count)
 		osf[i] = default_osf;
 }
 
-/* Adapted from sculptmode.c */
-static void mdisps_bilinear(float out[3], float (*disps)[3], int st, float u, float v)
-{
-	int x, y, x2, y2;
-	const int st_max = st - 1;
-	float urat, vrat, uopp;
-	float d[4][3], d2[2][3];
-
-	if(u < 0)
-		u = 0;
-	else if(u >= st)
-		u = st_max;
-	if(v < 0)
-		v = 0;
-	else if(v >= st)
-		v = st_max;
-
-	x = floor(u);
-	y = floor(v);
-	x2 = x + 1;
-	y2 = y + 1;
-
-	if(x2 >= st) x2 = st_max;
-	if(y2 >= st) y2 = st_max;
-
-	urat = u - x;
-	vrat = v - y;
-	uopp = 1 - urat;
-
-	copy_v3_v3(d[0], disps[y * st + x]);
-	copy_v3_v3(d[1], disps[y * st + x2]);
-	copy_v3_v3(d[2], disps[y2 * st + x]);
-	copy_v3_v3(d[3], disps[y2 * st + x2]);
-	mul_v3_fl(d[0], uopp);
-	mul_v3_fl(d[1], urat);
-	mul_v3_fl(d[2], uopp);
-	mul_v3_fl(d[3], urat);
-
-	add_v3_v3v3(d2[0], d[0], d[1]);
-	add_v3_v3v3(d2[1], d[2], d[3]);
-	mul_v3_fl(d2[0], 1 - vrat);
-	mul_v3_fl(d2[1], vrat);
-
-	add_v3_v3v3(out, d2[0], d2[1]);
-}
-
 static void layerSwap_mdisps(void *data, const int *ci)
 {
 	MDisps *s = data;
@@ -473,211 +432,6 @@ static void layerSwap_mdisps(void *data, const int *ci)
 	}
 }
 
-static void mdisp_get_crn_rect(int face_side, float crn[3][4][2])
-{
-	float offset = face_side*0.5f - 0.5f;
-	float mid[2];
-
-	mid[0] = offset * 4 / 3;
-	mid[1] = offset * 2 / 3;
-
-	crn[0][0][0] = mid[0]; crn[0][0][1] = mid[1];
-	crn[0][1][0] = offset; crn[0][1][1] = 0;
-	crn[0][2][0] = 0; crn[0][2][1] = 0;
-	crn[0][3][0] = offset; crn[0][3][1] = offset;
-
-	crn[1][0][0] = mid[0]; crn[1][0][1] = mid[1];
-	crn[1][1][0] = offset * 2; crn[1][1][1] = offset;
-	crn[1][2][0] = offset * 2; crn[1][2][1] = 0;
-	crn[1][3][0] = offset; crn[1][3][1] = 0;
-
-	crn[2][0][0] = mid[0]; crn[2][0][1] = mid[1];
-	crn[2][1][0] = offset; crn[2][1][1] = offset;
-	crn[2][2][0] = offset * 2; crn[2][2][1] = offset * 2;
-	crn[2][3][0] = offset * 2; crn[2][3][1] = offset;
-}
-
-static void mdisp_rot_crn_to_face(int S, int corners, int face_side, float x, float y, float *u, float *v)
-{
-	float offset = face_side*0.5f - 0.5f;
-
-	if(corners == 4) {
-		if(S == 1) { *u= offset + x; *v = offset - y; }
-		if(S == 2) { *u= offset + y; *v = offset + x; }
-		if(S == 3) { *u= offset - x; *v = offset + y; }
-		if(S == 0) { *u= offset - y; *v = offset - x; }
-	} else {
-		float crn[3][4][2], vec[4][2];
-		float p[2];
-
-		mdisp_get_crn_rect(face_side, crn);
-
-		interp_v2_v2v2(vec[0], crn[S][0], crn[S][1], x / offset);
-		interp_v2_v2v2(vec[1], crn[S][3], crn[S][2], x / offset);
-		interp_v2_v2v2(vec[2], crn[S][0], crn[S][3], y / offset);
-		interp_v2_v2v2(vec[3], crn[S][1], crn[S][2], y / offset);
-
-		isect_seg_seg_v2_point(vec[0], vec[1], vec[2], vec[3], p);
-
-		(*u) = p[0];
-		(*v) = p[1];
-	}
-}
-
-static int mdisp_pt_in_crn(float p[2], float crn[4][2])
-{
-	float v[2][2];
-	float a[2][2];
-
-	sub_v2_v2v2(v[0], crn[1], crn[0]);
-	sub_v2_v2v2(v[1], crn[3], crn[0]);
-
-	sub_v2_v2v2(a[0], p, crn[0]);
-	sub_v2_v2v2(a[1], crn[2], crn[0]);
-
-	if(cross_v2v2(a[0], v[0]) * cross_v2v2(a[1], v[0]) < 0)
-		return 0;
-
-	if(cross_v2v2(a[0], v[1]) * cross_v2v2(a[1], v[1]) < 0)
-		return 0;
-
-	return 1;
-}
-
-static void face_to_crn_interp(float u, float v, float v1[2], float v2[2], float v3[2], float v4[2], float *x)
-{
-	float a = (v4[1]-v3[1])*v2[0]+(-v4[1]+v3[1])*v1[0]+(-v2[1]+v1[1])*v4[0]+(v2[1]-v1[1])*v3[0];
-	float b = (v3[1]-v)*v2[0]+(v4[1]-2*v3[1]+v)*v1[0]+(-v4[1]+v3[1]+v2[1]-v1[1])*u+(v4[0]-v3[0])*v-v1[1]*v4[0]+(-v2[1]+2*v1[1])*v3[0];
-	float c = (v3[1]-v)*v1[0]+(-v3[1]+v1[1])*u+v3[0]*v-v1[1]*v3[0];
-	float d = b * b - 4 * a * c;
-	float x1, x2;
-
-	if(a == 0) {
-		*x = -c / b;
-		return;
-	}
-
-	x1 = (-b - sqrtf(d)) / (2 * a);
-	x2 = (-b + sqrtf(d)) / (2 * a);
-
-	*x = maxf(x1, x2);
-}
-
-static int mdisp_rot_face_to_crn(int corners, int face_side, float u, float v, float *x, float *y)
-{
-	float offset = face_side*0.5f - 0.5f;
-	int S;
-
-	if (corners == 4) {
-		if(u <= offset && v <= offset) S = 0;
-		else if(u > offset && v <= offset) S = 1;
-		else if(u > offset && v > offset) S = 2;
-		else if(u <= offset && v >= offset) S = 3;
-
-		if(S == 0) {
-			*y = offset - u;
-			*x = offset - v;
-		} else if(S == 1) {
-			*x = u - offset;
-			*y = offset - v;
-		} else if(S == 2) {
-			*y = u - offset;
-			*x = v - offset;
-		} else if(S == 3) {
-			*x= offset - u;
-			*y = v - offset;
-		}
-	} else {
-		float crn[3][4][2];
-		float p[2] = {u, v};
-
-		mdisp_get_crn_rect(face_side, crn);
-
-		for (S = 0; S < 3; ++S) {
-			if (mdisp_pt_in_crn(p, crn[S]))
-				break;
-		}
-
-		face_to_crn_interp(u, v, crn[S][0], crn[S][1], crn[S][3], crn[S][2], &p[0]);
-		face_to_crn_interp(u, v, crn[S][0], crn[S][3], crn[S][1], crn[S][2], &p[1]);
-
-		*x = p[0] * offset;
-		*y = p[1] * offset;
-	}
-
-	return S;
-}
-
-static void mdisp_apply_weight(int S, int corners, int x, int y, int face_side,
-	float crn_weight[4][2], float *u_r, float *v_r)
-{
-	float u, v, xl, yl;
-	float mid1[2], mid2[2], mid3[2];
-
-	mdisp_rot_crn_to_face(S, corners, face_side, x, y, &u, &v);
-
-	if(corners == 4) {
-		xl = u / (face_side - 1);
-		yl = v / (face_side - 1);
-
-		mid1[0] = crn_weight[0][0] * (1 - xl) + crn_weight[1][0] * xl;
-		mid1[1] = crn_weight[0][1] * (1 - xl) + crn_weight[1][1] * xl;
-		mid2[0] = crn_weight[3][0] * (1 - xl) + crn_weight[2][0] * xl;
-		mid2[1] = crn_weight[3][1] * (1 - xl) + crn_weight[2][1] * xl;
-		mid3[0] = mid1[0] * (1 - yl) + mid2[0] * yl;
-		mid3[1] = mid1[1] * (1 - yl) + mid2[1] * yl;
-	} else {
-		yl = v / (face_side - 1);
-
-		if(v == face_side - 1) xl = 1;
-		else xl = 1 - (face_side - 1 - u) / (face_side - 1 - v);
-
-		mid1[0] = crn_weight[0][0] * (1 - xl) + crn_weight[1][0] * xl;
-		mid1[1] = crn_weight[0][1] * (1 - xl) + crn_weight[1][1] * xl;
-		mid3[0] = mid1[0] * (1 - yl) + crn_weight[2][0] * yl;
-		mid3[1] = mid1[1] * (1 - yl) + crn_weight[2][1] * yl;
-	}
-
-	*u_r = mid3[0];
-	*v_r = mid3[1];
-}
-
-static void mdisp_flip_disp(int S, int corners, float axis_x[2], float axis_y[2], float disp[3])
-{
-	float crn_x[2], crn_y[2];
-	float vx[2], vy[2], coord[2];
-
-	if (corners == 4) {
-		float x[4][2] = {{0, -1}, {1, 0}, {0, 1}, {-1, 0}};
-		float y[4][2] = {{-1, 0}, {0, -1}, {1, 0}, {0, 1}};
-
-		copy_v2_v2(crn_x, x[S]);
-		copy_v2_v2(crn_y, y[S]);
-
-		mul_v2_v2fl(vx, crn_x, disp[0]);
-		mul_v2_v2fl(vy, crn_y, disp[1]);
-		add_v2_v2v2(coord, vx, vy);
-
-		project_v2_v2v2(vx, coord, axis_x);
-		project_v2_v2v2(vy, coord, axis_y);
-
-		disp[0] = len_v2(vx);
-		disp[1] = len_v2(vy);
-
-		if(dot_v2v2(vx, axis_x) < 0)
-			disp[0] = -disp[0];
-
-		if(dot_v2v2(vy, axis_y) < 0)
-			disp[1] = -disp[1];
-	} else {
-		/* XXX: it was very overhead code to support displacement flipping
-		   for case of tris without visible profit.
-		   Maybe its not really big limitation? for now? (nazgul) */
-		disp[0] = 0;
-		disp[1] = 0;
-	}
-}
-
 static void layerInterp_mdisps(void **sources, float *UNUSED(weights),
 		float *sub_weights, int count, void *dest)
 {
@@ -687,16 +441,55 @@ static void layerInterp_mdisps(void **sources, float *UNUSED(weights),
 	int i, x, y;
 	int side, S, dst_corners, src_corners;
 	float crn_weight[4][2];
-	float (*sw)[4] = NULL;
+	float (*sw)[4] = (void*)sub_weights;
 	float (*disps)[3], (*out)[3];
 
 	s = sources[0];
 	dst_corners = multires_mdisp_corners(d);
 	src_corners = multires_mdisp_corners(s);
 
-	/* XXX: For now, some restrictions on the input
-	   should be implemented to allow quad<->tris face conversion */
-	if(count != 1 || !sub_weights || dst_corners != src_corners) {
+	if(sub_weights && count == 2 && src_corners == 3) {
+		src_corners = multires_mdisp_corners(sources[1]);
+
+		/* special case -- converting two triangles to quad */
+		if(src_corners == 3 && dst_corners == 4) {
+			MDisps tris[2];
+			int vindex[4] = {0};
+
+			S = 0;
+			for(i = 0; i < 2; i++)
+				for(y = 0; y < 4; y++)
+					for(x = 0; x < 4; x++)
+						if(sw[x+i*4][y])
+							vindex[x] = y;
+
+			for(i = 0; i < 2; i++) {
+				float sw[4][4] = {{0}};
+				int a = 7 & ~(1 << vindex[i*2] | 1 << vindex[i*2+1]);
+
+				sw[0][vindex[i*2+1]] = 1;
+				sw[1][vindex[i*2]] = 1;
+
+				for(x = 0; x < 3; x++)
+					if(a & (1 << x))
+						sw[2][x] = 1;
+
+				tris[i] = *((MDisps*)sources[i]);
+				tris[i].disps = MEM_dupallocN(tris[i].disps);
+				layerInterp_mdisps(&sources[i], NULL, (float*)sw, 1, &tris[i]);
+			}
+
+			mdisp_join_tris(d, &tris[0], &tris[1]);
+
+			for(i = 0; i < 2; i++)
+				MEM_freeN(tris[i].disps);
+
+			return;
+		}
+	}
+
+	/* For now, some restrictions on the input */
+	if(count != 1 || !sub_weights) {
 		for(i = 0; i < d->totdisp; ++i)
 			zero_v3(d->disps[i]);
 
@@ -739,7 +532,7 @@ static void layerInterp_mdisps(void **sources, float *UNUSED(weights),
 			mdisp_apply_weight(S, dst_corners, x, y, st, crn_weight, &face_u, &face_v);
 			crn = mdisp_rot_face_to_crn(src_corners, st, face_u, face_v, &crn_u, &crn_v);
 
-			mdisps_bilinear((*out), &s->disps[crn*side*side], side, crn_u, crn_v);
+			old_mdisps_bilinear((*out), &s->disps[crn*side*side], side, crn_u, crn_v);
 			mdisp_flip_disp(crn, dst_corners, axis_x, axis_y, *out);
 		}
 	}
@@ -768,6 +561,20 @@ static void layerCopy_mdisps(const void *source, void *dest, int count)
 	}
 }
 
+static void layerValidate_mdisps(void *data, int sub_elements)
+{
+	MDisps *disps = data;
+	if(disps->disps) {
+		int corners = multires_mdisp_corners(disps);
+
+		if(corners != sub_elements) {
+			MEM_freeN(disps->disps);
+			disps->totdisp = disps->totdisp / corners * sub_elements;
+			disps->disps = MEM_callocN(3*disps->totdisp*sizeof(float), "layerValidate_mdisps");
+		}
+	}
+}
+
 static void layerFree_mdisps(void *data, int count, int UNUSED(size))
 {
 	int i;
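Editorial note: the totdisp rescale in layerValidate_mdisps keeps the per-corner displacement count constant when the corner count of a face changes. A worked example with illustrative numbers (not taken from the patch):

/* Worked example of the rescale above; the helper name is hypothetical. */
static int rescale_totdisp(int totdisp, int corners, int sub_elements)
{
	/* e.g. a quad layer with 256 displacements (64 per corner) validated
	 * against 3 sub-elements becomes 256 / 4 * 3 = 192 */
	return totdisp / corners * sub_elements;
}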
@@ -1019,7 +826,8 @@ const LayerTypeInfo LAYERTYPEINFO[CD_NUMTYPES] = {
 	{sizeof(MLoopCol), "MLoopCol", 1, "Col", NULL, NULL, layerInterp_mloopcol, NULL, layerDefault_mloopcol},
 	{sizeof(float)*3*4, "", 0, NULL, NULL, NULL, NULL, NULL, NULL},
 	{sizeof(MDisps), "MDisps", 1, NULL, layerCopy_mdisps,
-	 layerFree_mdisps, layerInterp_mdisps, layerSwap_mdisps, NULL, layerRead_mdisps, layerWrite_mdisps, layerFilesize_mdisps},
+	 layerFree_mdisps, layerInterp_mdisps, layerSwap_mdisps, NULL, layerRead_mdisps, layerWrite_mdisps,
+	 layerFilesize_mdisps, layerValidate_mdisps},
 	{sizeof(MCol)*4, "MCol", 4, "WeightCol", NULL, NULL, layerInterp_mcol,
 	 layerSwap_mcol, layerDefault_mcol},
 	{sizeof(MCol)*4, "MCol", 4, "IDCol", NULL, NULL, layerInterp_mcol,
@@ -1942,6 +1750,18 @@ void CustomData_em_copy_data(const CustomData *source, CustomData *dest,
 	}
 }
 
+void CustomData_em_validate_data(CustomData *data, void *block, int sub_elements)
+{
+	int i;
+	for(i = 0; i < data->totlayer; i++) {
+		const LayerTypeInfo *typeInfo = layerType_getInfo(data->layers[i].type);
+		char *leayer_data = (char*)block + data->layers[i].offset;
+
+		if(typeInfo->validate)
+			typeInfo->validate(leayer_data, sub_elements);
+	}
+}
+
 void *CustomData_em_get(const CustomData *data, void *block, int type)
 {
 	int layer_index;
@@ -448,7 +448,7 @@ void flip_side_name (char *name, const char *from_name, int strip_number)
 	sprintf (name, "%s%s%s%s", prefix, replace, suffix, number);
 }
 
-float defvert_find_weight(const struct MDeformVert *dvert, int group_num)
+float defvert_find_weight(const struct MDeformVert *dvert, const int group_num)
 {
 	MDeformWeight *dw= defvert_find_index(dvert, group_num);
 	return dw ? dw->weight : 0.0f;
@@ -1329,7 +1329,7 @@ DriverVar *driver_add_new_variable (ChannelDriver *driver)
 
 #ifdef WITH_PYTHON
 	/* since driver variables are cached, the expression needs re-compiling too */
-	if(driver->type==DRIVER_TYPE_PYTHON)
+	if (driver->type==DRIVER_TYPE_PYTHON)
 		driver->flag |= DRIVER_FLAG_RENAMEVAR;
 #endif
 
@@ -1009,9 +1009,13 @@ FModifier *add_fmodifier (ListBase *modifiers, int type)
 	fcm->flag = FMODIFIER_FLAG_EXPANDED;
 	BLI_addtail(modifiers, fcm);
 
+	/* tag modifier as "active" if no other modifiers exist in the stack yet */
+	if (modifiers->first == modifiers->last)
+		fcm->flag |= FMODIFIER_FLAG_ACTIVE;
+
 	/* add modifier's data */
 	fcm->data= MEM_callocN(fmi->size, fmi->structName);
 
 	/* init custom settings if necessary */
 	if (fmi->new_data)
 		fmi->new_data(fcm->data);
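Editorial note: the new "active" tagging relies on a ListBase property: immediately after BLI_addtail(), first == last exactly when the freshly added element is the only one in the list. A small sketch of that test in isolation (hypothetical helper, not part of the patch):

/* ListBase is Blender's doubly linked list head: { void *first, *last }. */
static int list_has_single_element(const ListBase *lb)
{
	return (lb->first != NULL) && (lb->first == lb->last);
}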
@@ -670,7 +670,7 @@ struct chartrans *BKE_text_to_curve(Scene *scene, Object *ob, int mode)
 	VFont *vfont, *oldvfont;
 	VFontData *vfd= NULL;
 	Curve *cu;
-	CharInfo *info, *custrinfo;
+	CharInfo *info = NULL, *custrinfo;
 	TextBox *tb;
 	VChar *che;
 	struct chartrans *chartransdata=NULL, *ct;
@@ -1191,7 +1191,7 @@ static void do_curve_key(Scene *scene, Object *ob, Key *key, char *out, int tot)
 
 	if(key->slurph && key->type!=KEY_RELATIVE) {
 		Nurb *nu;
-		int mode, i= 0, remain= 0, estep, count;
+		int mode=0, i= 0, remain= 0, estep=0, count=0;
 
 		delta= (float)key->slurph / tot;
 