from math import hypot, sqrt, isfinite
import bpy
import time
from mathutils import Vector


#Vector utility class: an n-dimensional vector with the few operations the
#cross-correlation code below needs (mathutils.Vector is limited to 4 elements)
class NdVector:
    vec = []

    def __init__(self, vec):
        self.vec = vec[:]

    def __len__(self):
        return len(self.vec)

    def __mul__(self, otherMember):
        if isinstance(otherMember, (int, float)):
            #scalar multiplication
            return NdVector([otherMember * x for x in self.vec])
        else:
            #dot product with another NdVector
            a = self.vec
            b = otherMember.vec
            n = len(self)
            return sum([a[i] * b[i] for i in range(n)])

    def __sub__(self, otherVec):
        a = self.vec
        b = otherVec.vec
        n = len(self)
        return NdVector([a[i] - b[i] for i in range(n)])

    def __add__(self, otherVec):
        a = self.vec
        b = otherVec.vec
        n = len(self)
        return NdVector([a[i] + b[i] for i in range(n)])

    def vecLength(self):
        return sqrt(self * self)

    def vecLengthSq(self):
        return (self * self)

    def __getitem__(self, i):
        return self.vec[i]

    length = property(vecLength)
    lengthSq = property(vecLengthSq)


#Sampled data point used by the curve simplification code
class dataPoint:
    index = 0
    co = Vector((0, 0, 0, 0))  #x,y1,y2,y3 coordinate of original point
    u = 0     #position according to parametric view of original data, [0,1] range
    temp = 0  #use this for anything

    def __init__(self, index, co, u=0):
        self.index = index
        self.co = co
        self.u = u


#Detect the period of a looping animation via cross-correlation of the
#selected F-Curves, so the scene range can be set to a single clean loop.
def autoloop_anim():
    context = bpy.context
    obj = context.active_object
    fcurves = [x for x in obj.animation_data.action.fcurves if x.select]

    data = []
    end = len(fcurves[0].keyframe_points)

    for i in range(1, end):
        vec = []
        for fcurve in fcurves:
            vec.append(fcurve.evaluate(i))
        data.append(NdVector(vec))

    def comp(a, b):
        return a * b

    #circular autocorrelation of the sampled curves with themselves
    N = len(data)
    Rxy = [0.0] * N
    for i in range(N):
        for j in range(i, min(i + N, N)):
            Rxy[i] += comp(data[j], data[j - i])
        for j in range(i):
            Rxy[i] += comp(data[j], data[j - i + N])
        Rxy[i] /= float(N)

    #find the offset of the strongest local maximum of the autocorrelation,
    #i.e. the most likely loop length
    def bestLocalMaximum(Rxy):
        Rxyd = [Rxy[i] - Rxy[i - 1] for i in range(1, len(Rxy))]
        maxs = []
        for i in range(1, len(Rxyd) - 1):
            a = Rxyd[i - 1]
            b = Rxyd[i]
            #sign change (zero crossing) at point i, denoting a max point (only)
            if (a >= 0 and b < 0) or (a < 0 and b >= 0):
                maxs.append((i, max(Rxy[i], Rxy[i - 1])))
        return max(maxs, key=lambda x: x[1])[0]

    flm = bestLocalMaximum(Rxy[0:int(len(Rxy))])

    #squared difference between the animation and itself shifted by one period
    diff = []
    for i in range(len(data) - flm):
        diff.append((data[i] - data[i + flm]).lengthSq)

    #find the start frame whose surrounding window (half-width e) loops with
    #the lowest total error
    def lowerErrorSlice(diff, e):
        bestSlice = (0, 100000)  #index, error at index
        for i in range(e, len(diff) - e):
            errorSlice = sum(diff[i - e:i + e + 1])
            if errorSlice < bestSlice[1]:
                bestSlice = (i, errorSlice)
        return bestSlice

    #assumed behaviour: pick the loop start whose surrounding window has the
    #lowest error (the half-width of 6 frames is arbitrary) and set the scene
    #range to one detected period
    s = lowerErrorSlice(diff, 6)[0]
    context.scene.frame_start = s
    context.scene.frame_end = s + flm


#calculate the highest error between the fitted bezier and the original data
#in the range [s,e], and the index of the point where it occurs.
#(sketch: measuring the deviation as the relative distance between each point
#and the bezier evaluated at the point's parameter u is an assumption)
def maxErrorAmount(data_pts, bez, s, e):
    maxError = 0
    maxErrorPt = s
    for pt in data_pts[s:e + 1]:
        bezVal = bezierEval(bez, pt.u)
        #normalize by the point's magnitude so the threshold acts as a relative error
        norm = pt.co.length
        if norm == 0:
            norm = 1
        tmpError = (pt.co - bezVal).length / norm
        if tmpError >= maxError:
            maxError = tmpError
            maxErrorPt = pt.index
    return maxError, maxErrorPt


#calculate the bezier derivative at point t.
#That is, the tangent of point t.
def getBezDerivative(bez, t):
    n = len(bez) - 1
    sumVec = Vector((0, 0, 0, 0))
    for i in range(n - 1):
        sumVec += bernsteinPoly(n - 1, i, t) * (bez[i + 1] - bez[i])
    return sumVec
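#----------------------------------------------------------------------------
#Helper sketches for the fitting code above and below.  The fitting routines
#call unitTangent, chordLength, bernsteinPoly, bezierEval, fitSingleCubic and
#fitSingleCubic2Pts; the versions here are minimal, assumed implementations of
#the standard cubic-bezier fitting steps (chord-length parameterization,
#Bernstein basis, and a least-squares single-segment fit in the spirit of
#Schneider's curve-fitting algorithm).  Names and signatures are taken from
#the call sites; the bodies are illustrative sketches, not authoritative.
#----------------------------------------------------------------------------

#unit tangent of the sampled data at point index v (central difference where possible)
def unitTangent(v, data_pts):
    tang = Vector((0, 0, 0, 0))
    if v != 0:
        #left-side difference if this is not the first point
        tang += data_pts[v].co - data_pts[v - 1].co
    if v != len(data_pts) - 1:
        #right-side difference if this is not the last point
        tang += data_pts[v + 1].co - data_pts[v].co
    return tang.normalized()


#assign a parametric u value in [0,1] to each point in [s,e], proportional to
#accumulated chord length (a standard arc-length approximation)
def chordLength(data_pts, s, e):
    totalLength = 0
    for pt in data_pts[s:e + 1]:
        i = pt.index
        chord = 0 if i == s else (data_pts[i].co - data_pts[i - 1].co).length
        totalLength += chord
        pt.temp = totalLength
    for pt in data_pts[s:e + 1]:
        pt.u = (pt.temp / totalLength) if totalLength != 0 else 0


#value at t of the i-th Bernstein basis polynomial of degree n
def bernsteinPoly(n, i, t):
    binom = 1
    for k in range(i):
        binom = binom * (n - k) // (k + 1)
    return binom * pow(t, i) * pow(1 - t, n - i)


#evaluate a bezier curve (a list of control points) at parameter t
def bezierEval(bez, t):
    n = len(bez) - 1
    result = Vector((0, 0, 0, 0))
    for i in range(n + 1):
        result += bez[i] * bernsteinPoly(n, i, t)
    return result


#fallback fit when a range has too few points to solve the least-squares
#system: place each handle one third of the chord away from its endpoint
def fitSingleCubic2Pts(data_pts, s, e):
    v0 = data_pts[s].co
    v3 = data_pts[e].co
    third = (v3 - v0) / 3.0
    return [v0, v0 + third, v3 - third, v3]


#least-squares fit of a single cubic bezier to the parameterized points in [s,e]
def fitSingleCubic(data_pts, s, e):
    v0 = data_pts[s].co
    v3 = data_pts[e].co
    t1 = unitTangent(s, data_pts)    #tangent leaving the first point
    t2 = -unitTangent(e, data_pts)   #tangent pointing back into the curve at the last point

    c11 = c12 = c22 = x1 = x2 = 0.0
    for pt in data_pts[s:e + 1]:
        u = pt.u
        a1 = t1 * bernsteinPoly(3, 1, u)
        a2 = t2 * bernsteinPoly(3, 2, u)
        #remainder of the point after subtracting the fixed endpoint contribution
        base = (v0 * (bernsteinPoly(3, 0, u) + bernsteinPoly(3, 1, u)) +
                v3 * (bernsteinPoly(3, 2, u) + bernsteinPoly(3, 3, u)))
        tmp = pt.co - base
        c11 += a1.dot(a1)
        c12 += a1.dot(a2)
        c22 += a2.dot(a2)
        x1 += a1.dot(tmp)
        x2 += a2.dot(tmp)

    #solve the 2x2 system for the handle lengths alpha_l, alpha_r
    det = c11 * c22 - c12 * c12
    fallback = (v3 - v0).length / 3.0
    if det == 0:
        alpha_l = alpha_r = fallback
    else:
        alpha_l = (x1 * c22 - x2 * c12) / det
        alpha_r = (c11 * x2 - c12 * x1) / det
        if alpha_l <= 0 or alpha_r <= 0:
            #degenerate solution: fall back to a chord-length heuristic
            alpha_l = alpha_r = fallback

    return [v0, v0 + t1 * alpha_l, v3 + t2 * alpha_r, v3]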
#Fit a single F-Curve, or a group of related F-Curves, with cubic beziers,
#keeping the deviation from the original keyframes below the given error.
def simplifyCurves(curveGroup, error, reparaError, maxIterations, group_mode):

    #use Newton-Raphson to find a better parameterization of the data points,
    #one that minimizes the distance (or error) between bezier and original data.
    def newtonRaphson(data_pts, s, e, bez):
        for pt in data_pts[s:e + 1]:
            if pt.index == s:
                pt.u = 0
            elif pt.index == e:
                pt.u = 1
            else:
                u = pt.u
                qu = bezierEval(bez, pt.u)
                qud = getBezDerivative(bez, u)
                #we wish to minimize f(u), the squared distance between curve and data
                fu = (qu - pt.co).length ** 2
                fud = (2 * (qu.x - pt.co.x) * (qud.x)) - (2 * (qu.y - pt.co.y) * (qud.y))
                if fud == 0:
                    fu = 0
                    fud = 1
                pt.u = pt.u - (fu / fud)

    #create data_pts, a list of dataPoint objects, one per keyframe in the group
    def createDataPts(curveGroup, group_mode):
        data_pts = []
        if group_mode:
            for i in range(len(curveGroup[0].keyframe_points)):
                x = curveGroup[0].keyframe_points[i].co.x
                y1 = curveGroup[0].keyframe_points[i].co.y
                y2 = curveGroup[1].keyframe_points[i].co.y
                y3 = curveGroup[2].keyframe_points[i].co.y
                data_pts.append(dataPoint(i, Vector((x, y1, y2, y3))))
        else:
            for i in range(len(curveGroup.keyframe_points)):
                x = curveGroup.keyframe_points[i].co.x
                y1 = curveGroup.keyframe_points[i].co.y
                y2 = 0
                y3 = 0
                data_pts.append(dataPoint(i, Vector((x, y1, y2, y3))))
        return data_pts

    #fit the points in range [s,e] with beziers, splitting recursively at the
    #point of maximum error when a single bezier is not good enough
    def fitCubic(data_pts, s, e):
        # if there are less than 3 points, fit a single basic bezier
        if e - s < 3:
            bez = fitSingleCubic2Pts(data_pts, s, e)
        else:
            #if there are more, parameterize the points and fit a single cubic bezier
            chordLength(data_pts, s, e)
            bez = fitSingleCubic(data_pts, s, e)

        #calculate max error and point where it occurs
        maxError, maxErrorPt = maxErrorAmount(data_pts, bez, s, e)

        #if error is small enough, reparameterization might be enough
        if maxError < reparaError > error:
            for i in range(maxIterations):
                newtonRaphson(data_pts, s, e, bez)
                if e - s < 3:
                    bez = fitSingleCubic2Pts(data_pts, s, e)
                else:
                    bez = fitSingleCubic(data_pts, s, e)

        #recalculate max error and point where it occurs
        maxError, maxErrorPt = maxErrorAmount(data_pts, bez, s, e)

        #repara wasn't enough, we need 2 beziers for this range.
        #Split the bezier at point of maximum error
        if maxError > error:
            fitCubic(data_pts, s, maxErrorPt)
            fitCubic(data_pts, maxErrorPt, e)
        else:
            #error is small enough, return the beziers.
            beziers.append(bez)
            return

    #remove the old keyframes and insert the new bezier control points
    def createNewCurves(curveGroup, beziers, group_mode):
        #remove all existing data points
        if group_mode:
            for fcurve in curveGroup:
                for i in range(len(fcurve.keyframe_points) - 1, 0, -1):
                    fcurve.keyframe_points.remove(fcurve.keyframe_points[i])
        else:
            fcurve = curveGroup
            for i in range(len(fcurve.keyframe_points) - 1, 0, -1):
                fcurve.keyframe_points.remove(fcurve.keyframe_points[i])

        #insert the calculated beziers into Blender's data
        if group_mode:
            for fullbez in beziers:
                for i, fcurve in enumerate(curveGroup):
                    bez = [Vector((vec[0], vec[i + 1])) for vec in fullbez]
                    newKey = fcurve.keyframe_points.insert(frame=bez[0].x, value=bez[0].y)
                    newKey.handle_right = (bez[1].x, bez[1].y)
                    newKey = fcurve.keyframe_points.insert(frame=bez[3].x, value=bez[3].y)
                    newKey.handle_left = (bez[2].x, bez[2].y)
        else:
            for bez in beziers:
                for vec in bez:
                    vec.resize_2d()
                newKey = fcurve.keyframe_points.insert(frame=bez[0].x, value=bez[0].y)
                newKey.handle_right = (bez[1].x, bez[1].y)
                newKey = fcurve.keyframe_points.insert(frame=bez[3].x, value=bez[3].y)
                newKey.handle_left = (bez[2].x, bez[2].y)

    #indices are detached from the data points' frame (x) values and stored in
    #the dataPoint objects, so [s,e] represents a range of point indices
    data_pts = createDataPts(curveGroup, group_mode)

    s = 0                    #start
    e = len(data_pts) - 1    #end

    beziers = []

    #begin the recursive fitting algorithm.
    fitCubic(data_pts, s, e)

    #remove old F-Curves and insert the new ones
    createNewCurves(curveGroup, beziers, group_mode)


#Main function of simplification
#sel_opt: either "sel" or "all" for which curves to affect
#error: maximum error allowed, as a fraction (20% = 0.0020),
#       i.e. the percentage wanted divided by 10000
#group_mode: boolean, whether to analyze each curve separately or in groups,
#            where a group is all curves that affect the same property
#            (e.g. a bone's x,y,z rotation)
def fcurves_simplify(sel_opt="all", error=0.002, group_mode=True):
    # main vars
    context = bpy.context
    obj = context.active_object
    fcurves = obj.animation_data.action.fcurves

    if sel_opt == "sel":
        sel_fcurves = [fcurve for fcurve in fcurves if fcurve.select]
    else:
        sel_fcurves = fcurves[:]

    #error threshold for the Newton-Raphson reparameterization
    reparaError = error * 32
    maxIterations = 16

    if group_mode:
        fcurveDict = {}
        #this loop sorts all the fcurves into groups of 3, based on their RNA
        #data path, which corresponds to the property they affect
        for curve in sel_fcurves:
            if curve.data_path in fcurveDict:
                #if this bone has been added, append the curve to its list
                fcurveDict[curve.data_path].append(curve)
            else:
                #new bone, add a new dict value with this first curve
                fcurveDict[curve.data_path] = [curve]
        fcurveGroups = fcurveDict.values()
    else:
        fcurveGroups = sel_fcurves

    if error > 0:
        #simplify every selected curve.
        totalt = 0
        for i, fcurveGroup in enumerate(fcurveGroups):
            print("Processing curve " + str(i + 1) + "/" + str(len(fcurveGroups)))
            t = time.time()
            simplifyCurves(fcurveGroup, error, reparaError, maxIterations, group_mode)
            t = time.time() - t
            print(str(t)[:5] + " seconds to process last curve")
            totalt += t
        print(str(totalt)[:5] + " seconds, total time elapsed")

    return
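
#Example usage (a minimal sketch; assumes this file has been run or imported
#inside Blender and that the object owning the keyframed action is active,
#e.g. from the Text Editor or the Python Console):
#
#   fcurves_simplify(sel_opt="all", error=0.002, group_mode=True)   #simplify every F-Curve
#   fcurves_simplify(sel_opt="sel", error=0.005, group_mode=False)  #only curves selected in the Graph Editor
#   autoloop_anim()   #set the scene range to one detected loop of the selected curves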