import math
import numpy

def rotate(self, angle_degrees, axis_x, axis_y, axis_z):
    """Rotate about an arbitrary axis, following the right-hand rule.

    I visualize the right-hand rule most easily as follows: using
    your right hand, wrap it naturally around the axis of rotation.
    Your fingers now point in the direction of rotation.
    """
    angle_radians = math.radians(angle_degrees)
    u = self.__make_normalized_vert3(axis_x, axis_y, axis_z)
    # Negating the axis builds the transpose of the usual column-vector
    # rotation matrix, which is what the row-vector layout used by this
    # class (see translate) needs to stay right-handed.
    u = -u
    # S is the skew-symmetric cross-product matrix of u.
    S = numpy.zeros((3, 3))
    S[0, 1] = -u[2]
    S[0, 2] = u[1]
    S[1, 0] = u[2]
    S[1, 2] = -u[0]
    S[2, 0] = -u[1]
    S[2, 1] = u[0]
    # Rodrigues' formula: R = u u^T + cos(a) (I - u u^T) + sin(a) S
    U = numpy.outer(u, u)
    R = U + math.cos(angle_radians) * (numpy.eye(3) - U) \
          + math.sin(angle_radians) * S
    # Embed the 3x3 rotation in a 4x4 homogeneous matrix.
    R = numpy.concatenate((R, numpy.zeros((3, 1))), axis=1)
    R = numpy.concatenate((R, numpy.zeros((1, 4))), axis=0)
    R[3, 3] = 1.0
    self.matrix = numpy.dot(R, self.matrix)
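# Sanity check (illustrative only, not part of the class): starting from a
# fresh identity transform t, rotating +x by 90 degrees about +z should
# land on +y, exactly what the right-hand rule predicts:
#
#   t.rotate(90.0, 0.0, 0.0, 1.0)
#   numpy.dot(numpy.array([1.0, 0.0, 0.0, 1.0]), t.matrix)
#   # -> approximately [0.0, 1.0, 0.0, 1.0]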
def scale(self, x, y, z):
    """Scale by x, y, and z along the respective axes."""
    T = numpy.eye(4)
    T[0, 0] = x
    T[1, 1] = y
    T[2, 2] = z
    self.matrix = numpy.dot(T, self.matrix)
def translate(self, x, y, z):
    """Translate by (x, y, z)."""
    T = numpy.eye(4)
    # Translation goes in the bottom row: this class stores matrices in
    # the row-vector (OpenGL memory) layout.
    T[3, 0] = x
    T[3, 1] = y
    T[3, 2] = z
    self.matrix = numpy.dot(T, self.matrix)
def __init__(self, matrix=None):
    if matrix is None:
        self.matrix = numpy.eye(4)  # start from the identity transform
    else:
        self.matrix = matrix
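# A minimal usage sketch, assuming these methods belong to a 4x4-transform
# class (called Transform below purely for illustration). Each call
# left-multiplies a new matrix onto self.matrix; vertices are homogeneous
# row vectors applied with numpy.dot(v, t.matrix):
#
#   t = Transform()
#   t.scale(2.0, 2.0, 2.0)
#   t.translate(1.0, 0.0, 0.0)
#   v = numpy.array([1.0, 0.0, 0.0, 1.0])
#   numpy.dot(v, t.matrix)
#   # -> [4.0, 0.0, 0.0, 1.0]: the translate, issued last, reaches the
#   # vertex first, as with OpenGL's matrix stack.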
def fminBFGS(f, x0, fprime=None, args=(), avegtol=1e-5, maxiter=None,
             fulloutput=0, printmessg=1):
    """xopt = fminBFGS(f, x0, fprime=None, args=(), avegtol=1e-5,
                       maxiter=None, fulloutput=0, printmessg=1)

    Optimize the function f, whose gradient is given by fprime, using the
    quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS).

    See Wright and Nocedal, 'Numerical Optimization', 1999, p. 198.
    """
    app_fprime = 0
    if fprime is None:
        app_fprime = 1  # approximate the gradient by finite differences

    x0 = numpy.asarray(x0)
    if maxiter is None:
        maxiter = len(x0) * 200
    func_calls = 0
    grad_calls = 0
    k = 0
    N = len(x0)
    gtol = N * avegtol
    I = numpy.eye(N)
    Hk = I  # initial approximation to the inverse Hessian

    if app_fprime:
        gfk = approx_fprime(x0, f, *args)
        func_calls = func_calls + len(x0) + 1
    else:
        gfk = fprime(x0, *args)
        grad_calls = grad_calls + 1

    xk = x0
    sk = [2 * gtol]
    while (numpy.sum(numpy.abs(gfk)) > gtol) and (k < maxiter):
        pk = -numpy.dot(Hk, gfk)  # quasi-Newton search direction
        alpha_k, fc, gc = line_search_BFGS(f, xk, pk, gfk, args)
        func_calls = func_calls + fc
        xkp1 = xk + alpha_k * pk
        sk = xkp1 - xk
        xk = xkp1
        if app_fprime:
            gfkp1 = approx_fprime(xkp1, f, *args)
            func_calls = func_calls + gc + len(x0) + 1
        else:
            gfkp1 = fprime(xkp1, *args)
            grad_calls = grad_calls + gc + 1

        yk = gfkp1 - gfk
        k = k + 1

        # BFGS update of the inverse Hessian approximation:
        # Hk+1 = (I - rho sk yk^T) Hk (I - rho yk sk^T) + rho sk sk^T
        rhok = 1.0 / numpy.dot(yk, sk)
        A1 = I - sk[:, numpy.newaxis] * yk[numpy.newaxis, :] * rhok
        A2 = I - yk[:, numpy.newaxis] * sk[numpy.newaxis, :] * rhok
        Hk = numpy.dot(A1, numpy.dot(Hk, A2)) + \
             rhok * sk[:, numpy.newaxis] * sk[numpy.newaxis, :]
        gfk = gfkp1

    if printmessg or fulloutput:
        fval = f(xk, *args)
    if k >= maxiter:
        warnflag = 1
        if printmessg:
            print("Warning: Maximum number of iterations has been exceeded")
            print("         Current function value: %f" % fval)
            print("         Iterations: %d" % k)
            print("         Function evaluations: %d" % func_calls)
            print("         Gradient evaluations: %d" % grad_calls)
    else:
        warnflag = 0
        if printmessg:
            print("Optimization terminated successfully.")
            print("         Current function value: %f" % fval)
            print("         Iterations: %d" % k)
            print("         Function evaluations: %d" % func_calls)
            print("         Gradient evaluations: %d" % grad_calls)

    if fulloutput:
        return xk, fval, func_calls, grad_calls, warnflag
    else:
        return xk
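if __name__ == '__main__':
    # A quick demonstration (a minimal sketch; it assumes the companion
    # helpers approx_fprime and line_search_BFGS from this module are
    # available): minimize a quadratic bowl whose gradient is supplied
    # analytically. The minimum is at (3, -1).
    def demo_f(x):
        return (x[0] - 3.0) ** 2 + (x[1] + 1.0) ** 2

    def demo_grad(x):
        return numpy.array([2.0 * (x[0] - 3.0), 2.0 * (x[1] + 1.0)])

    xopt = fminBFGS(demo_f, numpy.array([0.0, 0.0]), fprime=demo_grad)
    print("Minimum found at: %s" % xopt)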