def function_and_gradient(self, x):
    """Negative log-likelihood and its gradient for the free rate-matrix elements.

    Builds the rate matrix K from the independent elements x (scaled by the
    stationary distribution pi to keep K reversible), propagates it to the
    transition matrix T = expm(dt*K), and evaluates the count-matrix
    log-likelihood sum_ij C_ij * log(T_ij) plus its gradient.

    Parameters
    ----------
    x : ndarray, 1-D
        Current values of the independent (off-diagonal) rate-matrix elements.

    Returns
    -------
    (float, ndarray)
        (-f, -grad): negated log-likelihood and negated gradient, suitable
        for a minimizer.
    """
    # Bounds are enforced by the caller's optimizer; violation is reported
    # (raise or warn) according to the configured on_error policy.
    if not np.all(x >= self.lower_bounds):
        raise_or_warn('Optimizer violated the lower bounds for rate matrix elements.',
                      self.on_error)
    # compute function: assemble K with reversible off-diagonal elements and
    # diagonal chosen so that every row sums to zero.
    K = np.zeros((self.N, self.N))
    K[self.I, self.J] = x / self.pi[self.I]
    K[self.J, self.I] = x / self.pi[self.J]
    np.fill_diagonal(K, -sum1(K))
    # compute eigendecomposition (lam, A, Ainv are reused below for the
    # gradient via getV; T itself is computed with expm instead)
    lam, A, Ainv = eigen_decomposition(K, self.pi)
    # T = kdot(kdot(A,np.diag(np.exp(self.tau*lam))),Ainv)
    T = sp.linalg.expm(self.dt * K)
    T[self.zero_C] = 1.0  # set unused elements to dummy to avoid division by 0
    # check T!=0 for C!=0: log(T) below must not see (near-)zeros where the
    # count matrix has observations, so clamp those entries to 1E-20.
    if np.any(np.abs(T[self.nonzero_C]) <= 1E-20):
        warnings.warn('Warning: during iteration T_ij became very small while C(tau)_ij > 0. Regularizing T.',
                      NotConnectedWarning)
        for i, j in zip(*self.nonzero_C):
            if T[i, j] <= 1E-20:
                T[i, j] = 1E-20
    # log-likelihood via Kahan-compensated summation
    f = ksum(self.C * np.log(T))
    if self.verbose:
        logging.info('iteration=%d, log L=%f' % (self.count, f))
    # NOTE(review): source was whitespace-mangled; self.count is reconstructed
    # as an every-evaluation counter (outside the verbose branch) — confirm.
    self.count += 1
    # Gradient via the Najfeld/Havel-style derivative of expm:
    # V holds the divided differences of exp(dt*lam) over eigenvalue pairs.
    V = getV(lam, self.dt)
    # M = Ainv.dot(Ctau.T/T.T).dot(A)*V.T
    M = kdot(kdot(Ainv, np.ascontiguousarray(self.C.T / T.T)), A) * np.ascontiguousarray(V.T)
    # H = A.dot(M).dot(Ainv)
    H = kdot(kdot(A, M), Ainv)
    grad = np.zeros(len(x))
    # self.D[i] is a sparse derivative matrix dK/dx_i; contract with H.
    for i in range(len(x)):
        Di = self.D[i]
        grad[i] = vdot(H[Di.col, Di.row], Di.data)  # this is (H.T*Di).sum()
    # negate because the optimizer minimizes
    return (-f, -grad)
def test_kdot(self):
    """kdot must agree with numpy's matrix product on random matrices."""
    rows = np.random.randint(1, high=100)
    inner = np.random.randint(1, high=100)
    cols = np.random.randint(1, high=100)
    lhs = np.random.randn(rows, inner)
    rhs = np.random.randn(inner, cols)
    expected = lhs.dot(rhs)
    assert np.allclose(expected, kahandot.kdot(lhs, rhs))
def vdot(a, b):
    """Scalar product of two 1-d arrays with Kahan-compensated summation.

    Views ``a`` as a row vector and ``b`` as a column vector and delegates
    to the compensated matrix product ``kdot``; the single entry of the
    resulting 1x1 matrix is the dot product.
    """
    length = a.shape[0]
    row = a.reshape((1, length))
    col = b.reshape((length, 1))
    return kdot(row, col)[0, 0]