Example #1
0
def obj_cholmod(R_hat, U_hat, rows, cols, D, P, psd_tolerance=1e-6, factor=None):
    """Dense objective of the maximization problem.

    R_hat, U_hat -- sufficient statistics for the learning problem
      (presumably diagonal and off-diagonal statistics; verify against caller).
    rows, cols -- index arrays describing the sparsity pattern.
    D, P -- diagonal and off-diagonal parameter vectors.
    psd_tolerance -- tolerance forwarded to the log-determinant routine.
    factor -- optional pre-computed Cholesky factor, forwarded as-is.
    """
    # Linear part of the objective: <R_hat, D> + 2 <U_hat, P>.
    linear_term = R_hat.dot(D) + 2 * U_hat.dot(P)
    # Log-determinant part, computed via CHOLMOD by the external helper.
    log_det = logdet_cholmod(D, P, rows, cols, psd_tolerance, factor)
    return linear_term - log_det
Example #2
0
def star(n, diag):
    """Build a star-graph test problem with ``n`` nodes.

    Node 0 is the hub; each of the other n-1 nodes is connected to it.

    n    -- number of nodes (the graph has n-1 edges).
    diag -- base value for the diagonal entries.

    Returns (D, P, rows, cols) where D are the n diagonal values,
    P the n-1 edge values, and (rows, cols) the edge index arrays.
    """
    m = n - 1
    # Diagonal: base value plus a small ramp so the entries are distinct.
    D = diag * np.ones(n, dtype=np.double) + np.arange(n) / float(n)
    # Edge values in [1, 2).
    P = np.arange(m) / float(m) + 1
    # Every edge connects the hub (node 0) to node j, j = 1..n-1.
    # NOTE: `np.int` was removed in NumPy 1.24; plain `int` gives the same
    # default integer dtype without the deprecation/removal problem.
    rows = np.zeros((m,), dtype=int)
    cols = np.arange(1, n, dtype=int)
    return (D, P, rows, cols)

# Smoke test: build a small star-graph problem and compare the dense,
# dense-Cholesky and CHOLMOD log-determinant implementations on it.
(D, P, rows, cols) = star(5, 4)
X = build_dense(D, P, rows, cols)
Xs = build_sparse(D, P, rows, cols)

# All three should agree on the same input.
l1 = logdet_dense(D, P, rows, cols)
l2 = logdet_dense_chol(D, P, rows, cols)
l3 = logdet_cholmod(D, P, rows, cols)

(M, Dn, Pn) = normalized_problem(D, P, rows, cols)

test_data(D, P, rows, cols)
# Exact inverse, used as the reference below.
W = la.inv(X)
#Q = random_projection_cholmod(D, U, rows, cols, k, factor)
Q = random_projection_cholmod_csc(Xs, k=1000)
A = Q.T
# FIX: `print A.shape` is Python 2 statement syntax (SyntaxError on
# Python 3); use the print function instead.
print(A.shape)
# Randomized estimates of the diagonal and pattern entries of X^{-1}
# (presumably via the random projection above — TODO confirm scaling).
R = np.sum(A * A, axis=1)
U = np.sum(A[rows] * A[cols], axis=1)
# Exact values from the dense inverse, for comparison with R and U.
R_ = W.diagonal()
U_ = W[rows, cols]

#X = build_sparse(D, P, rows, cols)