Example #1
def gmrf_learn_cov_cholmod(
    R,
    U,
    rows,
    cols,
    edge_count,
    k,
    min_variance=1e-2,
    min_edge_count=10,
    num_iterations=50,
    psd_tolerance=1e-3,
    finish_early=True,
):
    n = len(R)
    m = len(U)
    mask = edge_count >= min_edge_count
    active_m = np.sum(mask)
    tic("m={0}, active m={1}".format(m, active_m), "gmrf_learn_cov_cholmod")
    active_U = U[mask]
    active_rows = rows[mask]
    active_cols = cols[mask]
    # A number of variables are independent (due to a lack of observations).
    independent_mask = independent_variables(n, active_rows, active_cols)
    # Put them aside and use the independent strategy to solve them.
    indep_idxs = np.arange(n)[independent_mask]
    R_indep = R[indep_idxs]
    # Solve the regularized version for independent variables
    D_indep = 1.0 / np.maximum(min_variance * np.ones_like(R_indep), R_indep)
    # Putting together the dependent and independent parts
    D = np.zeros_like(R)
    D[independent_mask] = D_indep
    P = np.zeros_like(U)
    # No need to solve for the outer diagonal terms, they are all zeros.
    # Solve for the dependent terms
    dependent_mask = ~independent_mask
    n_dep = np.sum(dependent_mask)
    if n_dep > 0:
        idxs_dep = np.arange(n)[dependent_mask]
        reverse_idxs_dep = np.zeros(n, dtype=np.int64)
        reverse_idxs_dep[dependent_mask] = np.arange(n_dep)
        rows_dep = reverse_idxs_dep[active_rows]
        cols_dep = reverse_idxs_dep[active_cols]
        R_dep = R[idxs_dep]
        U_dep = active_U
        (M, R_hat, U_hat) = normalized_problem(R_dep, U_dep, rows_dep, cols_dep)
        tic("Computing symbolic cholesky factorization of the graph...", "gmrf_learn_cov_cholmod")
        # Delayed import so that the rest of the code runs without scikits.sparse
        from scikits.sparse.cholmod import analyze

        Xs_dep = build_sparse(np.ones_like(R_hat), np.ones_like(U_hat), rows_dep, cols_dep)
        factor = analyze(Xs_dep)
        tic("Cholesky done", "gmrf_learn_cov_cholmod")
        # TODO add the other parameters
        (D_norm_dep, P_norm_dep) = covsel_cvx_cholmod(
            R_hat, U_hat, rows_dep, cols_dep, k, psd_tolerance, factor, num_iterations, finish_early
        )
        D[dependent_mask] = D_norm_dep / (M ** 2)
        P[mask] = P_norm_dep / (M[rows_dep] * M[cols_dep])
    return (D, P)
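
All of these examples rely on a build_sparse helper that is not shown on this page. Below is a minimal hypothetical sketch of its assumed behavior, inferred from how it is called: R holds the diagonal entries and U the off-diagonal entries, mirrored at (rows, cols) and (cols, rows).

import numpy as np
import scipy.sparse as sp

def build_sparse(R, U, rows, cols):
  # Hypothetical reconstruction: symmetric matrix with R on the diagonal
  # and U mirrored across both triangles, returned in CSC form for CHOLMOD.
  n = len(R)
  diag = sp.coo_matrix((R, (np.arange(n), np.arange(n))), shape=(n, n))
  off = sp.coo_matrix((U, (rows, cols)), shape=(n, n))
  return (diag + off + off.T).tocsc()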
Example #2
def is_psd_cholmod(R, U, rows, cols, tolerance=1e-6, factor=None):
  X = build_sparse(R, U, rows, cols)
  from scikits.sparse.cholmod import cholesky
  try:
    full_factor = cholesky(X) if factor is None else factor.cholesky(X)
  except Exception:
    # The Cholesky factorization fails when the matrix is not positive definite.
    return False
  D = full_factor.D()
  return np.all(D > tolerance)
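
When the same sparsity pattern is factored many times (as in the iterative solver of Example #1), the symbolic analysis can be done once and reused through the factor argument. A hedged usage sketch, assuming R, U, rows and cols are already defined:

from scikits.sparse.cholmod import analyze

factor = analyze(build_sparse(R, U, rows, cols))  # symbolic step, done once
ok = is_psd_cholmod(R, U, rows, cols, tolerance=1e-6, factor=factor)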
Example #3
def logdet_cholmod(R, U, rows, cols, psd_tolerance=1e-6, factor=None):
  from scikits.sparse.cholmod import cholesky
  if not is_psd_cholmod(R, U, rows, cols, psd_tolerance, factor):
    return -np.inf
  X = build_sparse(R, U, rows, cols)
  filled_factor = cholesky(X) if factor is None else factor.cholesky(X)
  D = filled_factor.D()
  # With X = L D L', log det X is the sum of the logs of the diagonal D.
  return np.sum(np.log(D))
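
For intuition, the same quantity can be computed densely: if X = L L^T, then log det X = 2 * sum(log(diag(L))). This is presumably what the logdet_dense_chol referenced in Example #6 computes, although that function is not shown; a self-contained sketch:

import numpy as np
import scipy.linalg

def logdet_dense_chol_sketch(X):
  # Dense Cholesky: X = L L^T, so log det X = 2 * sum(log(diag(L))).
  L = scipy.linalg.cholesky(X, lower=True)
  return 2.0 * np.sum(np.log(np.diag(L)))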
Example #4
def smallest_ev_arpack(R, U, rows, cols, tolerance=1e-4):
  from scipy.sparse.linalg import eigsh
  X = build_sparse(R, U, rows, cols)
  # Shift-invert mode: which='LM' with a sigma shift returns the eigenvalue
  # of X closest to sigma, so a negative sigma targets the smallest one.
  (eis, _) = eigsh(X, k=1, sigma=-3, which='LM', tol=tolerance, maxiter=1000)
  return eis[0]
Example #5
def is_psd_arpack(R, U, rows, cols, tolerance=1e-4):
  from scipy.sparse.linalg import eigsh
  X = build_sparse(R, U, rows, cols)
  (eis, _) = eigsh(X, k=1, sigma=-1, which='LM')
  return eis[0] > tolerance
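
Both ARPACK helpers use shift-invert mode: with a sigma shift, eigsh with which='LM' returns the eigenvalues of X closest to sigma, so a negative sigma below the spectrum targets the smallest one. A minimal self-contained illustration:

import scipy.sparse as sp
from scipy.sparse.linalg import eigsh

X = sp.diags([3.0, 1.0, 0.5]).tocsc()
(eis, _) = eigsh(X, k=1, sigma=-1, which='LM')
print(eis[0])  # ~0.5, the smallest eigenvalue of X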
Example #6
  covsel_cvx_cholmod
from mm.arterial_hkt.gmrf_learning.quic_cpp.low_rank import random_projection_cholmod,\
  random_projection_cholmod_csc
from scikits.sparse.cholmod import analyze

def star(n, diag):
  """ Builds a star-shaped test problem: node 0 is connected to all others. """
  m = n - 1
  D = diag * np.ones(n, dtype=np.double) + np.arange(n) / float(n)
  P = np.arange(m) / float(m) + 1
  rows = np.zeros((m,), dtype=np.int64)
  cols = np.arange(1, n, dtype=np.int64)
  return (D, P, rows, cols)

(D,P,rows,cols) = star(5,4)
X = build_dense(D, P, rows, cols)
Xs = build_sparse(D, P, rows, cols)

l1 = logdet_dense(D, P, rows, cols)
l2 = logdet_dense_chol(D, P, rows, cols)
l3 = logdet_cholmod(D, P, rows, cols)

(M,Dn,Pn) = normalized_problem(D, P, rows, cols)

test_data(D, P, rows, cols)
W = la.inv(X)
Q = random_projection_cholmod_csc(Xs, k=1000)
A = Q.T
print(A.shape)
R = np.sum(A * A, axis=1)
U = np.sum(A[rows] * A[cols], axis=1)
Example #7
def independent_variables(n, rows, cols):
  """ Returns a boolean mask selecting the independent variables, i.e. the
  variables that appear in no edge.
  """
  m = len(rows)
  X = build_sparse(np.ones(n), np.ones(m), rows, cols)
  # A row sums to 1 exactly when the variable has only its diagonal entry.
  return np.asarray(X.sum(axis=1)).ravel() == 1
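
A quick hypothetical check of the mask: with four variables and the single edge (0, 1), variables 2 and 3 have no neighbors and come back as independent.

import numpy as np
mask = independent_variables(4, np.array([0]), np.array([1]))
print(mask)  # [False False  True  True]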
Example #8
def random_projection_cholmod(R, U, rows, cols, k, factor=None):
  X = build_sparse(R, U, rows, cols)
  return random_projection_cholmod_csc(X, k, factor)
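
Example #6 turns the projection Q into estimates of the marginal variances (R) and edge covariances (U) of the model. Assuming random_projection_cholmod_csc draws A = L^{-T} Z / sqrt(k) with Z standard normal and X = L L^T (the snippets here do not show its internals), then E[A A^T] = X^{-1}, so squared row norms of A estimate diag(X^{-1}). A dense sanity check of that assumed estimator:

import numpy as np

rng = np.random.default_rng(0)
X = np.array([[4.0, 1.0], [1.0, 3.0]])  # small SPD test matrix
L = np.linalg.cholesky(X)               # X = L L^T
k = 20000
Z = rng.standard_normal((2, k))
A = np.linalg.solve(L.T, Z) / np.sqrt(k)
print(np.sum(A * A, axis=1))            # ~ diag(inv(X)) = [0.273, 0.364]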