Example no. 1
0
def create_aop_learner(data, lifting=2, nu=1E3, kappa=1 * 1e6, mu=8 * 1E4):
    """Build a conjugate-gradient learner for an analysis operator (AOP).

    Parameters
    ----------
    data : 2-D array, shape (k, n_samples)
        Training signals laid out column-wise (assumed from the `T.dot`
        below -- TODO confirm against caller).
    lifting : int
        Over-completeness factor: the operator has ``k * lifting`` rows.
    nu : float
        Smoothing parameter of the log-squared sparsity cost.
    kappa : float
        Weight of the full-rank constraint (rescaled by ``1/log(k**2)``).
    mu : float
        Weight of the no-linear-dependencies constraint (rescaled to the
        number of distinct row pairs).

    Returns
    -------
    tuple
        ``(cg_Omega_data, Omega, Omega_sym)``: the ObliqueCG optimizer,
        the shared operator variable, and its symbolic placeholder.
    """
    k = data.shape[0]
    # Random init with unit-norm rows (points on the oblique manifold).
    init_Omega = np.float32(rng.randn(k * lifting, k))
    # `range`, not the Python-2-only `xrange`, for Python 3 compatibility
    # (and consistency with the other copy of this function in the file).
    for i in range(0, k * lifting):
        init_Omega[i, :] = init_Omega[i, :] / sqrt((init_Omega[i, :]**2).sum())
    Omega = theano.shared(init_Omega)
    Omega_sym = T.matrix("Omega")
    # Rescale the penalty weights so they stay comparable across problem sizes.
    kappa *= 1.0 / log((k**2))
    mu = mu * 2.0 / ((lifting * k)**2 - lifting * k)

    Omega_normal = normalizing_AOP(Omega_sym, k)
    no_lin_constraint = mu * no_linear_dependencies_constraint(
        Omega_sym, k * lifting)
    full_rank_constraint_ = kappa * full_rank_constraint(
        Omega_sym, k * lifting, k)

    # Sparsity-promoting data term plus the two regularizers.
    cost_Omega_data = normalized_l2norm_cost(
        logsquared_cost(T.dot(Omega_normal, data), nu, axis=0),
        data.shape[1]) + no_lin_constraint + full_rank_constraint_

    grad_Omega_data = T.grad(cost_Omega_data, Omega_sym)

    cg_Omega_data = ObliqueCG(Omega,
                              cost_Omega_data,
                              grad_Omega_data,
                              Omega_sym,
                              k * lifting,
                              k,
                              t_init=1,
                              rho=0.9,
                              max_iter_line_search=125)
    return cg_Omega_data, Omega, Omega_sym
Example no. 2
0
def create_aop_denoiser(Omega, noisy,nu, alpha):
    """Set up a CG optimizer that denoises `noisy` under the operator `Omega`.

    The cost is a data-fidelity term (mean squared distance to the noisy
    observations, per column) plus `alpha` times a log-squared sparsity
    penalty on the analysis coefficients.

    Returns the CG optimizer and the shared variable holding the
    denoised estimate (initialized to the noisy signal itself).
    """
    signal_dim = noisy.shape[0]
    observed = noisy.astype(np.float32)
    Omega_sym = T.matrix('Omega')
    noisy_shared = theano.shared(observed)
    denoised = theano.shared(np.copy(observed))
    denoised_sym = T.matrix("denoised")
    Omega_normal = normalizing_AOP(Omega_sym, signal_dim)
    # Fidelity: average squared deviation from the observations per sample.
    fidelity = ((noisy_shared - denoised_sym) ** 2).sum() / denoised_sym.shape[1]
    # Sparsity of the analysis coefficients of the current estimate.
    sparsity = normalized_l2norm_cost(
        logsquared_cost(T.dot(Omega_normal, denoised_sym), nu, axis=0),
        denoised_sym.shape[1])
    cost = fidelity + alpha * sparsity
    grad = theano.grad(cost, denoised_sym)
    cg_denoising = CG(denoised, cost, grad, denoised_sym,
                      k=signal_dim, n=noisy.shape[1],
                      t_init=1, rho=0.9, max_iter_line_search=125,
                      other_givens={Omega_sym: Omega})
    return cg_denoising,denoised
Example no. 3
0
def create_aop_denoiser(Omega, noisy, nu, alpha):
    """Construct a CG-based denoiser for `noisy` given the operator `Omega`.

    Minimizes (per-column) squared distance to the observations plus
    `alpha` times a log-squared sparsity cost on the analysis
    coefficients, smoothed by `nu`.

    Returns the CG optimizer together with the shared variable that
    holds the running denoised estimate (started at the noisy input).
    """
    Omega_sym = T.matrix('Omega')
    noisy_f32 = noisy.astype(np.float32)
    noisy_shared = theano.shared(noisy_f32)
    denoised = theano.shared(np.copy(noisy_f32))
    denoised_sym = T.matrix("denoised")
    Omega_normal = normalizing_AOP(Omega_sym, noisy.shape[0])
    n_cols = denoised_sym.shape[1]
    residual_term = ((noisy_shared - denoised_sym) ** 2).sum() / n_cols
    penalty_term = normalized_l2norm_cost(
        logsquared_cost(T.dot(Omega_normal, denoised_sym), nu, axis=0), n_cols)
    cost = residual_term + alpha * penalty_term
    grad = theano.grad(cost, denoised_sym)
    cg_denoising = CG(denoised,
                      cost,
                      grad,
                      denoised_sym,
                      k=noisy.shape[0],
                      n=noisy.shape[1],
                      t_init=1,
                      rho=0.9,
                      max_iter_line_search=125,
                      other_givens={Omega_sym: Omega})
    return cg_denoising, denoised
Example no. 4
0
def create_aop_learner(data, lifting=2, nu=1E3, kappa=1 * 1e6, mu=8 * 1E4):
    """Create an oblique-manifold CG learner for an analysis operator.

    `data` is a 2-D array whose first axis has length k (the signal
    dimension -- columns are presumably individual samples; verify
    against the caller). The learned operator has ``k * lifting`` rows.
    `nu` smooths the log-squared sparsity cost; `kappa` and `mu` weight
    the full-rank and no-linear-dependencies constraints and are rescaled
    below to be size-independent.

    Returns ``(optimizer, Omega_shared, Omega_symbolic)``.
    """
    k = data.shape[0]
    n_rows = k * lifting
    # Random start; normalize every row to unit Euclidean norm.
    init_Omega = np.float32(rng.randn(n_rows, k))
    for row in range(0, n_rows):
        init_Omega[row, :] = init_Omega[row, :] / sqrt((init_Omega[row, :] ** 2).sum())
    Omega = theano.shared(init_Omega)
    Omega_sym = T.matrix("Omega")
    # Make the constraint weights comparable across problem sizes.
    kappa *= 1.0 / log((k ** 2))
    mu = mu * 2.0 / ((lifting * k) ** 2 - lifting * k)

    Omega_normal = normalizing_AOP(Omega_sym, k)
    lin_penalty = mu * no_linear_dependencies_constraint(Omega_sym, n_rows)
    rank_penalty = kappa * full_rank_constraint(Omega_sym, n_rows, k)

    # Data term: sparsity of the analysis coefficients, normalized per sample.
    data_term = normalized_l2norm_cost(
        logsquared_cost(T.dot(Omega_normal, data), nu, axis=0), data.shape[1])
    cost_Omega_data = data_term + lin_penalty + rank_penalty

    grad_Omega_data = T.grad(cost_Omega_data, Omega_sym)

    cg_Omega_data = ObliqueCG(Omega, cost_Omega_data, grad_Omega_data,
                              Omega_sym, n_rows, k,
                              t_init=1, rho=0.9, max_iter_line_search=125)
    return cg_Omega_data, Omega, Omega_sym