Example #1
def test_initial_value_shape():
    shapedAns = answer.reshape(-1, 1)
    [x, status, hist] = l1ls(A, y, lmbda, x0=shapedAns, tar_gap=rel_tol)
    assert_allclose(x, shapedAns, atol=1e-5)
    expect(x.shape).to_equal(shapedAns.shape)
    expect(hist.shape[0]).to_equal(1)
    expect(status).to_equal('Solved')
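These tests reference module-level fixtures (A, y, lmbda, rel_tol, answer, and in later examples answer_high_accuracy) plus an expect() assertion helper, none of which are shown. The sketch below is one hypothetical way to build such fixtures so the tests can run; the names match the tests, but the values are illustrative and not the originals, and the expect() helper (from whatever expectation library the original suite uses) is left out.

# Hypothetical fixture setup (illustrative values, not the original test data).
import numpy as np
from numpy.testing import assert_allclose
from l1ls import l1ls  # assumed import matching the bare l1ls() calls above

rng = np.random.RandomState(0)
A = rng.randn(20, 10)                  # small, dense over-determined system
x_true = np.zeros(10)
x_true[[2, 5]] = [1.5, -2.0]           # sparse ground truth
y = A.dot(x_true)
lmbda = 0.01                           # l1 regularization weight
rel_tol = 1e-4                         # passed to the solver as tar_gap

# Reference solutions: here simply the solver's own output at two tolerances.
answer, _, _ = l1ls(A, y, lmbda, tar_gap=rel_tol)
answer_high_accuracy, _, _ = l1ls(A, y, lmbda, tar_gap=rel_tol / 10)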
Example #2
def RSC_identif(train_set, test_image, mean, reductor, dico_norm, nb_classes):
    """ Perform the identification of a test_image thanks to the adapted RSC algorithm """
    e = np.array((test_image - mean).astype(float))
    norm_y = norm_column(test_image)
    test_normalized = normalize_column(test_image)
    for j in range(NB_ITER):
        delta = f_delta(e)
        mu = PARAM_C / delta
        before_exp = mu * (e ** 2 - delta)
        todiag = to_diag(before_exp)

        w_train = normalize_matrix(train_set * todiag[:, np.newaxis])
        w_test = normalize_column(todiag * test_image)

        w_train_red = dim_reduct(w_train, reductor, DIM_REDUCTION)
        w_test_red = dim_reduct(w_test, reductor, DIM_REDUCTION)
        D = normalize_matrix(w_train_red)
        y = normalize_column(w_test_red)

        if REG_METHOD == 'l1':
            [x, status, hist] = L.l1ls(D, y, LAMBDA, quiet=True)
        else:
            x = l2_ls(D, y, LAMBDA)

        e = norm_y * (test_normalized - dico_norm.dot(x))

    return classif(D, y, x, TRAINING_FACES, nb_classes)
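The 'l2' branch above (also present in Example #3) calls an l2_ls helper that is not shown. A minimal sketch of what such a helper might do, assuming it returns the closed-form ridge (Tikhonov) solution of min_x ||D x - y||^2 + lmbda ||x||^2:

import numpy as np

def l2_ls(D, y, lmbda):
    """Hypothetical l2 fallback: closed-form ridge solution
    x = (D^T D + lmbda * I)^{-1} D^T y."""
    n = D.shape[1]
    return np.linalg.solve(D.T.dot(D) + lmbda * np.eye(n), D.T.dot(np.ravel(y)))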
Example #3
def RSC_identif(train_set, test_image, mean, reductor, dico_norm, nb_classes):
    """ Perform the identification of a test_image thanks to the adapted RSC algorithm """
    e = np.array((test_image - mean).astype(float))
    norm_y = norm_column(test_image)
    test_normalized = normalize_column(test_image)
    for j in range(NB_ITER):
        delta = f_delta(e)
        mu = PARAM_C / delta
        before_exp = mu * (e**2 - delta)
        todiag = to_diag(before_exp)

        w_train = normalize_matrix(train_set * todiag[:, np.newaxis])
        w_test = normalize_column(todiag * test_image)

        w_train_red = dim_reduct(w_train, reductor, DIM_REDUCTION)
        w_test_red = dim_reduct(w_test, reductor, DIM_REDUCTION)
        D = normalize_matrix(w_train_red)
        y = normalize_column(w_test_red)

        if REG_METHOD == 'l1':
            [x, status, hist] = L.l1ls(D, y, LAMBDA, quiet=True)
        else:
            x = l2_ls(D, y, LAMBDA)

        e = norm_y * (test_normalized - dico_norm.dot(x))

    return classif(D, y, x, TRAINING_FACES, nb_classes)
Example #4
def test_initial_value_shape():
    shapedAns = answer.reshape(-1, 1)
    [x, status, hist] = l1ls(A, y, lmbda, x0=shapedAns, tar_gap=rel_tol)
    assert_allclose(x, shapedAns, atol=1e-5)
    expect(x.shape).to_equal(shapedAns.shape)
    expect(hist.shape[0]).to_equal(1)
    expect(status).to_equal("Solved")
Example #5
def sift_identif(sift, dico):
    """Classify a SIFT descriptor against the dictionary `dico` via
    iteratively reweighted sparse coding (uses several module-level globals)."""
    print("sift")
    e = np.array((sift - mean).astype(float))
    norm_y = normColumn(sift)
    NTest = normalizeColumn(sift)
    for j in range(nbIter):
        delta = fdelta(e)
        mu = param_c / delta
        before_exp = mu * (e**2 - delta)
        todiag = toDiag(before_exp)

        WTrain = normalizeMatrix(dico * todiag[:, np.newaxis])
        WTest = normalizeColumn(todiag * sift)

        WTrainRed = dimReduct(WTrain, reductor)
        WTestRed = dimReduct(WTest, reductor)
        D = normalizeMatrix(WTrainRed)
        y = normalizeColumn(WTestRed)

        [x, status, hist] = L.l1ls(D, y, lmbda, quiet=True)
        #x = l2_ls(D, y, lmbda)

        if j == 0:
            alpha = x
        else:
            # eta = find_eta(y, D, alpha, x, mu, delta, nbDim)  # adaptive step size (disabled)
            alpha = alpha + eta * (x - alpha)  # 'eta' is assumed to be a module-level constant

        if not (silence):
            #debug_alpha(alpha)
            pass

        e = norm_y * (NTest - dico_norm.dot(alpha))
    cl = classif(D, y, alpha, nbLabels)
    print(cl)
    return cl
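Example #5 (like Examples #2 and #3, which use snake_case equivalents) leans on small normalization helpers that are not included: normColumn, normalizeColumn and normalizeMatrix. A plausible sketch, assuming they perform plain column-wise L2 normalization:

import numpy as np

def normColumn(v):
    # L2 norm of a single (possibly column-shaped) vector; assumed behaviour
    return np.linalg.norm(np.ravel(v))

def normalizeColumn(v):
    # Scale a vector to unit L2 norm; assumed behaviour
    nrm = normColumn(v)
    return v / nrm if nrm > 0 else v

def normalizeMatrix(M):
    # Scale every column of M to unit L2 norm; assumed behaviour
    norms = np.linalg.norm(M, axis=0)
    norms = np.where(norms > 0, norms, 1.0)
    return M / norms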
Example #6
def test_initial_value():
    [x, status, hist] = l1ls(A, y, lmbda, x0=answer, tar_gap=rel_tol)
    assert_allclose(x, answer, atol=1e-5)
    expect(x.shape).to_equal(answer.shape)
    expect(hist.shape[0]).to_equal(1)
    expect(status).to_equal('Solved')
Example #7
def test_high_accuracy():
    [x, status, hist] = l1ls(A, y, lmbda, tar_gap=rel_tol / 10)
    assert_allclose(x, answer_high_accuracy, atol=1e-5)
    expect(hist.shape).to_equal((16, 5))
    expect(status).to_equal('Solved')
Example #8
def test_small_example_sparse():
    [x, status, hist] = l1ls(S.csr_matrix(A), y, lmbda, tar_gap=rel_tol)
    assert_allclose(x, answer, atol=1e-5)
    expect(hist.shape).to_equal((12, 5))
    expect(status).to_equal('Solved')
Example #9
def test_small_example_zero():
    [x, status, hist] = l1ls(A, np.zeros_like(y), lmbda, tar_gap=rel_tol)
    expect(status).to_equal('Failed')
Example #10
def test_shaped_y():
    # The shape of 'y' (flat or column vector) does not change the result
    [x, status, hist] = l1ls(A, y.reshape(-1, 1), lmbda, tar_gap=rel_tol)
    expect(x.shape).to_equal((A.shape[1], ))
    expect(status).to_equal('Solved')
Example #11
def test_initial_value():
    [x, status, hist] = l1ls(A, y, lmbda, x0=answer, tar_gap=rel_tol)
    assert_allclose(x, answer, atol=1e-5)
    expect(x.shape).to_equal(answer.shape)
    expect(hist.shape[0]).to_equal(1)
    expect(status).to_equal("Solved")
Example #12
def test_high_accuracy():
    [x, status, hist] = l1ls(A, y, lmbda, tar_gap=rel_tol / 10)
    assert_allclose(x, answer_high_accuracy, atol=1e-5)
    expect(hist.shape).to_equal((16, 5))
    expect(status).to_equal("Solved")
Example #13
def test_small_example_sparse():
    [x, status, hist] = l1ls(S.csr_matrix(A), y, lmbda, tar_gap=rel_tol)
    assert_allclose(x, answer, atol=1e-5)
    expect(hist.shape).to_equal((12, 5))
    expect(status).to_equal("Solved")
Example #14
def test_small_example_zero():
    [x, status, hist] = l1ls(A, np.zeros_like(y), lmbda, tar_gap=rel_tol)
    expect(status).to_equal("Failed")
Example #15
def test_shaped_y():
    # The shape of 'y' (flat or column vector) does not change the result
    [x, status, hist] = l1ls(A, y.reshape(-1, 1), lmbda, tar_gap=rel_tol)
    expect(x.shape).to_equal((A.shape[1],))
    expect(status).to_equal("Solved")
Example #16
# Reconstruction with L2 (Ridge) penalization
rgr_ridge = Ridge(alpha=0.2)
start1 = timeit.default_timer()
rgr_ridge.fit(proj_operator, proj.ravel())
rec_l2 = rgr_ridge.coef_.reshape(l, l)
stop1 = timeit.default_timer()
print(stop1 - start1)

print('rec', rec_l2.shape)

# Reconstruction with L1 penalization via l1ls
lmbda = 0.1
rel_tol = 0.05
start = timeit.default_timer()
[x, status, hist] = L.l1ls(proj_operator, proj.ravel(), lmbda, tar_gap=rel_tol)
stop = timeit.default_timer()
print(stop - start)
rec_l1 = x.reshape(l, l)  # reshape the l1 solution back into an (l, l) image
print(x.shape)

# Hyperparameter tuning: retry with a smaller lambda
lmbda = 0.01
rel_tol = 0.05
start = timeit.default_timer()
[x2, status2, hist2] = L.l1ls(proj_operator,
                              proj.ravel(),
                              lmbda,
                              tar_gap=rel_tol)
stop = timeit.default_timer()
print(stop - start)
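The script above assumes a sparse projection matrix proj_operator, the measured projections proj, an image side length l, plus imports for timeit, Ridge and l1ls, all defined earlier in the original file (the original likely builds a real projection operator). The sketch below fabricates compatible synthetic inputs purely so the snippet can be run end to end; it is not the original data pipeline.

# Hypothetical setup for the reconstruction script above (synthetic data, assumed names).
import timeit
import numpy as np
import scipy.sparse as S
from sklearn.linear_model import Ridge
import l1ls as L

l = 32                                   # side length of the (l x l) image to reconstruct
rng = np.random.RandomState(0)
image = np.zeros((l, l))
image[8:24, 8:24] = 1.0                  # simple square phantom
# Random sparse stand-in for the projection operator.
proj_operator = S.random(18 * l, l * l, density=0.02, format='csr', random_state=rng)
proj = proj_operator.dot(image.ravel())  # synthetic measurement vector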