Example 1
def test_blocks_crf_directional():
    # test latent directional CRF on blocks
    # test that all results are the same as equivalent LatentGridCRF
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
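    # 2 labels x 2 latent states per label = 4 latent states;
    # pairwise_weights packs the symmetric 4x4 latent-state potential
    # (cross-label couplings -4, within-label 0), and pw_directional below
    # stores a full 4x4 matrix per edge direction (2 x 16 = 32 entries)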
    pairwise_weights = np.array([0, 0, 0, -4, -4, 0, -4, -4, 0, 0])
    unary_weights = np.repeat(np.eye(2), 2, axis=0)
    w = np.hstack([unary_weights.ravel(), pairwise_weights])
    pw_directional = np.array([0,   0, -4, -4,
                               0,   0, -4, -4,
                               -4, -4,  0,  0,
                               -4, -4,  0,  0,
                               0,   0, -4, -4,
                               0,   0, -4, -4,
                               -4, -4,  0,  0,
                               -4, -4,  0,  0])
    w_directional = np.hstack([unary_weights.ravel(), pw_directional])
    crf = LatentGridCRF(n_states_per_label=2, inference_method='lp')
    crf.initialize(X, Y)
    directional_crf = LatentDirectionalGridCRF(n_states_per_label=2,
                                               inference_method='lp')
    directional_crf.initialize(X, Y)
    h_hat = crf.inference(x, w)
    h_hat_d = directional_crf.inference(x, w_directional)
    assert_array_equal(h_hat, h_hat_d)

    h = crf.latent(x, y, w)
    h_d = directional_crf.latent(x, y, w_directional)
    assert_array_equal(h, h_d)

    h_hat = crf.loss_augmented_inference(x, y, w)
    h_hat_d = directional_crf.loss_augmented_inference(x, y, w_directional)
    assert_array_equal(h_hat, h_hat_d)

    joint_feature = crf.joint_feature(x, h_hat)
    joint_feature_d = directional_crf.joint_feature(x, h_hat)
    assert_array_equal(np.dot(joint_feature, w),
                       np.dot(joint_feature_d, w_directional))
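Note: the snippets in this gallery are extracted from the pystruct test suite and are not self-contained. Below is a minimal sketch of the imports they appear to rely on (module paths as in recent pystruct releases; treat the exact names as assumptions, since they vary between versions). Several snippets also reference a module-level inference_method, and the older ones call crf.psi(), which was later renamed to crf.joint_feature().

# assumed common setup for the test snippets (not part of the originals)
import numpy as np
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
                           assert_almost_equal)
from nose.tools import assert_equal, assert_true

from pystruct.datasets import generate_blocks
from pystruct.inference import get_installed
from pystruct.models import (GridCRF, GraphCRF, EdgeFeatureGraphCRF,
                             LatentGridCRF, LatentDirectionalGridCRF,
                             LatentNodeCRF)
from pystruct.learners import (OneSlackSSVM, NSlackSSVM, SubgradientSSVM,
                               StructuredPerceptron, LatentSSVM)
from pystruct.utils import make_grid_edges, find_constraint

# several snippets assume a module-level inference method, e.g.
inference_method = get_installed(['lp', 'ad3', 'qpbo'])[0]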
Example 2
def test_binary_blocks_one_slack_graph():
    #testing cutting plane ssvm on easy binary dataset
    # generate graphs explicitly for each example
    print("testing %s" % inference_method)
    X, Y = generate_blocks(n_samples=3)
    crf = GraphCRF(inference_method=inference_method)
    clf = OneSlackSSVM(model=crf, max_iter=100, C=1,
                       check_constraints=True, break_on_bad=True,
                       n_jobs=1, tol=.1)
    x1, x2, x3 = X
    y1, y2, y3 = Y
    n_states = len(np.unique(Y))
    # delete some rows to make it more fun
    x1, y1 = x1[:, :-1], y1[:, :-1]
    x2, y2 = x2[:-1], y2[:-1]
    # generate graphs
    X_ = [x1, x2, x3]
    G = [make_grid_edges(x) for x in X_]

    # reshape / flatten x and y
    X_ = [x.reshape(-1, n_states) for x in X_]
    Y = [y.ravel() for y in [y1, y2, y3]]

    X = list(zip(X_, G))  # materialize: fit() and predict() both iterate over X

    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    for y, y_pred in zip(Y, Y_pred):
        assert_array_equal(y, y_pred)
Example 3
def test_continuous_y():
    for inference_method in get_installed(["lp", "ad3"]):
        X, Y = generate_blocks(n_samples=1)
        x, y = X[0], Y[0]
        w = np.array([1, 0,  # unary
                      0, 1,
                      0,     # pairwise
                      -4, 0])

        crf = GridCRF(inference_method=inference_method)
        crf.initialize(X, Y)
        joint_feature = crf.joint_feature(x, y)
        y_cont = np.zeros_like(x)
        gx, gy = np.indices(x.shape[:-1])
        y_cont[gx, gy, y] = 1
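        # y_cont: one-hot (node marginal) encoding of y; vert/horz below sum
        # 2x2 state co-occurrences over vertical and horizontal grid edges
        # to build the relaxed edge marginals pw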
        # need to generate edge marginals
        vert = np.dot(y_cont[1:, :, :].reshape(-1, 2).T,
                      y_cont[:-1, :, :].reshape(-1, 2))
        # horizontal edges
        horz = np.dot(y_cont[:, 1:, :].reshape(-1, 2).T,
                      y_cont[:, :-1, :].reshape(-1, 2))
        pw = vert + horz

        joint_feature_cont = crf.joint_feature(x, (y_cont, pw))
        assert_array_almost_equal(joint_feature, joint_feature_cont)

        const = find_constraint(crf, x, y, w, relaxed=False)
        const_cont = find_constraint(crf, x, y, w, relaxed=True)

        # djoint_feature and loss are equal:
        assert_array_almost_equal(const[1], const_cont[1], 4)
        assert_almost_equal(const[2], const_cont[2], 4)

        # returned y_hat is one-hot version of other
        if isinstance(const_cont[0], tuple):
            assert_array_equal(const[0], np.argmax(const_cont[0][0], axis=-1))

            # test loss:
            assert_almost_equal(crf.loss(y, const[0]),
                                crf.continuous_loss(y, const_cont[0][0]), 4)
Example 4
def test_binary_blocks_cutting_plane():
    #testing cutting plane ssvm on easy binary dataset
    # generate graphs explicitly for each example
    for inference_method in get_installed(["dai", "lp", "qpbo", "ad3", 'ogm']):
        print("testing %s" % inference_method)
        X, Y = generate_blocks(n_samples=3)
        crf = GraphCRF(inference_method=inference_method)
        clf = NSlackSSVM(model=crf,
                         max_iter=20,
                         C=100,
                         check_constraints=True,
                         break_on_bad=False,
                         n_jobs=1)
        x1, x2, x3 = X
        y1, y2, y3 = Y
        n_states = len(np.unique(Y))
        # delete some rows to make it more fun
        x1, y1 = x1[:, :-1], y1[:, :-1]
        x2, y2 = x2[:-1], y2[:-1]
        # generate graphs
        X_ = [x1, x2, x3]
        G = [make_grid_edges(x) for x in X_]

        # reshape / flatten x and y
        X_ = [x.reshape(-1, n_states) for x in X_]
        Y = [y.ravel() for y in [y1, y2, y3]]

        X = list(zip(X_, G))

        clf.fit(X, Y)
        Y_pred = clf.predict(X)
        for y, y_pred in zip(Y, Y_pred):
            assert_array_equal(y, y_pred)
Example 5
def test_binary_blocks():
    X, Y = generate_blocks(n_samples=10)
    crf = GridCRF()
    clf = StructuredPerceptron(model=crf, max_iter=40)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 7
def test_binary_blocks_one_slack_graph():
    #testing cutting plane ssvm on easy binary dataset
    # generate graphs explicitly for each example
    X, Y = generate_blocks(n_samples=3)
    crf = GraphCRF(inference_method=inference_method)
    clf = OneSlackSSVM(model=crf, max_iter=100, C=1,
                       check_constraints=True, break_on_bad=True,
                       n_jobs=1, tol=.1)
    x1, x2, x3 = X
    y1, y2, y3 = Y
    n_states = len(np.unique(Y))
    # delete some rows to make it more fun
    x1, y1 = x1[:, :-1], y1[:, :-1]
    x2, y2 = x2[:-1], y2[:-1]
    # generate graphs
    X_ = [x1, x2, x3]
    G = [make_grid_edges(x) for x in X_]

    # reshape / flatten x and y
    X_ = [x.reshape(-1, n_states) for x in X_]
    Y = [y.ravel() for y in [y1, y2, y3]]

    X = list(zip(X_, G))

    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    for y, y_pred in zip(Y, Y_pred):
        assert_array_equal(y, y_pred)
Example 8
def test_max_product_binary_blocks():
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
    w = np.array([1, 0,  # unary
                  0, 1,
                  0,     # pairwise
                  -4, 0])
    crf = GridCRF(inference_method="max-product")
    crf.initialize(X, Y)
    y_hat = crf.inference(x, w)
    assert_array_equal(y, y_hat)
Example 9
def test_binary_blocks_batches_n_slack():
    #testing cutting plane ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = NSlackSSVM(model=crf, max_iter=20, batch_size=1, C=100)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 10
def test_binary_blocks_crf_n8_lp():
    X, Y = generate_blocks(n_samples=1, noise=1)
    x, y = X[0], Y[0]
    w = np.array([1, 0,  # unary
                  0, 1,
                  1,     # pairwise
                  -1.4, 1])
    crf = GridCRF(neighborhood=8)
    crf.initialize(X, Y)
    y_hat = crf.inference(x, w)
    assert_array_equal(y, y_hat)
Example 11
def test_binary_blocks():
    #testing subgradient ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 13
def test_binary_blocks_cutting_plane():
    #testing cutting plane ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = NSlackSSVM(model=crf, max_iter=20, C=100,
                     check_constraints=True, break_on_bad=False)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
def test_binary_blocks_perceptron_online():
    # testing structured perceptron on easy binary dataset
    X, Y = generate_blocks(n_samples=10)
    inference_method = get_installed(['qpbo', 'ad3', 'lp'])[0]
    crf = GridCRF(inference_method=inference_method)
    clf = StructuredPerceptron(model=crf, max_iter=20)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 15
def test_binary_blocks():
    #testing subgradient ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, C=100, learning_rate=1, decay_exponent=1,
                          momentum=0, decay_t0=10)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 16
def test_blocks_crf_unaries():
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
    unary_weights = np.repeat(np.eye(2), 2, axis=0)
    pairwise_weights = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
    w = np.hstack([unary_weights.ravel(), pairwise_weights])
    crf = LatentGridCRF(n_states_per_label=2, n_labels=2, n_features=2)
    h_hat = crf.inference(x, w)
    # integer division maps the two latent states per label back to the label
    assert_array_equal(h_hat // 2, np.argmax(x, axis=-1))
Example 17
def test_loss_augmentation():
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
    w = np.array([1, 0,  # unary
                  0, 1,
                  0,     # pairwise
                  -4, 0])
    crf = GridCRF()
    crf.initialize(X, Y)
    y_hat, energy = crf.loss_augmented_inference(x, y, w, return_energy=True)
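    # the energy returned with return_energy=True is the negative of the
    # loss-augmented score, so energy + loss(y, y_hat) should equal
    # -np.dot(w, joint_feature(x, y_hat))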

    assert_almost_equal(energy + crf.loss(y, y_hat),
                        -np.dot(w, crf.joint_feature(x, y_hat)))
Example 18
def test_binary_blocks_crf():
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
    w = np.array([1, 0,  # unary
                  0, 1,
                  0,     # pairwise
                  -4, 0])
    for inference_method in get_installed(["dai", "qpbo", "lp", "ad3", "ogm"]):
        crf = GridCRF(inference_method=inference_method)
        crf.initialize(X, Y)
        y_hat = crf.inference(x, w)
        assert_array_equal(y, y_hat)
Example 19
def test_binary_ssvm_attractive_potentials():
    # test that submodular SSVM can learn the block dataset
    X, Y = generate_blocks(n_samples=10)
    crf = GridCRF(inference_method=inference_method)
    submodular_clf = NSlackSSVM(model=crf, max_iter=200, C=100,
                                check_constraints=True,
                                negativity_constraint=[5])
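    # w layout (cf. the commented w arrays in other examples): indices 0-3 are
    # the unary weights, 4-6 the symmetric pairwise entries; index 5 is the
    # cross-label coupling, kept <= 0 by negativity_constraint so the learned
    # potentials stay attractive (checked by the final assert)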
    submodular_clf.fit(X, Y)
    Y_pred = submodular_clf.predict(X)
    assert_array_equal(Y, Y_pred)
    assert_true(submodular_clf.w[5] < 0)
Example 20
def test_binary_blocks_crf_n8_lp():
    X, Y = generate_blocks(n_samples=1, noise=1)
    x, y = X[0], Y[0]
    w = np.array([1, 0,  # unary
                  0, 1,
                  1,     # pairwise
                  -1.4, 1])
    crf = GridCRF(neighborhood=8)
    crf.initialize(X, Y)
    y_hat = crf.inference(x, w)
    assert_array_equal(y, y_hat)
Example 21
def test_binary_blocks_crf():
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
    w = np.array([1, 0,  # unary
                  0, 1,
                  0,     # pairwise
                  -4, 0])
    for inference_method in get_installed(['dai', 'qpbo', 'lp', 'ad3', 'ogm']):
        crf = GridCRF(inference_method=inference_method)
        crf.initialize(X, Y)
        y_hat = crf.inference(x, w)
        assert_array_equal(y, y_hat)
Example 22
def test_blocks_crf():
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
    pairwise_weights = np.array([0, 0, 0, -4, -4, 0, -4, -4, 0, 0])
    unary_weights = np.repeat(np.eye(2), 2, axis=0)
    w = np.hstack([unary_weights.ravel(), pairwise_weights])
    crf = LatentGridCRF(n_states_per_label=2, n_labels=2, n_features=2)
    h_hat = crf.inference(x, w)
    assert_array_equal(y, h_hat // 2)

    h = crf.latent(x, y, w)
    assert_equal(crf.loss(h, h_hat), 0)
Example 24
def test_blocks_crf_unaries():
    X, Y = generate_blocks(n_samples=1)
    x, _ = X[0], Y[0]
    unary_weights = np.repeat(np.eye(2), 2, axis=0)
    pairwise_weights = np.array([0,
                                 0, 0,
                                 0, 0, 0,
                                 0, 0, 0, 0])
    w = np.hstack([unary_weights.ravel(), pairwise_weights])
    crf = LatentGridCRF(n_states_per_label=2, n_labels=2, n_features=2)
    h_hat = crf.inference(x, w)
    assert_array_equal(h_hat // 2, np.argmax(x, axis=-1))
Example 25
def test_loss_augmentation():
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
    w = np.array([1, 0,  # unary
                  0, 1,
                  0,     # pairwise
                  -4, 0])
    crf = GridCRF()
    crf.initialize(X, Y)
    y_hat, energy = crf.loss_augmented_inference(x, y, w, return_energy=True)

    assert_almost_equal(energy + crf.loss(y, y_hat),
                        -np.dot(w, crf.psi(x, y_hat)))
Example 26
def test_binary_blocks():
    #testing subgradient ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf,
                          C=100,
                          learning_rate=1,
                          decay_exponent=1,
                          momentum=0,
                          decay_t0=10)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
def test_binary_ssvm_attractive_potentials_edgefeaturegraph(inference_method="qpbo"):
    X, Y = generate_blocks(n_samples=10)
    crf = GridCRF(inference_method=inference_method)

    #######

    # convert X,Y to EdgeFeatureGraphCRF instances
    crf_edge = EdgeFeatureGraphCRF(inference_method=inference_method,
                                   symmetric_edge_features=[0]
                                    )
    X_edge = []
    Y_edge = []
    for i in range(X.shape[0]):
        unaries = X[i].reshape((-1, 2))
        edges = crf._get_edges(X[i])
        edge_feats = np.ones((edges.shape[0], 1))
        X_edge.append((unaries, edges, edge_feats))
        Y_edge.append((Y[i].reshape((-1,))))

    submodular_clf_edge = SubgradientSSVM(model=crf_edge, max_iter=100, C=1,
                                verbose=1,
                                zero_constraint=[4,7],
                                negativity_constraint=[5,6],
                                )
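    # assumed w layout: indices 0-3 unary, 4-7 the 2x2 pairwise matrix for the
    # single edge feature; zero_constraint pins the same-label entries (4, 7),
    # negativity_constraint keeps the cross-label entries (5, 6) <= 0, and
    # symmetric_edge_features=[0] ties w[5] == w[6] (asserted below)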

    # fit the model with a negativity constraint on the off-diagonal potentials
    submodular_clf_edge.fit(X_edge, Y_edge)

    assert submodular_clf_edge.w[5] == submodular_clf_edge.w[6] # symmetry constraint on edge features

    # # # bias doesn't matter
    # submodular_clf_edge.w += 10*np.ones(submodular_clf_edge.w.shape)
    # print len(submodular_clf_edge.w), submodular_clf_edge.w

    Y_pred = submodular_clf_edge.predict(X_edge)

    assert_array_equal(Y_edge, Y_pred)

    # refit with the negativity constraint on the off-diagonal potentials, this
    # time with the sign of the edge features inverted
    X_edge_neg = [ (x[0], x[1], -x[2]) for x in X_edge ]
    submodular_clf_edge = SubgradientSSVM(model=crf_edge, max_iter=100, C=1,
                                verbose=1,
                                zero_constraint=[4,7],
                                negativity_constraint=[5,6],
                                )
    submodular_clf_edge.fit(X_edge_neg, Y_edge)
    Y_pred = submodular_clf_edge.predict(X_edge_neg)

    assert_array_equal(Y_edge, Y_pred)
Example 28
def test_blocks_crf():
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
    pairwise_weights = np.array([0,
                                 0,  0,
                                -4, -4, 0,
                                -4, -4, 0, 0])
    unary_weights = np.repeat(np.eye(2), 2, axis=0)
    w = np.hstack([unary_weights.ravel(), pairwise_weights])
    crf = LatentGridCRF(n_states_per_label=2, n_labels=2, n_features=2)
    h_hat = crf.inference(x, w)
    assert_array_equal(y, h_hat // 2)

    h = crf.latent(x, y, w)
    assert_equal(crf.loss(h, h_hat), 0)
def test_binary_blocks_cutting_plane_latent_node():
    #testing cutting plane ssvm on easy binary dataset
    # we use the LatentNodeCRF without latent nodes and check that it does the
    # same as GraphCRF
    X, Y = generate_blocks(n_samples=3)
    crf = GraphCRF()
    clf = NSlackSSVM(model=crf,
                     max_iter=20,
                     C=100,
                     check_constraints=True,
                     break_on_bad=False,
                     n_jobs=1)
    x1, x2, x3 = X
    y1, y2, y3 = Y
    n_states = len(np.unique(Y))
    # delete some rows to make it more fun
    x1, y1 = x1[:, :-1], y1[:, :-1]
    x2, y2 = x2[:-1], y2[:-1]
    # generate graphs
    X_ = [x1, x2, x3]
    G = [make_grid_edges(x) for x in X_]

    # reshape / flatten x and y
    X_ = [x.reshape(-1, n_states) for x in X_]
    Y = [y.ravel() for y in [y1, y2, y3]]

    X = list(zip(X_, G))

    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    for y, y_pred in zip(Y, Y_pred):
        assert_array_equal(y, y_pred)

    latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=0)
    latent_svm = LatentSSVM(NSlackSSVM(model=latent_crf,
                                       max_iter=20,
                                       C=100,
                                       check_constraints=True,
                                       break_on_bad=False,
                                       n_jobs=1),
                            latent_iter=3)
    X_latent = list(zip(X_, G, np.zeros(len(X_))))
    latent_svm.fit(X_latent, Y, H_init=Y)
    Y_pred = latent_svm.predict(X_latent)
    for y, y_pred in zip(Y, Y_pred):
        assert_array_equal(y, y_pred)

    assert_array_almost_equal(latent_svm.w, clf.w)
Example 30
def test_max_product_binary_blocks():
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
    w = np.array([1, 0,  # unary
                  0, 1,
                  0,     # pairwise
                  -4, 0])
    crf = GridCRF(inference_method='max-product')
    crf.initialize(X, Y)
    y_hat = crf.inference(x, w)
    assert_array_equal(y, y_hat)
Example 31
def test_continuous_y():
    for inference_method in get_installed(["lp", "ad3"]):
        X, Y = generate_blocks(n_samples=1)
        x, y = X[0], Y[0]
        w = np.array([1, 0,  # unary
                      0, 1,
                      0,     # pairwise
                      -4, 0])

        crf = LatentGridCRF(n_labels=2,
                            n_features=2,
                            n_states_per_label=1,
                            inference_method=inference_method)
        joint_feature = crf.joint_feature(x, y)
        y_cont = np.zeros_like(x)
        gx, gy = np.indices(x.shape[:-1])
        y_cont[gx, gy, y] = 1
        # need to generate edge marginals
        vert = np.dot(y_cont[1:, :, :].reshape(-1, 2).T,
                      y_cont[:-1, :, :].reshape(-1, 2))
        # horizontal edges
        horz = np.dot(y_cont[:, 1:, :].reshape(-1, 2).T,
                      y_cont[:, :-1, :].reshape(-1, 2))
        pw = vert + horz

        joint_feature_cont = crf.joint_feature(x, (y_cont, pw))
        assert_array_almost_equal(joint_feature, joint_feature_cont, 4)

        const = find_constraint(crf, x, y, w, relaxed=False)
        const_cont = find_constraint(crf, x, y, w, relaxed=True)

        # djoint_feature and loss are equal:
        assert_array_almost_equal(const[1], const_cont[1], 4)
        assert_almost_equal(const[2], const_cont[2], 4)

        if isinstance(const_cont[0], tuple):
            # returned y_hat is one-hot version of other
            assert_array_equal(const[0], np.argmax(const_cont[0][0], axis=-1))

            # test loss:
            assert_almost_equal(crf.loss(y, const[0]),
                                crf.continuous_loss(y, const_cont[0][0]), 4)
def test_binary_blocks_cutting_plane_latent_node():
    #testing cutting plane ssvm on easy binary dataset
    # we use the LatentNodeCRF without latent nodes and check that it does the
    # same as GraphCRF
    X, Y = generate_blocks(n_samples=3)
    crf = GraphCRF()
    clf = NSlackSSVM(model=crf, max_iter=20, C=100, check_constraints=True,
                     break_on_bad=False, n_jobs=1)
    x1, x2, x3 = X
    y1, y2, y3 = Y
    n_states = len(np.unique(Y))
    # delete some rows to make it more fun
    x1, y1 = x1[:, :-1], y1[:, :-1]
    x2, y2 = x2[:-1], y2[:-1]
    # generate graphs
    X_ = [x1, x2, x3]
    G = [make_grid_edges(x) for x in X_]

    # reshape / flatten x and y
    X_ = [x.reshape(-1, n_states) for x in X_]
    Y = [y.ravel() for y in [y1, y2, y3]]

    X = list(zip(X_, G))

    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    for y, y_pred in zip(Y, Y_pred):
        assert_array_equal(y, y_pred)

    latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=0)
    latent_svm = LatentSSVM(NSlackSSVM(model=latent_crf, max_iter=20, C=100,
                                       check_constraints=True,
                                       break_on_bad=False, n_jobs=1),
                            latent_iter=3)
    X_latent = list(zip(X_, G, np.zeros(len(X_))))
    latent_svm.fit(X_latent, Y, H_init=Y)
    Y_pred = latent_svm.predict(X_latent)
    for y, y_pred in zip(Y, Y_pred):
        assert_array_equal(y, y_pred)

    assert_array_almost_equal(latent_svm.w, clf.w)
Example 33
def test_continuous_y():
    for inference_method in get_installed(["lp", "ad3"]):
        X, Y = generate_blocks(n_samples=1)
        x, y = X[0], Y[0]
        w = np.array([1, 0,  # unary
                      0, 1,
                      0,     # pairwise
                      -4, 0])

        crf = LatentGridCRF(n_labels=2, n_features=2, n_states_per_label=1,
                            inference_method=inference_method)
        psi = crf.psi(x, y)
        y_cont = np.zeros_like(x)
        gx, gy = np.indices(x.shape[:-1])
        y_cont[gx, gy, y] = 1
        # need to generate edge marginals
        vert = np.dot(y_cont[1:, :, :].reshape(-1, 2).T,
                      y_cont[:-1, :, :].reshape(-1, 2))
        # horizontal edges
        horz = np.dot(y_cont[:, 1:, :].reshape(-1, 2).T,
                      y_cont[:, :-1, :].reshape(-1, 2))
        pw = vert + horz

        psi_cont = crf.psi(x, (y_cont, pw))
        assert_array_almost_equal(psi, psi_cont)

        const = find_constraint(crf, x, y, w, relaxed=False)
        const_cont = find_constraint(crf, x, y, w, relaxed=True)

        # dpsi and loss are equal:
        assert_array_almost_equal(const[1], const_cont[1])
        assert_almost_equal(const[2], const_cont[2])

        if isinstance(const_cont[0], tuple):
            # returned y_hat is one-hot version of other
            assert_array_equal(const[0], np.argmax(const_cont[0][0], axis=-1))

            # test loss:
            assert_almost_equal(crf.loss(y, const[0]),
                                crf.continuous_loss(y, const_cont[0][0]))
Example 34
def test_blocks_crf_directional():
    # test latent directional CRF on blocks
    # test that all results are the same as equivalent LatentGridCRF
    X, Y = generate_blocks(n_samples=1)
    x, y = X[0], Y[0]
    pairwise_weights = np.array([0,
                                 0,   0,
                                -4, -4,  0,
                                -4, -4,  0, 0])
    unary_weights = np.repeat(np.eye(2), 2, axis=0)
    w = np.hstack([unary_weights.ravel(), pairwise_weights])
    pw_directional = np.array([0,   0, -4, -4,
                               0,   0, -4, -4,
                               -4, -4,  0,  0,
                               -4, -4,  0,  0,
                               0,   0, -4, -4,
                               0,   0, -4, -4,
                               -4, -4,  0,  0,
                               -4, -4,  0,  0])
    w_directional = np.hstack([unary_weights.ravel(), pw_directional])
    crf = LatentGridCRF(n_states_per_label=2, inference_method='lp')
    crf.initialize(X, Y)
    directional_crf = LatentDirectionalGridCRF(n_states_per_label=2,
                                               inference_method='lp')
    directional_crf.initialize(X, Y)
    h_hat = crf.inference(x, w)
    h_hat_d = directional_crf.inference(x, w_directional)
    assert_array_equal(h_hat, h_hat_d)

    h = crf.latent(x, y, w)
    h_d = directional_crf.latent(x, y, w_directional)
    assert_array_equal(h, h_d)

    h_hat = crf.loss_augmented_inference(x, y, w)
    h_hat_d = directional_crf.loss_augmented_inference(x, y, w_directional)
    assert_array_equal(h_hat, h_hat_d)

    psi = crf.psi(x, h_hat)
    psi_d = directional_crf.psi(x, h_hat)
    assert_array_equal(np.dot(psi, w), np.dot(psi_d, w_directional))
Example 35
def test_continuous_y():
    for inference_method in get_installed(["lp", "ad3"]):
        X, Y = generate_blocks(n_samples=1)
        x, y = X[0], Y[0]
        w = np.array([1, 0,  # unary
                      0, 1,
                      0,     # pairwise
                      -4, 0])

        crf = GridCRF(inference_method=inference_method)
        crf.initialize(X, Y)
        psi = crf.psi(x, y)
        y_cont = np.zeros_like(x)
        gx, gy = np.indices(x.shape[:-1])
        y_cont[gx, gy, y] = 1
        # need to generate edge marginals
        vert = np.dot(y_cont[1:, :, :].reshape(-1, 2).T,
                      y_cont[:-1, :, :].reshape(-1, 2))
        # horizontal edges
        horz = np.dot(y_cont[:, 1:, :].reshape(-1, 2).T,
                      y_cont[:, :-1, :].reshape(-1, 2))
        pw = vert + horz

        psi_cont = crf.psi(x, (y_cont, pw))
        assert_array_almost_equal(psi, psi_cont)

        const = find_constraint(crf, x, y, w, relaxed=False)
        const_cont = find_constraint(crf, x, y, w, relaxed=True)

        # dpsi and loss are equal:
        assert_array_almost_equal(const[1], const_cont[1])
        assert_almost_equal(const[2], const_cont[2])

        # returned y_hat is one-hot version of other
        if isinstance(const_cont[0], tuple):
            assert_array_equal(const[0], np.argmax(const_cont[0][0], axis=-1))

            # test loss:
            assert_almost_equal(crf.loss(y, const[0]),
                                crf.continuous_loss(y, const_cont[0][0]))