Example #1
def test_multinomial_blocks_subgradient_batch():
    # testing subgradient ssvm in batch mode on an easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.6, seed=1)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=100, batch_size=-1)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)

    clf2 = SubgradientSSVM(model=crf, max_iter=100, batch_size=len(X))
    clf2.fit(X, Y)
    Y_pred2 = clf2.predict(X)
    assert_array_equal(Y, Y_pred2)
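Here batch_size=-1 is the full-batch setting, which is why the test expects it to match batch_size=len(X). For contrast, a minimal mini-batch sketch on the same data (names reused from the test above; the batch size of 2 is arbitrary):

clf3 = SubgradientSSVM(model=crf, max_iter=100, batch_size=2)
clf3.fit(X, Y)
Y_pred3 = clf3.predict(X)  # no exactness guarantee with small mini-batches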
Example #2
def test_latent_node_boxes_standard_latent():
    # learn the "easy" 2x2 boxes dataset.
    # a 2x2 box is placed randomly in a 4x4 grid
    # we add a latent variable for each 2x2 patch
    # that should make the model fairly simple

    X, Y = make_simple_2x2(seed=1, n_samples=40)
    latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1)
    one_slack = OneSlackSSVM(latent_crf)
    n_slack = NSlackSSVM(latent_crf)
    subgradient = SubgradientSSVM(latent_crf, max_iter=100)
    for base_svm in [one_slack, n_slack, subgradient]:
        base_svm.C = 10
        latent_svm = LatentSSVM(base_svm, latent_iter=10)

        # make edges for hidden states:
        edges = make_edges_2x2()

        G = [np.vstack([make_grid_edges(x), edges]) for x in X]

        # reshape / flatten x and y
        X_flat = [x.reshape(-1, 1) for x in X]
        Y_flat = [y.ravel() for y in Y]

        X_ = list(zip(X_flat, G, [2 * 2 for x in X_flat]))
        latent_svm.fit(X_[:20], Y_flat[:20])

        assert_array_equal(latent_svm.predict(X_[:20]), Y_flat[:20])
        assert_equal(latent_svm.score(X_[:20], Y_flat[:20]), 1)

        # test that score is not always 1
        assert_true(.98 < latent_svm.score(X_[20:], Y_flat[20:]) < 1)
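For LatentNodeCRF, each training sample is a tuple (node_features, edges, n_hidden_nodes), which is exactly what the zip above assembles, with 2 * 2 = 4 hidden nodes per 4x4 grid. A minimal sketch of one such sample, reusing the helpers from this test (make_grid_edges and make_edges_2x2 are assumed in scope):

import numpy as np

x = np.random.rand(4, 4)                # one 4x4 grid, one feature per node
edges = np.vstack([make_grid_edges(x),  # edges between the 16 visible nodes
                   make_edges_2x2()])   # edges tying hidden nodes to 2x2 patches
sample = (x.reshape(-1, 1), edges, 4)   # 4 hidden nodes, one per 2x2 patch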
Example #3
def test_binary_blocks():
    # testing subgradient ssvm on an easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example #4
def fresh_train(self, x, y, iterations=10):
    self.model = EdgeFeatureGraphCRF(inference_method="max-product")
    self.learner = SubgradientSSVM(
        model=self.model,
        max_iter=iterations,
        logger=SaveLogger(model_file.format(self.userId + "-learner")))
    self.learner.fit(x, y, warm_start=False)
    self.save()
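This method, like the near-identical one in Example #6 below, belongs to a larger class that is not shown. A minimal sketch of the kind of wrapper it could live in; the class name, the model_file template, and the save() body are all hypothetical, only the pystruct calls come from the example:

import pickle

from pystruct.learners import SubgradientSSVM
from pystruct.models import EdgeFeatureGraphCRF
from pystruct.utils import SaveLogger

model_file = "models/{}.pkl"  # assumed path template


class PerUserLearner(object):  # hypothetical wrapper class
    def __init__(self, user_id):
        self.userId = user_id
        self.model = None
        self.learner = None

    def fresh_train(self, x, y, iterations=10):
        self.model = EdgeFeatureGraphCRF(inference_method="max-product")
        self.learner = SubgradientSSVM(
            model=self.model,
            max_iter=iterations,
            logger=SaveLogger(model_file.format(self.userId + "-learner")))
        self.learner.fit(x, y, warm_start=False)
        self.save()

    def save(self):
        # hypothetical: pickle the fitted learner next to the logger snapshots
        with open(model_file.format(self.userId), "wb") as f:
            pickle.dump(self.learner, f)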
Example #5
def test_multinomial_checker_subgradient():
    X, Y = generate_checker_multinomial(n_samples=10, noise=0.4)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=50)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example #6
def fresh_train(self, x, y, iterations=10):
    self.model = ChainCRF(inference_method="max-product")
    self.learner = SubgradientSSVM(
        model=self.model,
        max_iter=iterations,
        logger=SaveLogger(
            MODEL_PATH_TEMPLATE.format(self.userId + "-learner")),
        show_loss_every=50)
    self.learner.fit(x, y, warm_start=False)
    self.save()
Example #7
def test_blobs_2d_subgradient():
    # make three Gaussian blobs
    X, Y = make_blobs(n_samples=80, centers=3, random_state=42)
    # we have to add a constant 1 feature by hand :-/
    X = np.hstack([X, np.ones((X.shape[0], 1))])
    X_train, X_test, Y_train, Y_test = X[:40], X[40:], Y[:40], Y[40:]

    pbl = MultiClassClf(n_features=3, n_classes=3)
    svm = SubgradientSSVM(pbl, C=1000)

    svm.fit(X_train, Y_train)
    assert_array_equal(Y_test, np.hstack(svm.predict(X_test)))
Example #8
def test_binary_blocks():
    # testing subgradient ssvm on an easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf,
                          C=100,
                          learning_rate=1,
                          decay_exponent=1,
                          momentum=0,
                          decay_t0=10)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example #9
def test_multinomial_blocks_subgradient():
    # testing subgradient ssvm on an easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.3, seed=1)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf,
                          max_iter=50,
                          C=10,
                          momentum=.98,
                          learning_rate=0.001)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example #10
def test_subgradient_svm_as_crf_pickling():

    iris = load_iris()
    X, y = iris.data, iris.target

    X_ = [(np.atleast_2d(x), np.empty((0, 2), dtype=int)) for x in X]
    Y = y.reshape(-1, 1)

    X_train, X_test, y_train, y_test = train_test_split(X_, Y, random_state=1)
    _, file_name = mkstemp()

    pbl = GraphCRF(n_features=4, n_states=3, inference_method='unary')
    logger = SaveLogger(file_name)
    svm = SubgradientSSVM(pbl, logger=logger, max_iter=100)
    svm.fit(X_train, y_train)

    assert_less(.97, svm.score(X_test, y_test))
    assert_less(.97, logger.load().score(X_test, y_test))
Example #11
def test_latent_node_boxes_standard_latent_features():
    # learn the "easy" 2x2 boxes dataset.
    # we make it even easier now by adding features that encode the correct
    # latent state. This basically tests that the features are actually used

    X, Y = make_simple_2x2(seed=1, n_samples=20, n_flips=6)
    latent_crf = LatentNodeCRF(n_labels=2,
                               n_hidden_states=2,
                               n_features=1,
                               latent_node_features=True)
    one_slack = OneSlackSSVM(latent_crf)
    n_slack = NSlackSSVM(latent_crf)
    subgradient = SubgradientSSVM(latent_crf,
                                  max_iter=100,
                                  learning_rate=0.01,
                                  momentum=0)
    for base_svm in [one_slack, n_slack, subgradient]:
        base_svm.C = 10
        latent_svm = LatentSSVM(base_svm, latent_iter=10)

        # make edges for hidden states:
        edges = make_edges_2x2()

        G = [np.vstack([make_grid_edges(x), edges]) for x in X]

        # reshape / flatten x and y
        X_flat = [x.reshape(-1, 1) for x in X]
        # augment X with the features for hidden units
        X_flat = [
            np.vstack([x, y[::2, ::2].reshape(-1, 1)])
            for x, y in zip(X_flat, Y)
        ]
        Y_flat = [y.ravel() for y in Y]

        X_ = list(zip(X_flat, G, [2 * 2 for x in X_flat]))
        latent_svm.fit(X_[:10], Y_flat[:10])

        assert_array_equal(latent_svm.predict(X_[:10]), Y_flat[:10])
        assert_equal(latent_svm.score(X_[:10], Y_flat[:10]), 1)

        # we actually become perfect ^^
        assert_true(.98 < latent_svm.score(X_[10:], Y_flat[10:]) <= 1)
Example #12
def test_with_crosses_base_svms():
    # very simple dataset. k-means init is perfect
    n_labels = 2
    crf = LatentGridCRF(n_labels=n_labels, n_states_per_label=[1, 2])
    one_slack = OneSlackSSVM(crf, inference_cache=50)
    n_slack = NSlackSSVM(crf)
    subgradient = SubgradientSSVM(crf,
                                  max_iter=400,
                                  learning_rate=.01,
                                  decay_exponent=0,
                                  decay_t0=10)

    X, Y = generate_crosses(n_samples=10, noise=5, n_crosses=1, total_size=8)

    for base_ssvm in [one_slack, n_slack, subgradient]:
        base_ssvm.C = 100.
        clf = LatentSSVM(base_ssvm=base_ssvm)
        clf.fit(X, Y)
        Y_pred = clf.predict(X)
        assert_array_equal(np.array(Y_pred), Y)
        assert_equal(clf.score(X, Y), 1)
Example #13
def test_ssvm_objectives():
    # test that the algorithms provide consistent objective curves.
    # this is not that strong a test now but at least makes sure that
    # the objective function is called.
    X, Y = generate_blocks_multinomial(n_samples=10, noise=1.5, seed=0)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    # once for n-slack
    clf = NSlackSSVM(model=crf, max_iter=5, C=1, tol=.1)
    clf.fit(X, Y)
    primal_objective = objective_primal(clf.model, clf.w, X, Y, clf.C)
    assert_almost_equal(clf.primal_objective_curve_[-1], primal_objective)

    # once for one-slack
    clf = OneSlackSSVM(model=crf, max_iter=5, C=1, tol=.1)
    clf.fit(X, Y)
    primal_objective = objective_primal(clf.model,
                                        clf.w,
                                        X,
                                        Y,
                                        clf.C,
                                        variant='one_slack')
    assert_almost_equal(clf.primal_objective_curve_[-1], primal_objective)

    # now subgradient. Should also work in batch-mode.
    clf = SubgradientSSVM(model=crf, max_iter=5, C=1, batch_size=-1)
    clf.fit(X, Y)
    primal_objective = objective_primal(clf.model, clf.w, X, Y, clf.C)
    assert_almost_equal(clf.objective_curve_[-1], primal_objective)

    # frank wolfe
    clf = FrankWolfeSSVM(model=crf, max_iter=5, C=1, batch_mode=True)
    clf.fit(X, Y)
    primal_objective = objective_primal(clf.model, clf.w, X, Y, clf.C)
    assert_almost_equal(clf.primal_objective_curve_[-1], primal_objective)
    # block-coordinate Frank-Wolfe
    clf = FrankWolfeSSVM(model=crf, max_iter=5, C=1, batch_mode=False)
    clf.fit(X, Y)
    primal_objective = objective_primal(clf.model, clf.w, X, Y, clf.C)
    assert_almost_equal(clf.primal_objective_curve_[-1], primal_objective)
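Each learner above exposes its objective history: primal_objective_curve_ on the cutting-plane and Frank-Wolfe learners, objective_curve_ on SubgradientSSVM. A minimal sketch for eyeballing convergence of the last fitted learner, assuming matplotlib is available:

import matplotlib.pyplot as plt

# `clf` is the block-coordinate Frank-Wolfe learner fitted last above
plt.plot(clf.primal_objective_curve_)
plt.xlabel("iteration")
plt.ylabel("primal objective")
plt.show()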
Example #14
def test_objective():
    # test that SubgradientLatentSSVM does the same as SubgradientSSVM,
    # in particular that it has the same loss when there are no latent states.
    X, Y = generate_blocks_multinomial(n_samples=10, noise=.3, seed=1)
    inference_method = get_installed(["qpbo", "ad3", "lp"])[0]
    n_labels = 3
    crfl = LatentGridCRF(n_labels=n_labels, n_states_per_label=1,
                         inference_method=inference_method)
    clfl = SubgradientLatentSSVM(model=crfl, max_iter=20, C=10.,
                                 learning_rate=0.001, momentum=0.98)
    crfl.initialize(X, Y)
    clfl.w = np.zeros(crfl.size_joint_feature)  # this disables random init
    clfl.fit(X, Y)

    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=20, C=10., learning_rate=0.001,
                          momentum=0.98)
    clf.fit(X, Y)
    assert_array_almost_equal(clf.w, clfl.w)
    assert_almost_equal(clf.objective_curve_[-1], clfl.objective_curve_[-1])
    assert_array_equal(clf.predict(X), clfl.predict(X))
    assert_array_equal(clf.predict(X), Y)
Example #15
digits = load_digits()
X, y = digits.data, digits.target
#X = X / 255.
X = X / 16.
#y = y.astype(np.int) - 1
X_train, X_test, y_train, y_test = train_test_split(X, y)

# we add a constant 1 feature for the bias
X_train_bias = np.hstack([X_train, np.ones((X_train.shape[0], 1))])
X_test_bias = np.hstack([X_test, np.ones((X_test.shape[0], 1))])

model = MultiClassClf(n_features=X_train_bias.shape[1], n_classes=10)
n_slack_svm = NSlackSSVM(model, verbose=2, check_constraints=False, C=0.1,
                         batch_size=100, tol=1e-2)
one_slack_svm = OneSlackSSVM(model, verbose=2, C=.10, tol=.001)
subgradient_svm = SubgradientSSVM(model, C=0.1, learning_rate=0.000001,
                                  max_iter=1000, verbose=0)

fw_bc_svm = FrankWolfeSSVM(model, C=.1, max_iter=50)
fw_batch_svm = FrankWolfeSSVM(model, C=.1, max_iter=50, batch_mode=True)

# n-slack cutting plane ssvm
start = time()
n_slack_svm.fit(X_train_bias, y_train)
time_n_slack_svm = time() - start
y_pred = np.hstack(n_slack_svm.predict(X_test_bias))
print("Score with pystruct n-slack ssvm: %f (took %f seconds)"
      % (np.mean(y_pred == y_test), time_n_slack_svm))

# 1-slack cutting plane ssvm
start = time()
one_slack_svm.fit(X_train_bias, y_train)
time_one_slack_svm = time() - start
y_pred = np.hstack(one_slack_svm.predict(X_test_bias))
print("Score with pystruct 1-slack ssvm: %f (took %f seconds)"
      % (np.mean(y_pred == y_test), time_one_slack_svm))
Example #16
# (setup truncated in the source; all that survives is that `crf` was
# constructed with an inference method option 'branch_and_bound': True)

n_slack_svm = NSlackSSVM(crf,
                         check_constraints=False,
                         max_iter=50,
                         batch_size=1,
                         tol=0.001)
one_slack_svm = OneSlackSSVM(crf,
                             check_constraints=False,
                             max_iter=100,
                             tol=0.001,
                             inference_cache=50)
subgradient_svm = SubgradientSSVM(crf,
                                  learning_rate=0.001,
                                  max_iter=20,
                                  decay_exponent=0,
                                  momentum=0)
bcfw_svm = FrankWolfeSSVM(crf, max_iter=50, check_dual_every=4)

# n-slack cutting plane ssvm
n_slack_svm.fit(X, Y)

# 1-slack cutting plane ssvm
one_slack_svm.fit(X, Y)

# online subgradient ssvm
subgradient_svm.fit(X, Y)

# Block coordinate Frank-Wolfe
bcfw_svm.fit(X, Y)
Example #17
digits = load_digits()
X, y = digits.data, digits.target

# make a binary task by doing odd vs. even numbers
y = y % 2
# code as +1 and -1
y = 2 * y - 1
X /= X.max()

X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

pbl = BinaryClf()
n_slack_svm = NSlackSSVM(pbl, C=10, batch_size=-1)
one_slack_svm = OneSlackSSVM(pbl, C=10, tol=0.1)
subgradient_svm = SubgradientSSVM(pbl,
                                  C=10,
                                  learning_rate=0.1,
                                  max_iter=100,
                                  batch_size=10)

# we add a constant 1 feature for the bias
X_train_bias = np.hstack([X_train, np.ones((X_train.shape[0], 1))])
X_test_bias = np.hstack([X_test, np.ones((X_test.shape[0], 1))])

# n-slack cutting plane ssvm
start = time()
n_slack_svm.fit(X_train_bias, y_train)
time_n_slack_svm = time() - start
acc_n_slack = n_slack_svm.score(X_test_bias, y_test)
print("Score with pystruct n-slack ssvm: %f (took %f seconds)" %
      (acc_n_slack, time_n_slack_svm))
Example #18
def make_random_trees(n_samples=50, n_nodes=100, n_states=7, n_features=10):
    crf = GraphCRF(inference_method='max-product', n_states=n_states,
                   n_features=n_features)
    weights = np.random.randn(crf.size_joint_feature)
    X, y = [], []
    for i in range(n_samples):
        distances = np.random.randn(n_nodes, n_nodes)
        features = np.random.randn(n_nodes, n_features)
        tree = minimum_spanning_tree(sparse.csr_matrix(distances))
        edges = np.c_[tree.nonzero()]
        X.append((features, edges))
        y.append(crf.inference(X[-1], weights))

    return X, y, weights


X, y, weights = make_random_trees(n_nodes=1000)

X_train, X_test, y_train, y_test = train_test_split(X, y)

#tree_model = MultiLabelClf(edges=tree, inference_method=('ogm', {'alg': 'dyn'}))
tree_model = GraphCRF(inference_method='max-product')

tree_ssvm = SubgradientSSVM(tree_model, max_iter=4, C=1, verbose=10)

print("fitting tree model...")
tree_ssvm.fit(X_train, y_train)

print("Training loss tree model: %f" % tree_ssvm.score(X_train, y_train))
print("Test loss tree model: %f" % tree_ssvm.score(X_test, y_test))