def test_latent_node_boxes_edge_features():
    """Learn the "easy" 2x2 boxes dataset with an edge-feature latent model.

    Smoke test using a single constant edge feature: the model should fit
    the first 20 samples perfectly and score just below 1 on the rest.
    """
    X, Y = make_simple_2x2(seed=1, n_samples=40)
    latent_crf = EdgeFeatureLatentNodeCRF(n_labels=2, n_hidden_states=2,
                                          n_features=1)
    base_svm = OneSlackSSVM(latent_crf)
    base_svm.C = 10
    latent_svm = LatentSSVM(base_svm, latent_iter=10)

    # grid edges between visible nodes plus edges to the hidden nodes
    hidden_edges = make_edges_2x2()
    G = [np.vstack([make_grid_edges(x), hidden_edges]) for x in X]

    # reshape / flatten x and y
    X_flat = [x.reshape(-1, 1) for x in X]
    Y_flat = [y.ravel() for y in Y]

    # each sample: (features, edges, constant edge feature, 4 hidden nodes)
    X_ = [(x, g, np.ones((len(g), 1)), 4) for x, g in zip(X_flat, G)]
    latent_svm.fit(X_[:20], Y_flat[:20])

    assert_array_equal(latent_svm.predict(X_[:20]), Y_flat[:20])
    assert_equal(latent_svm.score(X_[:20], Y_flat[:20]), 1)

    # test that score is not always 1
    assert_true(.98 < latent_svm.score(X_[20:], Y_flat[20:]) < 1)
def test_latent_node_boxes_standard_latent():
    """Learn the "easy" 2x2 boxes dataset with latent nodes.

    A 2x2 box is placed randomly in a 4x4 grid; one latent variable per
    2x2 patch should make the model simple enough for every base learner
    (one-slack, n-slack, subgradient) to fit the training set perfectly.
    """
    X, Y = make_simple_2x2(seed=1, n_samples=40)
    latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1)
    one_slack = OneSlackSSVM(latent_crf)
    n_slack = NSlackSSVM(latent_crf)
    subgradient = SubgradientSSVM(latent_crf, max_iter=100)

    for base_svm in [one_slack, n_slack, subgradient]:
        base_svm.C = 10
        latent_svm = LatentSSVM(base_svm, latent_iter=10)

        # grid edges between visible nodes plus edges to the hidden nodes
        hidden_edges = make_edges_2x2()
        G = [np.vstack([make_grid_edges(x), hidden_edges]) for x in X]

        # reshape / flatten x and y
        X_flat = [x.reshape(-1, 1) for x in X]
        Y_flat = [y.ravel() for y in Y]

        # each sample: (features, edges, 4 hidden nodes)
        X_ = list(zip(X_flat, G, [2 * 2 for x in X_flat]))
        latent_svm.fit(X_[:20], Y_flat[:20])

        assert_array_equal(latent_svm.predict(X_[:20]), Y_flat[:20])
        assert_equal(latent_svm.score(X_[:20], Y_flat[:20]), 1)

        # test that score is not always 1
        assert_true(.98 < latent_svm.score(X_[20:], Y_flat[20:]) < 1)
def test_latent_node_boxes_standard_latent():
    """Learn the "easy" 2x2 boxes dataset with latent nodes.

    A 2x2 box is placed randomly in a 4x4 grid; one latent variable per
    2x2 patch should make the model simple enough for every base learner
    (one-slack, n-slack, subgradient) to fit the training set perfectly.
    """
    X, Y = make_simple_2x2(seed=1, n_samples=40)
    latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1)
    one_slack = OneSlackSSVM(latent_crf)
    n_slack = NSlackSSVM(latent_crf)
    subgradient = SubgradientSSVM(latent_crf, max_iter=100)

    for base_svm in [one_slack, n_slack, subgradient]:
        base_svm.C = 10
        latent_svm = LatentSSVM(base_svm, latent_iter=10)

        # grid edges between visible nodes plus edges to the hidden nodes
        hidden_edges = make_edges_2x2()
        G = [np.vstack([make_grid_edges(x), hidden_edges]) for x in X]

        # reshape / flatten x and y
        X_flat = [x.reshape(-1, 1) for x in X]
        Y_flat = [y.ravel() for y in Y]

        # materialize: a bare zip is an iterator in Python 3 and the
        # slicing below (X_[:20]) would raise TypeError
        X_ = list(zip(X_flat, G, [2 * 2 for x in X_flat]))
        latent_svm.fit(X_[:20], Y_flat[:20])

        assert_array_equal(latent_svm.predict(X_[:20]), Y_flat[:20])
        assert_equal(latent_svm.score(X_[:20], Y_flat[:20]), 1)

        # test that score is not always 1
        assert_true(.98 < latent_svm.score(X_[20:], Y_flat[20:]) < 1)
def test_latent_node_boxes_standard_latent_features():
    """Learn the "easy" 2x2 boxes dataset with latent-node features.

    The task is made even easier by appending features that encode the
    correct latent state; this basically tests that the latent-node
    features are actually used by the model.
    """
    X, Y = make_simple_2x2(seed=1, n_samples=20, n_flips=6)
    latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1,
                               latent_node_features=True)
    one_slack = OneSlackSSVM(latent_crf)
    n_slack = NSlackSSVM(latent_crf)
    subgradient = SubgradientSSVM(latent_crf, max_iter=100, learning_rate=0.01,
                                  momentum=0)

    for base_svm in [one_slack, n_slack, subgradient]:
        base_svm.C = 10
        latent_svm = LatentSSVM(base_svm, latent_iter=10)

        # grid edges between visible nodes plus edges to the hidden nodes
        hidden_edges = make_edges_2x2()
        G = [np.vstack([make_grid_edges(x), hidden_edges]) for x in X]

        # reshape / flatten x and y
        X_flat = [x.reshape(-1, 1) for x in X]
        # augment X with features for the hidden units: the label of every
        # second pixel encodes the correct latent state
        X_flat = [np.vstack([x, y[::2, ::2].reshape(-1, 1)])
                  for x, y in zip(X_flat, Y)]
        Y_flat = [y.ravel() for y in Y]

        # materialize: a bare zip is an iterator in Python 3 and the
        # slicing below (X_[:10]) would raise TypeError
        X_ = list(zip(X_flat, G, [2 * 2 for x in X_flat]))
        latent_svm.fit(X_[:10], Y_flat[:10])

        assert_array_equal(latent_svm.predict(X_[:10]), Y_flat[:10])
        assert_equal(latent_svm.score(X_[:10], Y_flat[:10]), 1)

        # we actually become perfect ^^
        assert_true(.98 < latent_svm.score(X_[10:], Y_flat[10:]) <= 1)
def test_latent_node_boxes_latent_subgradient():
    """Same as the standard latent test, but with elementary subgradients."""
    X, Y = make_simple_2x2(seed=1)
    latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1)
    latent_svm = SubgradientLatentSSVM(model=latent_crf, max_iter=50, C=10)

    # grid edges between visible nodes plus edges to the hidden nodes
    hidden_edges = make_edges_2x2()
    G = [np.vstack([make_grid_edges(x), hidden_edges]) for x in X]

    # reshape / flatten x and y
    X_flat = [x.reshape(-1, 1) for x in X]
    Y_flat = [y.ravel() for y in Y]

    # each sample: (features, edges, 16 hidden nodes)
    X_ = list(zip(X_flat, G, [4 * 4 for x in X_flat]))
    latent_svm.fit(X_, Y_flat)

    assert_equal(latent_svm.score(X_, Y_flat), 1)
def test_latent_node_boxes_latent_subgradient():
    """Same as the standard latent test, but with elementary subgradients."""
    X, Y = make_simple_2x2(seed=1)
    latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1)
    latent_svm = SubgradientLatentSSVM(model=latent_crf, max_iter=50, C=10)

    # grid edges between visible nodes plus edges to the hidden nodes
    hidden_edges = make_edges_2x2()
    G = [np.vstack([make_grid_edges(x), hidden_edges]) for x in X]

    # reshape / flatten x and y
    X_flat = [x.reshape(-1, 1) for x in X]
    Y_flat = [y.ravel() for y in Y]

    # materialize: in Python 3 a bare zip is exhausted after one pass,
    # but X_ is consumed twice below (fit, then score)
    X_ = list(zip(X_flat, G, [4 * 4 for x in X_flat]))
    latent_svm.fit(X_, Y_flat)

    assert_equal(latent_svm.score(X_, Y_flat), 1)
def test_latent_node_boxes_standard_latent_features():
    """Learn the "easy" 2x2 boxes dataset with latent-node features.

    The task is made even easier by appending features that encode the
    correct latent state; this basically tests that the latent-node
    features are actually used by the model.
    """
    X, Y = make_simple_2x2(seed=1, n_samples=20, n_flips=6)
    latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1,
                               latent_node_features=True)
    one_slack = OneSlackSSVM(latent_crf)
    n_slack = NSlackSSVM(latent_crf)
    subgradient = SubgradientSSVM(latent_crf, max_iter=100, learning_rate=0.01,
                                  momentum=0)

    for base_svm in [one_slack, n_slack, subgradient]:
        base_svm.C = 10
        latent_svm = LatentSSVM(base_svm, latent_iter=10)

        # grid edges between visible nodes plus edges to the hidden nodes
        hidden_edges = make_edges_2x2()
        G = [np.vstack([make_grid_edges(x), hidden_edges]) for x in X]

        # reshape / flatten x and y
        X_flat = [x.reshape(-1, 1) for x in X]
        # augment X with features for the hidden units: the label of every
        # second pixel encodes the correct latent state
        X_flat = [np.vstack([x, y[::2, ::2].reshape(-1, 1)])
                  for x, y in zip(X_flat, Y)]
        Y_flat = [y.ravel() for y in Y]

        # materialize: a bare zip is an iterator in Python 3 and the
        # slicing below (X_[:10]) would raise TypeError
        X_ = list(zip(X_flat, G, [2 * 2 for x in X_flat]))
        latent_svm.fit(X_[:10], Y_flat[:10])

        assert_array_equal(latent_svm.predict(X_[:10]), Y_flat[:10])
        assert_equal(latent_svm.score(X_[:10], Y_flat[:10]), 1)

        # we actually become perfect ^^
        assert_true(.98 < latent_svm.score(X_[10:], Y_flat[10:]) <= 1)
# Plot the hidden-node assignments (top row of axes, one node per 2x2
# patch) and the visible grid labels (bottom row).
for a, x in zip(ax[0], boxes):
    # floor division: size / 2 is a float in Python 3 and reshape rejects it
    plot_grid(x[size * size:].reshape(size // 2, size // 2), cmap=cmap, axes=a,
              border_color="green")
for a, x in zip(ax[1], boxes):
    plot_grid(x[:size * size].reshape(size, size), cmap=cmap, axes=a,
              border_color="green")
fig.subplots_adjust(.01, .03, .98, .75, .2, .05)
fig.suptitle(title)

# learn the "easy" 2x2 boxes dataset.
# a 2x2 box is placed randomly in a 4x4 grid
# we add a latent variable for each 2x2 patch
# that should make the model fairly simple
X, Y = make_simple_2x2(seed=1)

# flatten X and Y; np.float was removed in NumPy 1.24 -- use builtin float
X_flat = [x.reshape(-1, 1).astype(float) for x in X]
Y_flat = [y.ravel() for y in Y]

# first, use standard graph CRF. Can't do much, high loss.
crf = GraphCRF()
svm = NSlackSSVM(model=crf, max_iter=200, C=1, n_jobs=1)
G = [make_grid_edges(x) for x in X]

X_grid_edges = list(zip(X_flat, G))
svm.fit(X_grid_edges, Y_flat)
plot_boxes(svm.predict(X_grid_edges), title="Non-latent SSVM predictions")
# Plot the hidden-node assignments (top row of axes, one node per 2x2
# patch) and the visible grid labels (bottom row).
# NOTE(review): the opening of the first plot_grid call was truncated in
# this chunk; reconstructed from the parallel loop over ax[1] below.
for a, x in zip(ax[0], boxes):
    # floor division: size / 2 is a float in Python 3 and reshape rejects it
    plot_grid(x[size * size:].reshape(size // 2, size // 2), cmap=cmap, axes=a,
              border_color="green")
for a, x in zip(ax[1], boxes):
    plot_grid(x[:size * size].reshape(size, size), cmap=cmap, axes=a,
              border_color="green")
fig.subplots_adjust(.01, .03, .98, .75, .2, .05)
fig.suptitle(title)

# learn the "easy" 2x2 boxes dataset.
# a 2x2 box is placed randomly in a 4x4 grid
# we add a latent variable for each 2x2 patch
# that should make the model fairly simple
X, Y = make_simple_2x2(seed=1)

# flatten X and Y; np.float was removed in NumPy 1.24 -- use builtin float
X_flat = [x.reshape(-1, 1).astype(float) for x in X]
Y_flat = [y.ravel() for y in Y]

# first, use standard graph CRF. Can't do much, high loss.
crf = GraphCRF()
svm = NSlackSSVM(model=crf, max_iter=200, C=1, n_jobs=1)
G = [make_grid_edges(x) for x in X]

# materialize: in Python 3 a bare zip is exhausted after one pass, but
# this dataset is reused by fit, predict, and score below
X_grid_edges = list(zip(X_flat, G))
svm.fit(X_grid_edges, Y_flat)
plot_boxes(svm.predict(X_grid_edges), title="Non-latent SSVM predictions")
print("Training score binary grid CRF: %f" % svm.score(X_grid_edges, Y_flat))