C=1, tol=.01, inactive_threshold=1e-3, batch_size=10) latent_svm = LatentSSVM(base_ssvm=base_ssvm, latent_iter=2) latent_svm.fit(X_train_, y_train) print("Score with binary SVM:") print("Train: {:2.2f}".format(svm.score(X_train_, y_train))) print("Test: {:2.2f}".format(svm.score(X_test_, y_test))) print("Score with latent SVM:") print("Train: {:2.2f}".format(latent_svm.score(X_train_, y_train))) print("Test: {:2.2f}".format(latent_svm.score(X_test_, y_test))) h_pred = np.hstack(latent_svm.predict_latent(X_test_)) print("Latent class counts: %s" % repr(np.bincount(h_pred))) # plot first few digits from each latent class plt.figure(figsize=(3, 5)) plt.suptitle("Example digits from each of\nthe ten latent classes.") n_latent_classes = 10 n_examples = 7 for latent_class in xrange(n_latent_classes): examples = X_test[h_pred == latent_class][:n_examples] for k, example in enumerate(examples): plt.subplot(n_latent_classes, n_examples, 1 + (n_examples * latent_class + k)) plt.imshow(example.reshape((8, 8)), cmap=plt.cm.gray_r) plt.xticks(())
show_loss_every=10, inference_cache=50) latent_svm = LatentSSVM(ssvm) # make edges for hidden states: edges = [] node_indices = np.arange(4 * 4).reshape(4, 4) for i, (x, y) in enumerate(itertools.product([0, 2], repeat=2)): for j in xrange(x, x + 2): for k in xrange(y, y + 2): edges.append([i + 4 * 4, node_indices[j, k]]) G = [np.vstack([make_grid_edges(x), edges]) for x in X] # Random initialization H_init = [ np.hstack([y.ravel(), np.random.randint(2, 4, size=2 * 2)]) for y in Y ] plot_boxes(H_init, title="Top: Random initial hidden states. Bottom: Ground" "truth labeling.") X_ = zip(X_flat, G, [2 * 2 for x in X_flat]) latent_svm.fit(X_, Y_flat, H_init) print("Training score with latent nodes: %f " % latent_svm.score(X_, Y_flat)) H = latent_svm.predict_latent(X_) plot_boxes(H, title="Top: Hidden states after training. Bottom: Prediction.") plt.show()
# NOTE(review): this chunk begins mid-call — the constructor that takes
# `inference_method='lp'` (presumably the latent CRF model) opens before
# the visible span.
                          inference_method='lp')
# One-slack cutting-plane SSVM wrapped in the latent-variable trainer.
ssvm = OneSlackSSVM(model=latent_crf, max_iter=200, C=100, n_jobs=-1,
                    show_loss_every=10, inference_cache=50)
latent_svm = LatentSSVM(ssvm)

# make edges for hidden states:
edges = []
node_indices = np.arange(4 * 4).reshape(4, 4)
# Connect each of the four hidden nodes (indices 16..19) to one 2x2
# quadrant of the 4x4 visible grid.
for i, (x, y) in enumerate(itertools.product([0, 2], repeat=2)):
    for j in range(x, x + 2):
        for k in range(y, y + 2):
            edges.append([i + 4 * 4, node_indices[j, k]])

# Per-sample graph: the grid edges plus the hidden-node edges above.
G = [np.vstack([make_grid_edges(x), edges]) for x in X]

# Random initialization
# Hidden states drawn uniformly from {2, 3}, appended after the flattened
# ground-truth labels.
H_init = [np.hstack([y.ravel(), np.random.randint(2, 4, size=2 * 2)])
          for y in Y]
plot_boxes(H_init, title="Top: Random initial hidden states. Bottom: Ground"
           "truth labeling.")

# Each training sample is (features, edges, number of latent nodes);
# list() materializes the zip so X_ can be reused below.
X_ = list(zip(X_flat, G, [2 * 2 for x in X_flat]))
latent_svm.fit(X_, Y_flat, H_init)

print("Training score with latent nodes: %f " % latent_svm.score(X_, Y_flat))
H = latent_svm.predict_latent(X_)
plot_boxes(H, title="Top: Hidden states after training. Bottom: Prediction.")
plt.show()
# Latent CRF with 5 latent sub-states per label, trained with an
# n-slack cutting-plane SSVM and a latent-variable outer loop.
latent_pbl = LatentGraphCRF(n_states_per_label=5,
                            inference_method='unary')
base_ssvm = NSlackSSVM(latent_pbl, C=1, tol=.01, inactive_threshold=1e-3,
                       batch_size=10)
latent_svm = LatentSSVM(base_ssvm=base_ssvm, latent_iter=2)
latent_svm.fit(X_train_, y_train)

print("Score with binary SVM:")
print("Train: {:2.2f}".format(svm.score(X_train_, y_train)))
print("Test: {:2.2f}".format(svm.score(X_test_, y_test)))

print("Score with latent SVM:")
print("Train: {:2.2f}".format(latent_svm.score(X_train_, y_train)))
print("Test: {:2.2f}".format(latent_svm.score(X_test_, y_test)))

# Stack per-sample latent predictions into one flat array for counting.
h_pred = np.hstack(latent_svm.predict_latent(X_test_))
print("Latent class counts: %s" % repr(np.bincount(h_pred)))

# plot first few digits from each latent class
plt.figure(figsize=(3, 5))
plt.suptitle("Example digits from each of\nthe ten latent classes.")
n_latent_classes = 10
n_examples = 7
for latent_class in range(n_latent_classes):
    examples = X_test[h_pred == latent_class][:n_examples]
    for k, example in enumerate(examples):
        plt.subplot(n_latent_classes, n_examples,
                    1 + (n_examples * latent_class + k))
        # assumes each test sample is a flattened 8x8 image (reshape below)
        plt.imshow(example.reshape((8, 8)), cmap=plt.cm.gray_r)
        plt.xticks(())