def test_multinomial_blocks_subgradient_batch():
    # testing subgradient ssvm on easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.6, seed=1)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=100, batch_size=-1)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)

    # batch_size=-1 (use the whole training set) should behave the same as
    # batch_size=len(X)
    clf2 = SubgradientSSVM(model=crf, max_iter=100, batch_size=len(X))
    clf2.fit(X, Y)
    Y_pred2 = clf2.predict(X)
    assert_array_equal(Y, Y_pred2)
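# Hedged sketch (not part of the original test): with batch_size left at its
# default of None, SubgradientSSVM is assumed to do per-sample (online)
# updates rather than full-batch ones; the data and settings mirror the test
# above, and convergence on this easy problem is expected, not guaranteed.
def check_multinomial_blocks_online():
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.6, seed=1)
    crf = GridCRF(n_states=len(np.unique(Y)),
                  inference_method=inference_method)
    clf_online = SubgradientSSVM(model=crf, max_iter=100, batch_size=None)
    clf_online.fit(X, Y)
    assert_array_equal(Y, clf_online.predict(X))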
def test_binary_ssvm_attractive_potentials_edgefeaturegraph(inference_method="qpbo"):
    X, Y = generate_blocks(n_samples=10)
    crf = GridCRF(inference_method=inference_method)

    # convert X, Y to EdgeFeatureGraphCRF instances
    crf_edge = EdgeFeatureGraphCRF(inference_method=inference_method,
                                   symmetric_edge_features=[0])
    X_edge = []
    Y_edge = []
    for i in range(X.shape[0]):
        unaries = X[i].reshape((-1, 2))
        edges = crf._get_edges(X[i])
        edge_feats = np.ones((edges.shape[0], 1))
        X_edge.append((unaries, edges, edge_feats))
        Y_edge.append(Y[i].reshape((-1,)))

    submodular_clf_edge = SubgradientSSVM(model=crf_edge, max_iter=100, C=1,
                                          verbose=1,
                                          zero_constraint=[4, 7],
                                          negativity_constraint=[5, 6])
    # fit the model with a negativity constraint on the off-diagonal potentials
    submodular_clf_edge.fit(X_edge, Y_edge)
    # symmetry constraint on edge features
    assert submodular_clf_edge.w[5] == submodular_clf_edge.w[6]
    Y_pred = submodular_clf_edge.predict(X_edge)
    assert_array_equal(Y_edge, Y_pred)

    # fit the model again, this time with the sign of the edge features
    # inverted
    X_edge_neg = [(x[0], x[1], -x[2]) for x in X_edge]
    submodular_clf_edge = SubgradientSSVM(model=crf_edge, max_iter=100, C=1,
                                          verbose=1,
                                          zero_constraint=[4, 7],
                                          negativity_constraint=[5, 6])
    submodular_clf_edge.fit(X_edge_neg, Y_edge)
    Y_pred = submodular_clf_edge.predict(X_edge_neg)
    assert_array_equal(Y_edge, Y_pred)
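# Hedged helper (a sketch, not part of the original test): the conversion
# loop above is the generic recipe for turning one GridCRF sample into
# EdgeFeatureGraphCRF input; `crf` and the two-state grid shape are carried
# over from the test as assumptions.
def grid_to_edge_graph(x, y, crf, n_states=2):
    unaries = x.reshape((-1, n_states))
    edges = crf._get_edges(x)                   # grid edges from the GridCRF
    edge_feats = np.ones((edges.shape[0], 1))   # one constant edge feature
    return (unaries, edges, edge_feats), y.reshape((-1,))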
def test_binary_blocks_subgradient():
    # testing subgradient ssvm on easy binary dataset
    X, Y = toy.generate_blocks(n_samples=10)
    crf = GridCRF()
    clf = SubgradientSSVM(model=crf, max_iter=200, C=100, learning_rate=0.1)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
def test_binary_blocks():
    # testing subgradient ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
def test_multinomial_checker_subgradient():
    # testing subgradient ssvm on non-submodular multinomial dataset
    X, Y = generate_checker_multinomial(n_samples=10, noise=0.4)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=50)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
def test_binary_checker_subgradient():
    # testing subgradient ssvm on non-submodular binary dataset
    X, Y = toy.generate_checker(n_samples=10)
    crf = GridCRF()
    clf = SubgradientSSVM(model=crf, max_iter=100, C=100, momentum=.9,
                          learning_rate=0.1)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
def test_binary_blocks():
    # testing subgradient ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, C=100, learning_rate=1,
                          decay_exponent=1, momentum=0, decay_t0=10)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
def test_multinomial_checker_subgradient():
    X, Y = toy.generate_checker_multinomial(n_samples=10, noise=0.0)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels)
    clf = SubgradientSSVM(model=crf, max_iter=50, C=10, momentum=.98,
                          learning_rate=0.01)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
def test_multinomial_blocks_subgradient():
    # testing subgradient ssvm on easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.3, seed=1)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=50, C=10, momentum=.98,
                          learning_rate=0.001)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
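# Hedged aside (a sketch, assuming BaseSSVM.score returns mean accuracy,
# i.e. 1 - normalized loss): a more compact way to assert perfect recovery
# than comparing full label arrays.
def check_training_accuracy(clf, X, Y):
    acc = clf.score(X, Y)
    assert acc == 1.0  # exact recovery expected on this easy dataset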
def test_blobs_2d_subgradient():
    # make three gaussian blobs
    X, Y = make_blobs(n_samples=80, centers=3, random_state=42)
    # we have to add a constant 1 feature by hand :-/
    X = np.hstack([X, np.ones((X.shape[0], 1))])
    X_train, X_test, Y_train, Y_test = X[:40], X[40:], Y[:40], Y[40:]

    pbl = MultiClassClf(n_features=3, n_classes=3)
    svm = SubgradientSSVM(pbl, C=1000)

    svm.fit(X_train, Y_train)
    assert_array_equal(Y_test, np.hstack(svm.predict(X_test)))
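# Hedged sketch (assumption: MultiClassClf stores one weight vector per
# class, so w has n_classes * n_features entries; this is an internal layout
# detail, not guaranteed by the test above):
def check_blobs_weight_shape():
    X, Y = make_blobs(n_samples=80, centers=3, random_state=42)
    X = np.hstack([X, np.ones((X.shape[0], 1))])
    pbl = MultiClassClf(n_features=3, n_classes=3)
    svm = SubgradientSSVM(pbl, C=1000)
    svm.fit(X[:40], Y[:40])
    assert svm.w.shape == (9,)  # 3 classes x 3 features (incl. bias column)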
def test_objective():
    # test that LatentSubgradientSSVM does the same as SubgradientSSVM,
    # in particular that it has the same loss, if there are no latent states.
    X, Y = toy.generate_blocks_multinomial(n_samples=10)
    n_labels = 3
    crfl = LatentGridCRF(n_labels=n_labels, n_states_per_label=1)
    clfl = LatentSubgradientSSVM(model=crfl, max_iter=50, C=10.,
                                 learning_rate=0.001, momentum=0.98,
                                 decay_exponent=0)
    clfl.w = np.zeros(crfl.size_psi)  # this disables random init
    clfl.fit(X, Y)

    crf = GridCRF(n_states=n_labels)
    clf = SubgradientSSVM(model=crf, max_iter=50, C=10.,
                          learning_rate=0.001, momentum=0.98,
                          decay_exponent=0)
    clf.fit(X, Y)
    assert_array_almost_equal(clf.w, clfl.w)
    assert_array_equal(clf.predict(X), Y)
    assert_almost_equal(clf.objective_curve_[-1], clfl.objective_curve_[-1])
def test_objective():
    # test that SubgradientLatentSSVM does the same as SubgradientSSVM,
    # in particular that it has the same loss, if there are no latent states.
    X, Y = generate_blocks_multinomial(n_samples=10, noise=.3, seed=1)
    inference_method = get_installed(["qpbo", "ad3", "lp"])[0]
    n_labels = 3
    crfl = LatentGridCRF(n_labels=n_labels, n_states_per_label=1,
                         inference_method=inference_method)
    clfl = SubgradientLatentSSVM(model=crfl, max_iter=20, C=10.,
                                 learning_rate=0.001, momentum=0.98)
    crfl.initialize(X, Y)
    clfl.w = np.zeros(crfl.size_joint_feature)  # this disables random init
    clfl.fit(X, Y)

    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=20, C=10.,
                          learning_rate=0.001, momentum=0.98)
    clf.fit(X, Y)
    assert_array_almost_equal(clf.w, clfl.w)
    assert_almost_equal(clf.objective_curve_[-1], clfl.objective_curve_[-1])
    assert_array_equal(clf.predict(X), clfl.predict(X))
    assert_array_equal(clf.predict(X), Y)
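# Hedged aside (a sketch): get_installed filters the candidate list down to
# the inference backends importable in this environment, preserving order,
# so [0] picks the most preferred one that is actually available.
def pick_inference_method(candidates=("qpbo", "ad3", "lp")):
    installed = get_installed(list(candidates))
    if not installed:
        raise RuntimeError("none of %r is installed" % (candidates,))
    return installed[0]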
n_gestures = len(np.unique(gesture_labels))
frame_prior_train, frame_transition_matrix_train = \
    calculate_hmm_params(frame_labels, n_gestures)
gesture_prior_train, gesture_transition_matrix_train = \
    calculate_hmm_params(gesture_labels, n_gestures)

print "Unary (frame) score:", frame_clf_train.score(
    np.vstack(frame_hists_train), np.hstack(frame_labels))
print "Unary (gesture) score:", gesture_clf_train.score(
    np.vstack(gesture_hists_train), np.hstack(gesture_labels))

# overwrite the estimated gesture transitions with a uniform matrix
gesture_transition_matrix_train = np.ones([n_gestures, 3]) / 3.

# Markov CRF
markovCRF = MarkovCRF(n_states=n_gestures, clf=frame_clf_train,
                      prior=frame_prior_train,
                      transition=frame_transition_matrix_train,
                      inference_method='dai')
markov_svm = SubgradientSSVM(markovCRF, verbose=1, C=1., n_jobs=1)
markov_svm.fit(frame_hists_train, frame_labels)
m_predict = markov_svm.predict(frame_hists_train)
print 'Markov w:', markov_svm.w
print 'Markov CRF score: {}%'.format(
    100 * np.sum([np.sum(np.equal(m_predict[i], x))
                  for i, x in enumerate(frame_labels)])
    / np.sum([np.size(x) for x in frame_labels], dtype=np.float))

# semi-Markov CRF
sm_crf = SemiMarkovCRF(n_states=n_gestures, clf=gesture_clf_train,
                       prior=gesture_prior_train,
                       transition_matrix=gesture_transition_matrix_train)
sm_svm = SubgradientSSVM(sm_crf, verbose=1, C=1., n_jobs=1)
sm_svm.fit(frame_hists_train, frame_labels)
sm_predict = sm_svm.predict(frame_hists_train)
print 'Semi-Markov w:', sm_svm.w
print 'Semi-Markov CRF score: {}%'.format(
    100 * np.sum([np.sum(sm_predict[i] == x)
                  for i, x in enumerate(frame_labels)])
    / np.sum([np.size(x) for x in frame_labels], dtype=np.float))

# Markov semi-Markov CRF (renamed from the original rebinding, which
# shadowed the MarkovSemiMarkovCRF class itself)
msm_crf = MarkovSemiMarkovCRF(n_states=n_gestures,
                              markov_prior=frame_prior_train,
                              markov_transition=frame_transition_matrix_train,
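# Hedged helper (hypothetical; not in the original script): both score
# expressions above compute the same frame-level accuracy, which could be
# factored out like this.
def frame_accuracy(predictions, labels):
    correct = np.sum([np.sum(np.equal(p, x))
                      for p, x in zip(predictions, labels)])
    total = np.sum([np.size(x) for x in labels], dtype=np.float)
    return 100. * correct / total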
print("Score with pystruct n-slack ssvm: %f (took %f seconds)" % (np.mean(y_pred == y_test), time_n_slack_svm)) ## 1-slack cutting plane ssvm start = time() one_slack_svm.fit(X_train_bias, y_train) time_one_slack_svm = time() - start y_pred = np.hstack(one_slack_svm.predict(X_test_bias)) print("Score with pystruct 1-slack ssvm: %f (took %f seconds)" % (np.mean(y_pred == y_test), time_one_slack_svm)) #online subgradient ssvm start = time() subgradient_svm.fit(X_train_bias, y_train) time_subgradient_svm = time() - start y_pred = np.hstack(subgradient_svm.predict(X_test_bias)) print("Score with pystruct subgradient ssvm: %f (took %f seconds)" % (np.mean(y_pred == y_test), time_subgradient_svm)) # the standard one-vs-rest multi-class would probably be as good and faster # but solving a different model libsvm = LinearSVC(multi_class='crammer_singer', C=.1) start = time() libsvm.fit(X_train, y_train) time_libsvm = time() - start print("Score with sklearn and libsvm: %f (took %f seconds)" % (libsvm.score(X_test, y_test), time_libsvm)) start = time()
test_conll_os_ssvm = conlleval_fmt(iob_test, test_os_ssvm_preds)
with open('test_conll_os_ssvm.txt', 'wb') as test_conll_os_ssvm_file:
    for sentence in test_conll_os_ssvm:
        test_conll_os_ssvm_file.write(bytes(sentence, 'UTF-8'))
print(conlleval_results('test_conll_os_ssvm.txt'))

if args.subgrad:
    ### fit subgradient ssvm
    crf = ChainCRF()
    sg_ssvm = SubgradientSSVM(crf, max_iter=200, verbose=args.verbose,
                              n_jobs=-1, use_memmapping_pool=0,
                              show_loss_every=20, shuffle=True)
    sg_ssvm.fit(list(X_train_tsvd), y_train)
    test_sg_ssvm_preds = [[id2label[i] for i in sent]
                          for sent in sg_ssvm.predict(X_test_tsvd)]
    test_conll_sg_ssvm = conlleval_fmt(iob_test, test_sg_ssvm_preds)
    with open('test_conll_sg_ssvm.txt', 'wb') as test_conll_sg_ssvm_file:
        for sentence in test_conll_sg_ssvm:
            test_conll_sg_ssvm_file.write(bytes(sentence, 'UTF-8'))
    print(conlleval_results('test_conll_sg_ssvm.txt'))

if args.evals:
    print(conlleval_results('test_conll_svc.txt'))
    print(conlleval_results('test_conll_crfsuite.txt'))
    print(conlleval_results('test_conll_searn.txt'))
    print(conlleval_results('test_conll_os_ssvm.txt'))
    print(conlleval_results('test_conll_sg_ssvm.txt'))
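# Hedged helper (hypothetical; not in the original script): both branches
# above write CoNLL-formatted sentences with the same pattern, which could
# be factored out like this.
def write_conll(path, sentences):
    with open(path, 'wb') as f:
        for sentence in sentences:
            f.write(bytes(sentence, 'UTF-8'))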
class EdgeCRFClassifier:
    def __init__(self, userId="anonymous"):
        self.model = None
        self.learner = None
        self.featurizer = None
        self.userId = userId

    def fresh_train(self, x, y, iterations=10):
        self.model = EdgeFeatureGraphCRF(inference_method="max-product")
        self.learner = SubgradientSSVM(
            model=self.model,
            max_iter=iterations,
            logger=SaveLogger(model_file.format(self.userId + "-learner")))
        self.learner.fit(x, y, warm_start=False)
        self.save()

    def fresh_train_default(self, iterations=10):
        default_train = scriptdir + '/../../../data/compression/' \
                                    'googlecomp100.train.lbl'
        featurizer = edge_featurize.Featurizer()
        x, y = featurizer.fit_transform(default_train)
        self.fresh_train(x, y, iterations=iterations)

    def update(self, x, y):
        """
        Performs an online update of the model
        :param x: Input data
        :param y: List of Numpy arrays of label IDs
        :return:
        """
        # warm_start=True continues from the current weights rather than
        # re-initializing, which is what an online update requires
        self.learner.fit(x, y, warm_start=True)

    def predict(self, x):
        self.check_featurizer_set()
        label_ids = self.learner.predict(x)
        labels = [np.array(self.featurizer.map_inv(sent))
                  for sent in label_ids]
        return labels, label_ids

    def set_featurizer(self, featurizer):
        self.featurizer = featurizer

    def featurize_train(self, train_data, iterations=10):
        self.check_featurizer_set()
        x, y = self.featurizer.fit_transform(train_data)
        self.fresh_train(x, y, iterations)

    def featurize_update(self, src, y):
        self.check_featurizer_set()
        x, _ = self.featurizer.transform(src)
        self.update(x, y)

    def featurize_predict(self, data):
        self.check_featurizer_set()
        x, _ = self.featurizer.transform(data)
        return self.predict(x)

    def save(self, userId=None):
        if not userId:
            userId = self.userId
        with open(model_file.format(userId), 'wb') as pf:
            pickle.dump((self.learner, self.model, self.featurizer), pf,
                        pickle.HIGHEST_PROTOCOL)

    def load(self, userId=None):
        if not userId:
            userId = self.userId
        with open(model_file.format(userId), 'rb') as pf:
            self.learner, self.model, self.featurizer = pickle.load(pf)
        return self

    def load_default_init(self):
        with open(model_file.format("default"), 'rb') as pf:
            self.learner, self.model, self.featurizer = pickle.load(pf)

    def check_featurizer_set(self):
        if not self.featurizer:
            raise RuntimeError("Featurizer not set. Use set_featurizer().")

    def text_predict(self, input_txt):
        original = []
        simplified = []
        X, parses = self.featurizer.transform_plain(input_txt)
        for x, parse in zip(X, parses):
            labels = self.predict([x])[0]
            tokens = parse['form']
            original.append(detokenizer.detokenize([t for t in tokens], True))
            # rebuild the dependency graph from the edge list
            graph = nx.DiGraph()
            for s, t in x[1]:
                graph.add_edge(s, t)
            # drop the subtree rooted at every non-root token labeled DEL;
            # the snapshot via list() avoids mutating the graph mid-iteration
            for i, l in enumerate(labels[0]):
                if l == 'DEL' and graph.has_node(i) and graph.in_degree(i) > 0:
                    for n in list(dfs_tree(graph, i).nodes()):
                        graph.remove_node(n)
            simplified.append(
                detokenizer.detokenize(
                    [tokens[n] for n in sorted(graph.nodes())], True))
        return original, simplified
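# Hedged usage sketch (hypothetical driver, not part of the original module;
# assumes a pickled "default" model created by an earlier fresh_train run):
if __name__ == '__main__':
    clf = EdgeCRFClassifier()
    clf.load_default_init()  # restores learner, model, and featurizer
    original, simplified = clf.text_predict("This is an example sentence .")
    for before, after in zip(original, simplified):
        print(before, "=>", after)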