def ssvm_classifier():
	"""Train an independent multi-label SSVM on PCA-reduced data and report its score.

	Loads data via ``load_data1``, projects it to 1000 PCA components, fits a
	OneSlackSSVM with an independent ('unary' inference) multi-label model,
	pickles the trained learner to ``data/independent_ssvm.pkl`` and prints the
	test-set score.
	"""
	x_train, y_train, x_test, y_test = load_data1()
	print("Data Loaded")

	pca = PCA(n_components=1000)
	x_train_reduced = pca.fit_transform(x_train)
	# BUG FIX: the test set must be projected with the PCA fitted on the
	# training set, not re-fitted on the test set -- otherwise train and test
	# live in different feature spaces.
	x_test_reduced = pca.transform(x_test)
	print("PCA finished")

	print("Learning the model")

	# Independent model: each label is predicted separately (no label graph).
	independent_model = MultiLabelClf(inference_method='unary')
	independent_ssvm = OneSlackSSVM(independent_model, C=.1, tol=0.01)
	independent_ssvm.fit(x_train_reduced, y_train)

	print("saving model ...")
	with open("data/independent_ssvm.pkl", "wb+") as f:
		cp.dump(independent_ssvm, f)

	print(independent_ssvm.score(x_test_reduced, y_test))
def train(x_train, y_train, x_test, y_test):
    """Fit a OneSlackSSVM multi-label classifier and report wrong predictions.

    :param x_train: training feature matrix (converted to float ndarray).
    :param y_train: binary label matrix (converted to int64 ndarray).
    :param x_test: ignored -- evaluation runs on the training data (see below).
    :param y_test: ignored -- evaluation runs on the training data (see below).
    """
    # BUG FIX: np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin float is the documented replacement.
    x_train = np.asarray(x_train, dtype=float)
    y_train = np.asarray(y_train, dtype=np.int64)
    # NOTE(review): the test arguments are deliberately discarded and the
    # model is scored on its own training data -- confirm this is intended.
    x_test = x_train
    y_test = y_train

    from pystruct.learners import OneSlackSSVM
    from pystruct.models import MultiLabelClf

    clf = OneSlackSSVM(MultiLabelClf(),
                       C=1,
                       show_loss_every=1,
                       verbose=1,
                       max_iter=1000)
    clf.fit(x_train, y_train)
    result = clf.predict(x_test)
    print('Result: \n', result)
    print('True label:\n', y_test)
    clf.score(x_test, y_test)
    print('\n')

    # Count and display every sample whose predicted label vector differs
    # from the ground truth (squared difference is non-zero iff any bit flips).
    count = 0
    for i in range(len(result)):
        if np.sum(np.square(y_test[i] - result[i])) != 0:
            print('True label: ', y_test[i], 'Predict:  ', result[i])
            count += 1
    print(count)

    translate_vector(x_test, y_test)
def test_initialization():
    feats = np.random.normal(size=(13, 5))
    labels = np.random.randint(2, size=(13, 3))

    # Without edges the model is a set of independent binary label
    # classifiers; initialize infers all dimensions from the data.
    clf = MultiLabelClf()
    clf.initialize(feats, labels)
    assert_equal(clf.n_states, 2)
    assert_equal(clf.n_labels, 3)
    assert_equal(clf.n_features, 5)
    assert_equal(clf.size_psi, 5 * 3)

    # Pre-configured matching dimensions: initialize is a no-op.
    clf = MultiLabelClf(n_features=5, n_labels=3)
    clf.initialize(feats, labels)  # smoketest

    # A feature count that conflicts with the data must be rejected.
    clf = MultiLabelClf(n_features=3, n_labels=3)
    assert_raises(ValueError, clf.initialize, X=feats, Y=labels)
def test_multilabel_fully():
    # Inference and energy must agree for a fully connected label graph.
    n_features = 5
    n_labels = 4
    edges = np.vstack(list(itertools.combinations(range(n_labels), 2)))
    model = MultiLabelClf(n_labels=n_labels, n_features=n_features,
                            edges=edges)
    rng = np.random.RandomState(0)

    x = rng.normal(size=n_features)
    w = rng.normal(size=n_features * n_labels + 4 * len(edges))
    y = model.inference(x, w)

    # psi dotted with w should reproduce the configuration energy.
    psi = model.psi(x, y)
    energy = compute_energy(model._get_unary_potentials(x, w),
                            model._get_pairwise_potentials(x, w), edges, y)
    assert_almost_equal(energy, np.dot(psi, w))

    # The relaxed (marginal) encoding of the same labeling must give an
    # identical psi vector.
    node_marginals = np.zeros((n_labels, 2))
    node_marginals[np.arange(n_labels), y] = 1
    edge_marginals = []
    for a, b in edges:
        indicator = np.zeros((2, 2))  # one-hot over the 4 joint edge states
        indicator[y[a], y[b]] = 1
        edge_marginals.append(indicator)
    assert_array_almost_equal(
        psi, model.psi(x, (node_marginals, np.vstack(edge_marginals))))
def test_multilabel_independent():
    # test inference and energy with independent model
    # BUG FIX: dtype=np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int is the documented replacement.
    edges = np.zeros((0, 2), dtype=int)
    n_features = 5
    n_labels = 4
    model = MultiLabelClf(n_labels=n_labels, n_features=n_features,
                            edges=edges)
    rnd = np.random.RandomState(0)

    x = rnd.normal(size=5)
    w = rnd.normal(size=n_features * n_labels)
    # test inference: with no edges each label is an independent linear
    # classifier, so inference reduces to a per-label sign test.
    y = model.inference(x, w)
    y_ = np.dot(w.reshape(n_labels, n_features), x) > 0
    assert_array_equal(y, y_)

    # test psi / energy: psi dotted with w reproduces the energy.
    psi = model.psi(x, y)
    energy = compute_energy(model._get_unary_potentials(x, w),
                            model._get_pairwise_potentials(x, w), edges, y)
    assert_almost_equal(energy, np.dot(psi, w))

    # for continuous y: the one-hot relaxation must give the same psi.
    y_continuous = np.zeros((n_labels, 2))
    y_continuous[np.arange(n_labels), y] = 1
    assert_array_almost_equal(
        psi, model.psi(x, (y_continuous, np.zeros((0, n_labels, n_labels)))))
def test_multilabel_independent():
    # test inference and energy with independent model
    # BUG FIX: dtype=np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int is the documented replacement.
    edges = np.zeros((0, 2), dtype=int)
    n_features = 5
    n_labels = 4
    model = MultiLabelClf(n_labels=n_labels, n_features=n_features,
                          edges=edges)
    rnd = np.random.RandomState(0)

    x = rnd.normal(size=5)
    w = rnd.normal(size=n_features * n_labels)
    # test inference: with no edges each label is an independent linear
    # classifier, so inference reduces to a per-label sign test.
    y = model.inference(x, w)
    y_ = np.dot(w.reshape(n_labels, n_features), x) > 0
    assert_array_equal(y, y_)

    # test joint_feature / energy: joint_feature dotted with w reproduces
    # the configuration energy.
    joint_feature = model.joint_feature(x, y)
    energy = compute_energy(model._get_unary_potentials(x, w),
                            model._get_pairwise_potentials(x, w), edges, y)
    assert_almost_equal(energy, np.dot(joint_feature, w))

    # for continuous y: the one-hot relaxation must give the same features.
    y_continuous = np.zeros((n_labels, 2))
    y_continuous[np.arange(n_labels), y] = 1
    assert_array_almost_equal(
        joint_feature, model.joint_feature(x, (y_continuous, np.zeros((0, n_labels, n_labels)))))
def test_multilabel_fully():
    # Inference and energy must agree on a fully connected label graph.
    n_features = 5
    n_labels = 4
    edges = np.vstack(list(itertools.combinations(range(n_labels), 2)))
    model = MultiLabelClf(n_labels=n_labels, n_features=n_features,
                          edges=edges)
    rng = np.random.RandomState(0)

    x = rng.normal(size=n_features)
    w = rng.normal(size=n_features * n_labels + 4 * len(edges))
    y = model.inference(x, w)

    # joint_feature dotted with w reproduces the configuration energy.
    joint_feature = model.joint_feature(x, y)
    energy = compute_energy(model._get_unary_potentials(x, w),
                            model._get_pairwise_potentials(x, w), edges, y)
    assert_almost_equal(energy, np.dot(joint_feature, w))

    # The relaxed (marginal) encoding of the same labeling must yield the
    # identical joint feature vector.
    node_marginals = np.zeros((n_labels, 2))
    node_marginals[np.arange(n_labels), y] = 1
    edge_marginals = []
    for a, b in edges:
        indicator = np.zeros((2, 2))  # one-hot over the 4 joint edge states
        indicator[y[a], y[b]] = 1
        edge_marginals.append(indicator)
    assert_array_almost_equal(
        joint_feature,
        model.joint_feature(x, (node_marginals, np.vstack(edge_marginals))))
def test_initialization():
    feats = np.random.normal(size=(13, 5))
    labels = np.random.randint(2, size=(13, 3))

    # Without edges the model is a set of independent binary label
    # classifiers; initialize infers all dimensions from the data.
    clf = MultiLabelClf()
    clf.initialize(feats, labels)
    assert_equal(clf.n_states, 2)
    assert_equal(clf.n_labels, 3)
    assert_equal(clf.n_features, 5)
    assert_equal(clf.size_joint_feature, 5 * 3)

    # Pre-configured matching dimensions: initialize is a no-op.
    clf = MultiLabelClf(n_features=5, n_labels=3)
    clf.initialize(feats, labels)  # smoketest

    # A feature count that conflicts with the data must be rejected.
    clf = MultiLabelClf(n_features=3, n_labels=3)
    assert_raises(ValueError, clf.initialize, X=feats, Y=labels)
Beispiel #9
0
def train_structured_svm(observations, targets):
    """

    :param observations: our train dataset
    :param targets: multiple target variables.
    :return: the structured svm model
    """
    # Connect every pair of labels; ideally domain knowledge would say which
    # edges matter, but for now the full graph is used.
    n_labels = len(targets[0])
    full_edges = np.vstack(list(itertools.combinations(range(n_labels), 2)))

    # Fully connected multi-label model with LP inference.
    model = MultiLabelClf(edges=full_edges, inference_method='lp')

    learner = OneSlackSSVM(model, inference_cache=50, C=.1, tol=0.01)
    learner.fit(np.array(observations), np.array(targets))

    return learner
def Strukturni(x_train, y_train, x_test, y_test):
    """Benchmark seven multi-label learners on one train/test split.

    Models (result index in brackets): independent [0], fully-connected [1]
    and Chow-Liu-tree [2] multi-label SSVMs, an MLP [3], a decision tree [4],
    and tree [5] / full [6] GraphCRF variants.

    :param x_train: training features (pandas object; ``.values`` is taken).
    :param y_train: training binary label matrix (pandas object).
    :param x_test: test features (pandas object).
    :param y_test: test binary label matrix (pandas object).
    :return: tuple ``(ACC, HL, time_ST)`` of length-7 arrays -- per-bit
             accuracy, Hamming loss, and combined fit+predict seconds.
    """

    import itertools
    import time

    import numpy as np
    from scipy import sparse

    from sklearn.metrics import hamming_loss
    from sklearn.metrics import accuracy_score
    from sklearn.metrics import mutual_info_score
    from scipy.sparse.csgraph import minimum_spanning_tree

    from pystruct.learners import OneSlackSSVM
    from pystruct.models import MultiLabelClf
    from pystruct.models import GraphCRF

    from sklearn.neural_network import MLPClassifier
    from sklearn.tree import DecisionTreeClassifier

    def chow_liu_tree(y_):
        # Chow-Liu structure: spanning tree maximizing pairwise label mutual
        # information (negating MI turns scipy's minimum spanning tree into
        # a maximum one).
        n_labels = y_.shape[1]
        mi = np.zeros((n_labels, n_labels))
        for i in range(n_labels):
            for j in range(n_labels):
                mi[i, j] = mutual_info_score(y_[:, i], y_[:, j])
        mst = minimum_spanning_tree(sparse.csr_matrix(-mi))
        edges = np.vstack(mst.nonzero()).T
        edges.sort(axis=1)
        return edges

    x_train = x_train.values
    y_train = y_train.values.astype(int)
    y_test = y_test.values.astype(int)
    x_test = x_test.values

    time_ST = np.zeros(7)
    HL = np.zeros(7)
    ACC = np.zeros(7)

    n_labels = y_train.shape[1]

    full = np.vstack([x for x in itertools.combinations(range(n_labels), 2)])
    tree = chow_liu_tree(y_train)

    # Build GraphCRF inputs: one (node_features, edges) pair per sample;
    # each label node gets its own 18-feature slice of the sample vector.
    # BUG FIX: the training pairs previously passed transposed edge arrays
    # (tree.T / full.T) while the test pairs did not; GraphCRF expects edges
    # of shape (n_edges, 2), so both sides now use the untransposed arrays.
    train_tree = []
    train_full = []
    test_tree = []
    test_full = []
    for k in range(y_train.shape[0]):
        X_train_CRF = np.zeros([y_train.shape[1], 18])
        for i in range(y_train.shape[1]):
            kolone = np.arange(i * 18, 18 * (i + 1))
            X_train_CRF[i, :] = x_train[k, kolone]
        train_tree.append((X_train_CRF.copy(), tree))
        train_full.append((X_train_CRF.copy(), full))

    for k in range(y_test.shape[0]):
        X_test_CRF = np.zeros([y_test.shape[1], 18])
        for i in range(y_test.shape[1]):
            kolone = np.arange(i * 18, 18 * (i + 1))
            X_test_CRF[i, :] = x_test[k, kolone]
        test_tree.append((X_test_CRF.copy(), tree))
        test_full.append((X_test_CRF.copy(), full))

    """ Define models """
    full_model = MultiLabelClf(edges=full)
    independent_model = MultiLabelClf()
    tree_model = MultiLabelClf(edges=tree, inference_method='max-product')

    modelCRF_tree = GraphCRF(directed=False, inference_method="max-product")
    modelCRF_full = GraphCRF(directed=False, inference_method="max-product")
    """ Define learn algorithm """
    full_ssvm = OneSlackSSVM(full_model,
                             inference_cache=50,
                             C=.1,
                             tol=0.01,
                             max_iter=150)
    tree_ssvm = OneSlackSSVM(tree_model,
                             inference_cache=50,
                             C=.1,
                             tol=0.01,
                             max_iter=150)
    independent_ssvm = OneSlackSSVM(independent_model,
                                    C=.1,
                                    tol=0.01,
                                    max_iter=150)
    MLP = MLPClassifier()
    DT = DecisionTreeClassifier()
    CRF_tree = OneSlackSSVM(model=modelCRF_tree, C=.1, max_iter=250)
    CRF_full = OneSlackSSVM(model=modelCRF_full, C=.1, max_iter=250)
    """ Fit models (each timing covers fit + predict) """

    start_time = time.time()
    independent_ssvm.fit(x_train, y_train)
    y_ind = independent_ssvm.predict(x_test)
    time_ST[0] = time.time() - start_time

    start_time = time.time()
    full_ssvm.fit(x_train, y_train)
    y_full = full_ssvm.predict(x_test)
    time_ST[1] = time.time() - start_time

    start_time = time.time()
    tree_ssvm.fit(x_train, y_train)
    y_tree = tree_ssvm.predict(x_test)
    time_ST[2] = time.time() - start_time

    start_time = time.time()
    MLP.fit(x_train, y_train)
    y_MLP = MLP.predict(x_test)
    time_ST[3] = time.time() - start_time

    start_time = time.time()
    DT.fit(x_train, y_train)
    y_DT = DT.predict(x_test)
    time_ST[4] = time.time() - start_time

    start_time = time.time()
    CRF_tree.fit(train_tree, y_train)
    yCRF_tree = np.asarray(CRF_tree.predict(test_tree))
    time_ST[5] = time.time() - start_time

    start_time = time.time()
    CRF_full.fit(train_full, y_train)
    yCRF_full = np.asarray(CRF_full.predict(test_full))
    time_ST[6] = time.time() - start_time
    """ EVALUATE models """
    y_full = np.asarray(y_full)
    y_ind = np.asarray(y_ind)
    y_tree = np.asarray(y_tree)

    HL[0] = hamming_loss(y_test, y_ind)
    HL[1] = hamming_loss(y_test, y_full)
    HL[2] = hamming_loss(y_test, y_tree)
    HL[3] = hamming_loss(y_test, y_MLP)
    HL[4] = hamming_loss(y_test, y_DT)
    HL[5] = hamming_loss(y_test, yCRF_tree)
    HL[6] = hamming_loss(y_test, yCRF_full)

    # Flatten to per-bit vectors so accuracy_score compares individual labels.
    y_ind = y_ind.reshape([y_ind.shape[0] * y_ind.shape[1]])
    y_full = y_full.reshape([y_full.shape[0] * y_full.shape[1]])
    y_tree = y_tree.reshape([y_tree.shape[0] * y_tree.shape[1]])
    y_MLP = y_MLP.reshape([y_MLP.shape[0] * y_MLP.shape[1]])
    y_DT = y_DT.reshape([y_DT.shape[0] * y_DT.shape[1]])
    yCRF_tree = yCRF_tree.reshape([yCRF_tree.shape[0] * yCRF_tree.shape[1]])
    yCRF_full = yCRF_full.reshape([yCRF_full.shape[0] * yCRF_full.shape[1]])
    y_test = y_test.reshape([y_test.shape[0] * y_test.shape[1]])

    ACC[0] = accuracy_score(y_test, y_ind)
    ACC[1] = accuracy_score(y_test, y_full)
    ACC[2] = accuracy_score(y_test, y_tree)
    ACC[3] = accuracy_score(y_test, y_MLP)
    ACC[4] = accuracy_score(y_test, y_DT)
    # BUG FIX: slots 5 and 6 previously re-scored y_MLP / y_DT instead of
    # the CRF predictions they were timed and Hamming-scored for.
    ACC[5] = accuracy_score(y_test, yCRF_tree)
    ACC[6] = accuracy_score(y_test, yCRF_full)

    return ACC, HL, time_ST
def crf_postprocess(X_train, y_train, X_test, train_examples=2000):
    """Fit an N-slack multi-label SSVM and predict labels for X_test.

    ``train_examples`` is accepted for interface compatibility but unused.
    Returns the predictions as a numpy array.
    """
    learner = NSlackSSVM(MultiLabelClf(), verbose=1, n_jobs=-1,
                         show_loss_every=1)
    learner.fit(X_train, y_train)
    return np.array(learner.predict(X_test))
# Load feature vectors and fold assignments from command-line .npy files.
fv = np.load(sys.argv[1])
fold = np.load(sys.argv[2])

features, labels = reshape_features(fv)

#pdb.set_trace()

# Fold indicator per sample: 0 selects training rows, 1 selects test rows.
fidx = fold[0, :, 0]

X_train, X_test = features[fidx == 0], features[fidx == 1]
y_train, y_test = labels[fidx == 0], labels[fidx == 1]

# NOTE(review): this ChainCRF instance is immediately overwritten below --
# dead assignment, presumably left over from experimentation.
model = ChainCRF()
#model = BinaryClf(X_train.shape[0])
model = MultiLabelClf()
ssvm = NSlackSSVM(model=model, max_iter=500)

# Per-sample row lists.
X_train_l = [row for row in X_train]
y_train_l = [row for row in y_train]

X_test_l = [row for row in X_test]

# NOTE(review): fit uses the raw arrays, so X_train_l / y_train_l above are
# unused -- confirm whether the list versions were intended here.
ssvm.fit(X_train, y_train)

y_pred_l = ssvm.predict(X_test_l)

y_pred = np.array(y_pred_l)
#for pred in y_pred:
#    print(pred)
Beispiel #13
0
    X = np.hstack([X, np.ones((X.shape[0], 1))])
    y = yeast.target.toarray().astype(np.int).T

    X_train, X_test = X[:1500], X[1500:]
    y_train, y_test = y[:1500], y[1500:]

else:
    scene = load_scene()
    X_train, X_test = scene['X_train'], scene['X_test']
    y_train, y_test = scene['y_train'], scene['y_test']

# Number of binary labels per sample.
n_labels = y_train.shape[1]
# Fully connected label graph: one edge per label pair.
full = np.vstack([x for x in itertools.combinations(range(n_labels), 2)])
# Chow-Liu (maximum mutual information) spanning tree over the labels.
tree = chow_liu_tree(y_train)

full_model = MultiLabelClf(edges=full, inference_method='qpbo')
independent_model = MultiLabelClf(inference_method='unary')
tree_model = MultiLabelClf(edges=tree, inference_method="max-product")

full_ssvm = OneSlackSSVM(full_model, inference_cache=50, C=.1, tol=0.01)

tree_ssvm = OneSlackSSVM(tree_model, inference_cache=50, C=.1, tol=0.01)

independent_ssvm = OneSlackSSVM(independent_model, C=.1, tol=0.01)

print("fitting independent model...")
independent_ssvm.fit(X_train, y_train)
print("fitting full model...")
full_ssvm.fit(X_train, y_train)
print("fitting tree model...")
tree_ssvm.fit(X_train, y_train)
Beispiel #14
0
def Strukturni(x_train, y_train, x_test, y_test):
    """Benchmark five multi-label learners on one train/test split.

    Models (result index in brackets): independent [0], fully-connected [1]
    and Chow-Liu-tree [2] multi-label SSVMs, an MLP [3] and a decision
    tree [4].

    :param x_train: training features (pandas object; ``.values`` is taken).
    :param y_train: training binary label matrix (pandas object).
    :param x_test: test features (pandas object).
    :param y_test: test binary label matrix (pandas object).
    :return: tuple ``(ACC, HL, time_ST)`` of length-5 arrays -- per-bit
             accuracy, Hamming loss, and combined fit+predict seconds.
    """

    import itertools
    import time

    import numpy as np
    from scipy import sparse

    from sklearn.metrics import hamming_loss
    from sklearn.metrics import accuracy_score
    from sklearn.metrics import mutual_info_score
    from scipy.sparse.csgraph import minimum_spanning_tree

    from pystruct.learners import OneSlackSSVM
    from pystruct.models import MultiLabelClf
    #    from pystruct.models import GraphCRF
    from sklearn.neural_network import MLPClassifier
    from sklearn.tree import DecisionTreeClassifier

    # NOTE(review): unlike the sibling Strukturni variant, the label matrices
    # are not cast with astype(int) here -- confirm the inputs are already
    # integer-typed.
    x_train = x_train.values
    y_train = y_train.values
    y_test = y_test.values
    x_test = x_test.values
    """ CRF chain """
    """ SSVM, MLP - pystruct """
    """CREATE DATASET FOR GNN """
    def chow_liu_tree(y_):
        # Chow-Liu structure: spanning tree maximizing pairwise label mutual
        # information (negating MI turns scipy's minimum spanning tree into
        # a maximum one).
        n_labels = y_.shape[1]
        mi = np.zeros((n_labels, n_labels))
        for i in range(n_labels):
            for j in range(n_labels):
                mi[i, j] = mutual_info_score(y_[:, i], y_[:, j])
        mst = minimum_spanning_tree(sparse.csr_matrix(-mi))
        edges = np.vstack(mst.nonzero()).T
        edges.sort(axis=1)
        return edges

    # Number of binary labels per sample.
    n_labels = y_train.shape[1]
    # Fully connected label graph: one edge per label pair.
    full = np.vstack([x for x in itertools.combinations(range(n_labels), 2)])
    tree = chow_liu_tree(y_train)
    """ Define models """
    full_model = MultiLabelClf(edges=full)
    independent_model = MultiLabelClf()
    tree_model = MultiLabelClf(edges=tree, inference_method='max-product')
    """ Define learn algorithm """
    full_ssvm = OneSlackSSVM(full_model,
                             inference_cache=50,
                             C=.1,
                             tol=0.01,
                             max_iter=150)
    tree_ssvm = OneSlackSSVM(tree_model,
                             inference_cache=50,
                             C=.1,
                             tol=0.01,
                             max_iter=150)
    independent_ssvm = OneSlackSSVM(independent_model,
                                    C=.1,
                                    tol=0.01,
                                    max_iter=150)

    MLP = MLPClassifier()
    DT = DecisionTreeClassifier()
    """ Fit models """

    # Each timing slot covers fit + predict for one model; note the slots
    # are filled out of order (DT first) but keep fixed index meanings.
    time_ST = np.zeros(5)

    start_time = time.time()
    DT.fit(x_train, y_train)
    y_DT = DT.predict(x_test)
    time_ST[4] = time.time() - start_time

    start_time = time.time()
    MLP.fit(x_train, y_train)
    y_MLP = MLP.predict(x_test)
    time_ST[3] = time.time() - start_time

    start_time = time.time()
    independent_ssvm.fit(x_train, y_train)
    y_ind = independent_ssvm.predict(x_test)
    time_ST[0] = time.time() - start_time

    start_time = time.time()
    full_ssvm.fit(x_train, y_train)
    y_full = full_ssvm.predict(x_test)
    time_ST[1] = time.time() - start_time

    start_time = time.time()
    tree_ssvm.fit(x_train, y_train)
    y_tree = tree_ssvm.predict(x_test)
    time_ST[2] = time.time() - start_time
    """ EVALUATE models """
    HL = np.zeros(5)
    ACC = np.zeros(5)

    # pystruct predict returns lists; convert for metric computation.
    y_full = np.asarray(y_full)
    y_ind = np.asarray(y_ind)
    y_tree = np.asarray(y_tree)

    HL[0] = hamming_loss(y_test, y_ind)
    HL[1] = hamming_loss(y_test, y_full)
    HL[2] = hamming_loss(y_test, y_tree)
    HL[3] = hamming_loss(y_test, y_MLP)
    HL[4] = hamming_loss(y_test, y_DT)

    # Flatten to per-bit vectors so accuracy_score compares individual labels.
    y_ind = y_ind.reshape([y_ind.shape[0] * y_ind.shape[1]])
    y_full = y_full.reshape([y_full.shape[0] * y_full.shape[1]])
    y_tree = y_tree.reshape([y_tree.shape[0] * y_tree.shape[1]])
    y_MLP = y_MLP.reshape([y_MLP.shape[0] * y_MLP.shape[1]])
    y_DT = y_DT.reshape([y_DT.shape[0] * y_DT.shape[1]])
    y_test = y_test.reshape([y_test.shape[0] * y_test.shape[1]])

    ACC[0] = accuracy_score(y_test, y_ind)
    ACC[1] = accuracy_score(y_test, y_full)
    ACC[2] = accuracy_score(y_test, y_tree)
    ACC[3] = accuracy_score(y_test, y_MLP)
    ACC[4] = accuracy_score(y_test, y_DT)

    return ACC, HL, time_ST
Beispiel #15
0
    yeast = fetch_mldata("yeast")

    X = yeast.data
    X = np.hstack([X, np.ones((X.shape[0], 1))])
    y = yeast.target.toarray().astype(np.int).T

    X_train, X_test = X[:1500], X[1500:]
    y_train, y_test = y[:1500], y[1500:]

else:
    scene = load_scene()
    X_train, X_test = scene['X_train'], scene['X_test']
    y_train, y_test = scene['y_train'], scene['y_test']

# Number of binary labels per sample.
n_labels = y_train.shape[1]
# Fully connected label graph (NOTE(review): unused in this fragment).
full = np.vstack([x for x in itertools.combinations(range(n_labels), 2)])
# Chow-Liu (maximum mutual information) spanning tree over the labels.
tree = chow_liu_tree(y_train)

#tree_model = MultiLabelClf(edges=tree, inference_method=('ogm', {'alg': 'dyn'}))
tree_model = MultiLabelClf(edges=tree, inference_method='max-product')

tree_ssvm = OneSlackSSVM(tree_model, inference_cache=50, C=.1, tol=0.01)

print("fitting tree model...")
tree_ssvm.fit(X_train, y_train)

# Hamming loss: fraction of individual label bits predicted wrongly.
print("Training loss tree model: %f" %
      hamming_loss(y_train, np.vstack(tree_ssvm.predict(X_train))))
print("Test loss tree model: %f" %
      hamming_loss(y_test, np.vstack(tree_ssvm.predict(X_test))))
Beispiel #16
0
            if (gt == est).all():
                exact += 1
    return exact / (datalen)


# Location of the pre-exported numpy text dumps.
vfnumpypath = "../vflabelnumpy/"

Xval = np.loadtxt(vfnumpypath + "Xval.txt")
Yval = np.loadtxt(vfnumpypath + "Yval.txt", dtype="int")
print("val end export")
Xtrain = np.loadtxt(vfnumpypath + "Xtrain.txt")
Ytrain = np.loadtxt(vfnumpypath + "Ytrain.txt", dtype="int")
print("train end export")

#independent Model
# Each label is predicted on its own ('unary' inference, no label graph).
independent_model = MultiLabelClf(inference_method='unary')
independent_ssvm = OneSlackSSVM(independent_model, C=.1, tol=0.01)

print("fitting independent model...")
independent_ssvm.fit(Xtrain, Ytrain)

#print np.vstack(independent_ssvm.predict(Xval))[1,:]

# NOTE(review): check_exactmatchratio and datatotest are defined elsewhere
# in this script -- verify they are in scope here.
print("Test exact matching ratio: %f" % check_exactmatchratio(
    Yval, np.vstack(independent_ssvm.predict(Xval)), datatotest))

# NOTE(review): this compares row 3 of the validation labels against row 3
# of predictions made on Xtrain -- confirm Xval was not intended instead.
print(
    f1_score(Yval[3, :],
             np.vstack(independent_ssvm.predict(Xtrain))[3, :],
             average='macro'))
'''
Beispiel #17
0
    X = np.hstack([X, np.ones((X.shape[0], 1))])
    y = yeast.target.toarray().astype(np.int).T

    X_train, X_test = X[:1500], X[1500:]
    y_train, y_test = y[:1500], y[1500:]

else:
    scene = load_scene()
    X_train, X_test = scene['X_train'], scene['X_test']
    y_train, y_test = scene['y_train'], scene['y_test']

# Number of binary labels per sample.
n_labels = y_train.shape[1]
# Fully connected label graph: one edge per label pair.
full = np.vstack([x for x in itertools.combinations(range(n_labels), 2)])
# Chow-Liu (maximum mutual information) spanning tree over the labels.
tree = chow_liu_tree(y_train)

# inference_method=None defers solver choice to pystruct's default --
# NOTE(review): confirm which solver is available in this environment.
full_model = MultiLabelClf(edges=full, inference_method=None)
independent_model = MultiLabelClf(inference_method='unary')
tree_model = MultiLabelClf(edges=tree)

full_ssvm = OneSlackSSVM(full_model, inference_cache=50, C=.1, tol=0.01)

tree_ssvm = OneSlackSSVM(tree_model, inference_cache=50, C=.1, tol=0.01)

independent_ssvm = OneSlackSSVM(independent_model, C=.1, tol=0.01)

print("fitting independent model...")
independent_ssvm.fit(X_train, y_train)
print("fitting full model...")
full_ssvm.fit(X_train, y_train)
print("fitting tree model...")
tree_ssvm.fit(X_train, y_train)