Ejemplo n.º 1
0
def visualize_pascal(plot_probabilities=False):
    """Save qualitative result figures for the Pascal validation split.

    For each validation image, writes figures_pascal_val/<name>.png with
    six panels: raw image, pixel ground truth, superpixel boundaries,
    argmax of the pixel-wise potentials, superpixel ground truth, and
    the superpixel argmax prediction.

    Parameters
    ----------
    plot_probabilities : bool, default=False
        If True, additionally save a grid of the 21 per-class
        probability maps to figures_pascal_val/<name>_prob.png.
        NOTE(review): this path indexes x as x[:, :, k], i.e. it assumes
        x is a (h, w, 21) probability volume — confirm against load_pascal.
    """
    data = load_pascal('val')
    ds = PascalSegmentation()
    for x, y, f, sps in zip(data.X, data.Y, data.file_names, data.superpixels):
        fig, ax = plt.subplots(2, 3)
        ax = ax.ravel()
        image = ds.get_image(f)
        y_pixel = ds.get_ground_truth(f)
        x_raw = load_kraehenbuehl(f)  # pixel-wise unary potentials for f

        boundary_image = mark_boundaries(image, sps)

        ax[0].imshow(image)
        ax[1].imshow(y_pixel, cmap=ds.cmap)
        ax[2].imshow(boundary_image)
        # fixed vmin/vmax so label colors are comparable across panels
        ax[3].imshow(np.argmax(x_raw, axis=-1), cmap=ds.cmap, vmin=0, vmax=256)
        ax[4].imshow(y[sps], cmap=ds.cmap, vmin=0, vmax=256)
        ax[5].imshow(np.argmax(x, axis=-1)[sps], cmap=ds.cmap, vmin=0,
                     vmax=256)
        for a in ax:
            a.set_xticks(())
            a.set_yticks(())
        plt.savefig("figures_pascal_val/%s.png" % f, bbox_inches='tight')
        plt.close()
        if plot_probabilities:
            fig, ax = plt.subplots(3, 7)
            for k in range(21):
                ax.ravel()[k].matshow(x[:, :, k], vmin=0, vmax=1)
            for a in ax.ravel():
                a.set_xticks(())
                a.set_yticks(())
            plt.savefig("figures_pascal_val/%s_prob.png" % f,
                        bbox_inches='tight')
            plt.close()
    tracer()  # drop into the debugger/tracer after the last image
Ejemplo n.º 2
0
def train_svm(C=0.1, grid=False):
    """Train a linear SVM on SIFT bag-of-words superpixel features.

    Fits on the kTrain split, prints training accuracy and superpixel
    evaluation, then evaluates on the kVal split.
    """
    pascal = PascalSegmentation()

    files_train = pascal.get_split("kTrain")
    superpixels = []
    for image_file in files_train:
        superpixels.append(slic_n(pascal.get_image(image_file),
                                  n_superpixels=100, compactness=10))
    bow = SiftBOW(pascal, n_words=1000, color_sift=True)
    data_train = bow.fit_transform(files_train, superpixels)
    data_train = add_global_descriptor(data_train)

    svm = LinearSVC(C=C, dual=False, class_weight='auto')
    chi2 = AdditiveChi2Sampler()

    X = np.vstack(data_train.X)
    y = np.hstack(data_train.Y)
    X = chi2.fit_transform(X)
    svm.fit(X, y)
    print(svm.score(X, y))
    predictions_train = [svm.predict(chi2.transform(x))
                         for x in data_train.X]
    eval_on_sp(pascal, data_train, predictions_train, print_results=True)

    files_val = pascal.get_split("kVal")
    superpixels_val = []
    for image_file in files_val:
        superpixels_val.append(slic_n(pascal.get_image(image_file),
                                      n_superpixels=100, compactness=10))
    data_val = bow.transform(files_val, superpixels_val)
    data_val = add_global_descriptor(data_val)
    predictions_val = [svm.predict(chi2.transform(x)) for x in data_val.X]
    eval_on_sp(pascal, data_val, predictions_val, print_results=True)

    tracer()
Ejemplo n.º 3
0
def load_pascal(which='train', year="2010", sp_type="slic", n_jobs=-1):
    """Load a Pascal split in parallel, one job per image file."""
    pascal = PascalSegmentation()
    files = pascal.get_split(which=which, year=year)
    loader = delayed(load_pascal_single)
    results = Parallel(n_jobs=n_jobs)(
        loader(f, which=which, sp_type=sp_type, pascal=pascal)
        for f in files)
    X, Y, superpixels, segments = zip(*results)
    if sp_type != "slic":
        # non-slic superpixels come with an extra segment hierarchy
        return HierarchicalDataBunch(X, Y, files, superpixels, segments)
    return DataBunch(X, Y, files, superpixels)
Ejemplo n.º 4
0
def test_remove_small_segments():
    """Check that merge_small_sp leaves no region below 50 pixels."""
    pascal = PascalSegmentation()
    train_files = pascal.get_split()

    idx = 10  # an arbitrary training image
    image = pascal.get_image(train_files[idx])
    segments, superpixels = superpixels_segments(train_files[idx])
    new_regions, correspondences = merge_small_sp(image, superpixels)
    new_counts = np.bincount(new_regions.ravel())
    if (new_counts < 50).any():
        raise ValueError("Stupid thing!")
def visualize_sps():
    """Save cleaned-superpixel boundary overlays to segment_sp_fixed/."""
    pascal = PascalSegmentation()
    train_files = pascal.get_split()

    for image_file in train_files:
        print(image_file)
        image = pascal.get_image(image_file)
        segments, superpixels = superpixels_segments(image_file)
        # merge tiny superpixels, then morphologically clean the map
        merged, correspondences = merge_small_sp(image, superpixels)
        cleaned = morphological_clean_sp(image, merged, 4)
        overlay = mark_boundaries(image, cleaned)
        imsave("segment_sp_fixed/%s.png" % image_file, overlay)
Ejemplo n.º 6
0
def visualize_sps():
    """Write boundary images of the cleaned superpixels for every
    training file into segment_sp_fixed/."""
    pascal = PascalSegmentation()
    train_files = pascal.get_split()

    for image_file in train_files:
        print(image_file)
        img = pascal.get_image(image_file)
        _, sps = superpixels_segments(image_file)
        regions, _ = merge_small_sp(img, sps)
        regions = morphological_clean_sp(img, regions, 4)
        imsave("segment_sp_fixed/%s.png" % image_file,
               mark_boundaries(img, regions))
Ejemplo n.º 7
0
def load_pascal_pixelwise(which='train', year="2010"):
    """Load pixel-wise potentials and ground truth for a Pascal split.

    Parameters
    ----------
    which : {'train', 'val'}
        Which Pascal segmentation split to read.
    year : str, default="2010"
        Keep only files whose name prefix (before '_') compares <= year.

    Returns
    -------
    DataBunchNoSP
        X: per-pixel potentials (from load_kraehenbuehl), Y: per-pixel
        ground-truth labels, plus the file names.

    Raises
    ------
    ValueError
        If `which` is not 'train' or 'val'.
    """
    pascal = PascalSegmentation()
    if which not in ["train", "val"]:
        raise ValueError("Expected 'which' to be 'train' or 'val', got %s." %
                         which)
    split_file = pascal_path + "/ImageSets/Segmentation/%s.txt" % which
    # np.str was a deprecated alias for the builtin str and was removed
    # in NumPy 1.24; dtype=str is the drop-in replacement.
    files = np.loadtxt(split_file, dtype=str)
    files = [f for f in files if f.split("_")[0] <= year]
    X, Y = [], []
    for f in files:
        X.append(load_kraehenbuehl(f))
        Y.append(pascal.get_ground_truth(f))

    return DataBunchNoSP(X, Y, files)
def visualize_segments():
    """Plot each CPMC segment over the cleaned superpixel boundaries.

    For every training image: overlays segment boundaries (red) on the
    cleaned superpixel map, scatters segment centers (red) and
    superpixel centers (blue), and draws the superpixel-to-segment graph
    edges. Saves one figure per image to segments_test/<name>.png.
    """
    pascal = PascalSegmentation()
    train_files = pascal.get_split()

    for image_file in train_files:
        print(image_file)
        image = pascal.get_image(image_file)
        segments, superpixels = superpixels_segments(image_file)
        # Merge tiny superpixels, then morphologically clean the result.
        # The original ran this merge/clean pass twice back-to-back on
        # the same inputs; the second pass was redundant and is removed.
        new_regions, correspondences = merge_small_sp(image, superpixels)
        clean_regions = morphological_clean_sp(image, new_regions, 4)
        marked = mark_boundaries(image, clean_regions)
        edges = create_segment_sp_graph(segments, clean_regions)
        edges = np.array(edges)
        n_segments = segments.shape[2]
        # np.int was removed in NumPy 1.24; builtin int is equivalent.
        segment_centers = [
            regionprops(segments.astype(int)[:, :, i],
                        ['Centroid'])[0]['Centroid'] for i in range(n_segments)
        ]
        # (row, col) centroids -> (x, y) for matplotlib plotting
        segment_centers = np.vstack(segment_centers)[:, ::-1]
        superpixel_centers = get_superpixel_centers(clean_regions)
        grr = min(n_segments, 10)
        fig, axes = plt.subplots(3, grr // 3, figsize=(30, 30))

        for i, ax in enumerate(axes.ravel()):
            ax.imshow(mark_boundaries(marked, segments[:, :, i], (1, 0, 0)))
            ax.scatter(segment_centers[:, 0],
                       segment_centers[:, 1],
                       color='red')
            ax.scatter(superpixel_centers[:, 0],
                       superpixel_centers[:, 1],
                       color='blue')
            # draw the graph edges incident on segment i
            this_edges = edges[edges[:, 1] == i]
            for edge in this_edges:
                ax.plot([
                    superpixel_centers[edge[0]][0], segment_centers[edge[1]][0]
                ], [
                    superpixel_centers[edge[0]][1], segment_centers[edge[1]][1]
                ],
                        c='black')
            ax.set_xlim(0, superpixels.shape[1])
            ax.set_ylim(superpixels.shape[0], 0)
            ax.set_xticks(())
            ax.set_yticks(())
        plt.savefig("segments_test/%s.png" % image_file)
        plt.close()
Ejemplo n.º 9
0
def train_svm(C=0.1, grid=False):
    """Train a LinearSVC on precomputed superpixel features.

    Parameters
    ----------
    C : float, default=0.1
        SVM regularization strength.
    grid : bool, default=False
        If True, run a grid search over C on the kTrain split with
        5-fold image-wise cross-validation; otherwise fit once on the
        'train' split and evaluate on train and val.

    NOTE(review): LeavePLabelOut and Scorer belong to an old
    scikit-learn API (removed in later releases) -- confirm the pinned
    sklearn version before using the grid path.
    """
    ds = PascalSegmentation()
    svm = LinearSVC(C=C, dual=False, class_weight='auto')

    if grid:
        data_train = load_pascal("kTrain")
        X, y = shuffle(data_train.X, data_train.Y)
        # prepare leave-one-label-out by assigning labels to images
        image_indicators = np.hstack([np.repeat(i, len(x)) for i, x in
                                      enumerate(X)])
        # go down to only 5 "folds"
        labels = image_indicators % 5
        X, y = np.vstack(X), np.hstack(y)

        cv = LeavePLabelOut(labels=labels, p=1)
        param_grid = {'C': 10. ** np.arange(-3, 3)}
        scorer = Scorer(recall_score, average="macro")
        grid_search = GridSearchCV(svm, param_grid=param_grid, cv=cv,
                                   verbose=10, scoring=scorer, n_jobs=-1)
        grid_search.fit(X, y)
    else:
        data_train = load_pascal("train")
        X, y = np.vstack(data_train.X), np.hstack(data_train.Y)
        svm.fit(X, y)
        print(svm.score(X, y))
        # superpixel-level evaluation on the training data
        eval_on_sp(ds, data_train, [svm.predict(x) for x in data_train.X],
                   print_results=True)

        # and on the validation split
        data_val = load_pascal("val")
        eval_on_sp(ds, data_val, [svm.predict(x) for x in data_val.X],
                   print_results=True)
Ejemplo n.º 10
0
def main():
    """Save overlays of km-clustered segments on slic superpixels."""
    from pascal.pascal_helpers import load_pascal
    from datasets.pascal import PascalSegmentation
    from utils import add_edges
    from scipy.misc import imsave
    from skimage.segmentation import mark_boundaries

    ds = PascalSegmentation()
    data = load_pascal("train1")
    data = add_edges(data, independent=False)

    for features, name, sps in zip(data.X, data.file_names,
                                   data.superpixels):
        segments = get_km_segments(features, ds.get_image(name), sps,
                                   n_segments=25)
        # superpixel boundaries first, then segment boundaries in red
        with_sps = mark_boundaries(ds.get_image(name), sps)
        boundary_image = mark_boundaries(with_sps, segments[sps],
                                         color=[1, 0, 0])
        imsave("hierarchy_sp_own_25/%s.png" % name, boundary_image)
Ejemplo n.º 11
0
def eval_segment_best_possible():
    """Evaluate the oracle accuracy achievable with km segments.

    Projects the ground truth onto 25 km segments per image and scores
    the resulting labeling against the pixel-level ground truth.
    """
    ds = PascalSegmentation()
    print("loading")
    data = load_pascal('train')
    print("getting edges")
    data = add_edges(data)
    print("computing segments")
    segments = []
    for x, image_name, sps in zip(data.X, data.file_names,
                                  data.superpixels):
        segments.append(get_km_segments(x, ds.get_image(image_name), sps,
                                        n_segments=25))
    print("combining superpixels")
    # expand the per-superpixel segment ids to pixel maps
    segments = [seg[sp] for seg, sp in zip(segments, data.superpixels)]
    predictions = [gt_in_sp(ds, f, seg)[seg]
                   for seg, f in zip(segments, data.file_names)]
    Y_true = [ds.get_ground_truth(f) for f in data.file_names]
    hamming, jaccard = eval_on_pixels(ds, Y_true, predictions,
                                      print_results=True)
    tracer()
Ejemplo n.º 12
0
def visualize_segments():
    """Visualize CPMC segments against cleaned superpixels.

    Per training image: plots each segment's boundary (red) on the
    cleaned superpixel boundaries, scatters segment centers (red) and
    superpixel centers (blue), and draws the segment-superpixel graph
    edges. Writes segments_test/<name>.png.
    """
    pascal = PascalSegmentation()
    train_files = pascal.get_split()

    for image_file in train_files:
        print(image_file)
        image = pascal.get_image(image_file)
        segments, superpixels = superpixels_segments(image_file)
        # Merge tiny superpixels and clean the map morphologically.
        # The original repeated this pass twice with identical inputs;
        # the redundant second pass is removed.
        new_regions, correspondences = merge_small_sp(image, superpixels)
        clean_regions = morphological_clean_sp(image, new_regions, 4)
        marked = mark_boundaries(image, clean_regions)
        edges = create_segment_sp_graph(segments, clean_regions)
        edges = np.array(edges)
        n_segments = segments.shape[2]
        # np.int was removed in NumPy 1.24; builtin int is equivalent.
        segment_centers = [regionprops(segments.astype(int)[:, :, i],
                                       ['Centroid'])[0]['Centroid'] for i in
                           range(n_segments)]
        # (row, col) -> (x, y) for plotting
        segment_centers = np.vstack(segment_centers)[:, ::-1]
        superpixel_centers = get_superpixel_centers(clean_regions)
        grr = min(n_segments, 10)
        fig, axes = plt.subplots(3, grr // 3, figsize=(30, 30))

        for i, ax in enumerate(axes.ravel()):
            ax.imshow(mark_boundaries(marked, segments[:, :, i], (1, 0, 0)))
            ax.scatter(segment_centers[:, 0], segment_centers[:, 1],
                       color='red')
            ax.scatter(superpixel_centers[:, 0], superpixel_centers[:, 1],
                       color='blue')
            # draw the graph edges incident on segment i
            this_edges = edges[edges[:, 1] == i]
            for edge in this_edges:
                ax.plot([superpixel_centers[edge[0]][0],
                         segment_centers[edge[1]][0]],
                        [superpixel_centers[edge[0]][1],
                         segment_centers[edge[1]][1]], c='black')
            ax.set_xlim(0, superpixels.shape[1])
            ax.set_ylim(superpixels.shape[0], 0)
            ax.set_xticks(())
            ax.set_yticks(())
        plt.savefig("segments_test/%s.png" % image_file)
        plt.close()
Ejemplo n.º 13
0
def main():
    """Overlay km segment boundaries (red) on slic superpixel boundaries
    and write the result to hierarchy_sp_own_25/."""
    from pascal.pascal_helpers import load_pascal
    from datasets.pascal import PascalSegmentation
    from utils import add_edges
    from scipy.misc import imsave
    from skimage.segmentation import mark_boundaries

    ds = PascalSegmentation()
    data = load_pascal("train1")
    data = add_edges(data, independent=False)

    for x, name, sps in zip(data.X, data.file_names, data.superpixels):
        image = ds.get_image(name)
        segments = get_km_segments(x, image, sps, n_segments=25)
        overlay = mark_boundaries(image, sps)
        overlay = mark_boundaries(overlay, segments[sps], color=[1, 0, 0])
        imsave("hierarchy_sp_own_25/%s.png" % name, overlay)
Ejemplo n.º 14
0
def main():
    """Fit an n-slack SSVM with an edge-feature graph CRF on CPMC data."""
    ds = PascalSegmentation()
    # load and prepare the training split
    edge_type = "pairwise"
    which = "kTrain"
    data_train = load_pascal(which=which, sp_type="cpmc")
    data_train = add_edges(data_train, edge_type)
    data_train = add_edge_features(ds, data_train)
    data_train = discard_void(ds, data_train, ds.void_label)

    X, Y = data_train.X, data_train.Y

    # inverse-frequency class weights, rescaled to sum to 21 classes
    counts = np.bincount(np.hstack(Y))
    class_weights = 1. / counts
    class_weights *= 21. / np.sum(class_weights)

    model = crfs.EdgeFeatureGraphCRF(class_weight=class_weights,
                                     symmetric_edge_features=[0, 1],
                                     antisymmetric_edge_features=[2],
                                     inference_method='qpbo')
    learner = learners.NSlackSSVM(model, C=0.01, n_jobs=-1)
    learner.fit(X, Y)
Ejemplo n.º 15
0
def train_svm(C=0.1, grid=False):
    """Train a linear SVM on SIFT bag-of-words superpixel features.

    Fits a chi2-approximated LinearSVC on the kTrain split, prints the
    training accuracy and superpixel evaluation, then evaluates on the
    kVal split.

    Parameters
    ----------
    C : float, default=0.1
        SVM regularization strength.
    grid : bool, default=False
        Unused in this variant.
    """
    pascal = PascalSegmentation()

    files_train = pascal.get_split("kTrain")
    # slic superpixels, ~100 per image
    superpixels = [
        slic_n(pascal.get_image(f), n_superpixels=100, compactness=10)
        for f in files_train
    ]
    bow = SiftBOW(pascal, n_words=1000, color_sift=True)
    data_train = bow.fit_transform(files_train, superpixels)

    data_train = add_global_descriptor(data_train)

    svm = LinearSVC(C=C, dual=False, class_weight='auto')
    chi2 = AdditiveChi2Sampler()

    X, y = np.vstack(data_train.X), np.hstack(data_train.Y)
    X = chi2.fit_transform(X)
    svm.fit(X, y)
    print(svm.score(X, y))
    eval_on_sp(pascal,
               data_train,
               [svm.predict(chi2.transform(x)) for x in data_train.X],
               print_results=True)

    # validation split, using the already-fitted BOW and chi2 transform
    files_val = pascal.get_split("kVal")
    superpixels_val = [
        slic_n(pascal.get_image(f), n_superpixels=100, compactness=10)
        for f in files_val
    ]
    data_val = bow.transform(files_val, superpixels_val)
    data_val = add_global_descriptor(data_val)
    eval_on_sp(pascal,
               data_val, [svm.predict(chi2.transform(x)) for x in data_val.X],
               print_results=True)

    tracer()  # drop into the debugger/tracer when done
Ejemplo n.º 16
0
def main(C=1, test=False):
    """Train a one-slack SSVM with an edge-feature CRF on CPMC superpixels.

    Parameters
    ----------
    C : float, default=1
        SSVM regularization constant; also part of the experiment name.
    test : bool, default=False
        If True, train on 'train' (and would validate on 'val');
        otherwise use the kTrain/kVal splits.
    """
    ds = PascalSegmentation()
    # load training data
    edge_type = "pairwise"
    if test:
        which = "train"
    else:
        which = "kTrain"
    data_train = load_pascal(which=which, sp_type="cpmc")

    data_train = add_edges(data_train, edge_type)
    data_train = add_edge_features(ds, data_train)
    data_train = discard_void(ds, data_train, ds.void_label)

    print("number of samples: %s" % len(data_train.X))
    # inverse-frequency class weights, rescaled to sum to 21 classes
    class_weights = 1. / np.bincount(np.hstack(data_train.Y))
    class_weights *= 21. / np.sum(class_weights)
    print(class_weights)
    #model = crfs.GraphCRF(n_states=n_states,
    #n_features=data_train.X[0][0].shape[1],
    #inference_method='qpbo', class_weight=class_weights)
    model = crfs.EdgeFeatureGraphCRF(inference_method='qpbo',
                                     class_weight=class_weights,
                                     symmetric_edge_features=[0, 1],
                                     antisymmetric_edge_features=[2])
    experiment_name = "cpmc_edge_features_trainval_new_%f" % C
    #warm_start = True
    warm_start = False
    ssvm = learners.OneSlackSSVM(model,
                                 verbose=2,
                                 C=C,
                                 max_iter=100000,
                                 n_jobs=-1,
                                 tol=0.0001,
                                 show_loss_every=50,
                                 inference_cache=50,
                                 cache_tol='auto',
                                 logger=SaveLogger(experiment_name + ".pickle",
                                                   save_every=100),
                                 inactive_threshold=1e-5,
                                 break_on_bad=False,
                                 inactive_window=50,
                                 switch_to=None)
    #ssvm = learners.SubgradientSSVM(
    #model, verbose=3, C=C, max_iter=10000, n_jobs=-1, show_loss_every=10,
    #logger=SaveLogger(experiment_name + ".pickle", save_every=10),
    #momentum=0, learning_rate=0.1, decay_exponent=1, decay_t0=100)

    if warm_start:
        # resume from a previously saved learner and refit with exact
        # (branch-and-bound) inference
        ssvm = SaveLogger(experiment_name + ".pickle").load()
        ssvm.logger = SaveLogger(file_name=experiment_name + "_refit.pickle",
                                 save_every=10)
        #ssvm.learning_rate = 0.000001

        ssvm.model.inference_method = 'ad3bb'
        #ssvm.n_jobs = 1

    ssvm.fit(data_train.X, data_train.Y, warm_start=warm_start)
    # NOTE(review): this early return makes everything below dead code;
    # remove it to run the validation evaluation.
    return

    print("fit finished!")
    if test:
        data_val = load_pascal('val')
    else:
        data_val = load_pascal('kVal')

    data_val = add_edges(data_val, edge_type)
    data_val = add_edge_features(ds, data_val, more_colors=True)
    eval_on_sp(ds, data_val, ssvm.predict(data_val.X), print_results=True)
Ejemplo n.º 17
0
def main():
    """CLI entry point: evaluate or plot a saved SSVM model.

    Usage: <script> MODEL_PICKLE [acc|plot] [nyu|pascal|msrc] [PLOT_DIR]

    Loads the pickled learner from argv[1], prints its training
    diagnostics (iterations, objective, cached-constraint gap), then
    either reports train/val accuracy ('acc', the default) or writes
    prediction plots for the validation data ('plot', requires a folder
    name in argv[4]).
    """
    argv = sys.argv
    print("loading %s ..." % argv[1])
    ssvm = SaveLogger(file_name=argv[1]).load()
    if hasattr(ssvm, 'problem'):
        # backwards compatibility: 'problem' was renamed to 'model'
        ssvm.model = ssvm.problem
    print(ssvm)
    if hasattr(ssvm, 'base_ssvm'):
        # unwrap latent SSVM wrappers
        ssvm = ssvm.base_ssvm
    print("Iterations: %d" % len(ssvm.objective_curve_))
    print("Objective: %f" % ssvm.objective_curve_[-1])
    inference_run = None
    if hasattr(ssvm, 'cached_constraint_'):
        # gap between primal objective (on real inference runs) and dual
        inference_run = ~np.array(ssvm.cached_constraint_)
        print("Gap: %f" %
              (np.array(ssvm.primal_objective_curve_)[inference_run][-1] -
               ssvm.objective_curve_[-1]))

    # default action and dataset when not given on the command line
    if len(argv) <= 2:
        argv.append("acc")

    if len(argv) <= 3:
        dataset = 'nyu'
    else:
        dataset = argv[3]

    if argv[2] == 'acc':

        ssvm.n_jobs = 1

        for data_str, title in zip(["train", "val"],
                                   ["TRAINING SET", "VALIDATION SET"]):
            print(title)
            edge_type = "pairwise"

            # load the dataset-specific data and helpers
            if dataset == 'msrc':
                ds = MSRC21Dataset()
                data = msrc_helpers.load_data(data_str, which="piecewise_new")
                #data = add_kraehenbuehl_features(data, which="train_30px")
                data = msrc_helpers.add_kraehenbuehl_features(data, which="train")
            elif dataset == 'pascal':
                ds = PascalSegmentation()
                data = pascal_helpers.load_pascal(data_str, sp_type="cpmc")
                #data = pascal_helpers.load_pascal(data_str)
            elif dataset == 'nyu':
                ds = NYUSegmentation()
                data = nyu_helpers.load_nyu(data_str, n_sp=500, sp='rgbd')
            else:
                raise ValueError("Excepted dataset to be 'nyu', 'pascal' or 'msrc',"
                                 " got %s." % dataset)

            # prepare the data to match the model class that was trained
            if type(ssvm.model).__name__ == "LatentNodeCRF":
                print("making data hierarchical")
                data = pascal_helpers.make_cpmc_hierarchy(ds, data)
                #data = make_hierarchical_data(
                    #ds, data, lateral=True, latent=True, latent_lateral=False,
                    #add_edge_features=False)
            else:
                data = add_edges(data, edge_type)

            if type(ssvm.model).__name__ == 'EdgeFeatureGraphCRF':
                data = add_edge_features(ds, data, depth_diff=True, normal_angles=True)

            if type(ssvm.model).__name__ == "EdgeFeatureLatentNodeCRF":
                data = add_edge_features(ds, data)
                data = make_hierarchical_data(
                    ds, data, lateral=True, latent=True, latent_lateral=False,
                    add_edge_features=True)
            #ssvm.model.inference_method = "qpbo"
            Y_pred = ssvm.predict(data.X)

            if isinstance(ssvm.model, LatentNodeCRF):
                # map latent states back to observable labels
                Y_pred = [ssvm.model.label_from_latent(h) for h in Y_pred]
            Y_flat = np.hstack(data.Y)

            # accuracy over non-void superpixels
            print("superpixel accuracy: %.2f"
                  % (np.mean((np.hstack(Y_pred) == Y_flat)[Y_flat != ds.void_label]) * 100))

            if dataset == 'msrc':
                res = msrc_helpers.eval_on_pixels(data, Y_pred,
                                                  print_results=True)
                print("global: %.2f, average: %.2f" % (res['global'] * 100,
                                                       res['average'] * 100))
                #msrc_helpers.plot_confusion_matrix(res['confusion'])
            else:
                hamming, jaccard = eval_on_sp(ds, data, Y_pred,
                                              print_results=True)
                print("Jaccard: %.2f, Hamming: %.2f" % (jaccard.mean(),
                                                        hamming.mean()))

        plt.show()

    elif argv[2] == 'plot':
        data_str = 'val'
        if len(argv) <= 4:
            raise ValueError("Need a folder name for plotting.")
        if dataset == "msrc":
            ds = MSRC21Dataset()
            data = msrc_helpers.load_data(data_str, which="piecewise")
            data = add_edges(data, independent=False)
            data = msrc_helpers.add_kraehenbuehl_features(
                data, which="train_30px")
            data = msrc_helpers.add_kraehenbuehl_features(
                data, which="train")

        elif dataset == "pascal":
            ds = PascalSegmentation()
            data = pascal_helpers.load_pascal("val")
            data = add_edges(data)

        elif dataset == "nyu":
            ds = NYUSegmentation()
            data = nyu_helpers.load_nyu("test")
            data = add_edges(data)

        if type(ssvm.model).__name__ == 'EdgeFeatureGraphCRF':
            data = add_edge_features(ds, data, depth_diff=True, normal_angles=True)
        Y_pred = ssvm.predict(data.X)

        plot_results(ds, data, Y_pred, argv[4])
Ejemplo n.º 18
0
def eval_spixel_best_possible():
    """Score the superpixel oracle: evaluate the ground-truth superpixel
    labels against themselves (upper bound on superpixel accuracy)."""
    pascal = PascalSegmentation()
    data = load_pascal('kTrain', sp_type='cpmc')
    eval_on_sp(pascal, data, data.Y, print_results=True)
Ejemplo n.º 19
0
def get_kraehenbuehl_pot_sp(filename, superpixels):
    """Project the pixel-wise Kraehenbuehl potentials for `filename`
    onto the given superpixels."""
    pixel_probs = load_kraehenbuehl(filename)
    dataset = PascalSegmentation()
    return probabilities_on_sp(dataset, pixel_probs, superpixels)
Ejemplo n.º 20
0
def svm_on_segments(C=.1, learning_rate=.001, subgradient=False):
    """Train a latent-node CRF SSVM on the CPMC segment hierarchy.

    Caches the prepared training data in data_train_XY.pickle so
    repeated runs skip the expensive loading step.

    Parameters
    ----------
    C : float, default=0.1
        SSVM regularization constant.
    learning_rate : float, default=0.001
        Learning rate; only used by the subgradient learner.
    subgradient : bool, default=False
        If True use LatentSubgradientSSVM, otherwise a LatentSSVM
        wrapping an n-slack base learner.
    """
    data_file = "data_train_XY.pickle"
    ds = PascalSegmentation()
    if os.path.exists(data_file):
        # The original opened the pickle in text mode and never closed
        # the handle; pickles are binary and should be read with 'rb'
        # inside a context manager.
        with open(data_file, 'rb') as f:
            X_, Y_ = cPickle.load(f)
    else:
        # load and prepare data
        data_train = load_pascal("train", sp_type="cpmc")
        data_train = make_cpmc_hierarchy(ds, data_train)
        data_train = discard_void(ds, data_train)
        X_, Y_ = data_train.X, data_train.Y
        with open(data_file, 'wb') as f:
            cPickle.dump((X_, Y_), f, -1)  # highest pickle protocol

    # inverse-frequency class weights, rescaled to sum to 21 classes
    class_weights = 1. / np.bincount(np.hstack(Y_))
    class_weights *= 21. / np.sum(class_weights)
    experiment_name = ("latent_25_cpmc_%f_qpbo_n_slack_blub3" % C)
    logger = SaveLogger(experiment_name + ".pickle", save_every=10)
    model = LatentNodeCRF(n_hidden_states=25,
                          inference_method='qpbo',
                          class_weight=class_weights,
                          latent_node_features=False)
    if subgradient:
        ssvm = learners.LatentSubgradientSSVM(model,
                                              C=C,
                                              verbose=1,
                                              show_loss_every=10,
                                              logger=logger,
                                              n_jobs=-1,
                                              learning_rate=learning_rate,
                                              decay_exponent=1,
                                              momentum=0.,
                                              max_iter=100000,
                                              decay_t0=100)
    else:
        latent_logger = SaveLogger("lssvm_" + experiment_name + "_%d.pickle",
                                   save_every=1)
        #base_ssvm = learners.OneSlackSSVM(
        #model, verbose=2, C=C, max_iter=100, n_jobs=-1, tol=0.001,
        #show_loss_every=200, inference_cache=50, logger=logger,
        #cache_tol='auto', inactive_threshold=1e-5, break_on_bad=False,
        #switch_to=('ogm', {'alg': 'dd'}))
        base_ssvm = learners.NSlackSSVM(model,
                                        verbose=4,
                                        C=C,
                                        n_jobs=-1,
                                        tol=0.1,
                                        show_loss_every=20,
                                        logger=logger,
                                        inactive_threshold=1e-8,
                                        break_on_bad=False,
                                        batch_size=36,
                                        inactive_window=10,
                                        switch_to=('ad3', {
                                            'branch_and_bound': True
                                        }))
        ssvm = learners.LatentSSVM(base_ssvm,
                                   logger=latent_logger,
                                   latent_iter=3)
    #warm_start = True
    warm_start = False
    if warm_start:
        # resume a previously saved learner and keep training
        ssvm = logger.load()
        ssvm.logger = SaveLogger(experiment_name + "_retrain.pickle",
                                 save_every=10)
        ssvm.max_iter = 10000
        ssvm.decay_exponent = 1
        #ssvm.decay_t0 = 1000
        #ssvm.learning_rate = 0.00001
        #ssvm.momentum = 0

    X_, Y_ = shuffle(X_, Y_)
    #ssvm.fit(data_train.X, data_train.Y)
    ssvm.fit(X_, Y_)
    #H_init = [np.hstack([y, np.random.randint(21, 26)]) for y in Y_]
    #ssvm.fit(X_, Y_, H_init=H_init)
    print("fit finished!")