Code Example #1
def load_data_global_probs(dataset="train", latent=False):
    def padded_vstack(blub):
        a, b = blub
        if a.shape[0] > b.shape[0]:
            b = np.hstack([b, np.zeros((b.shape[0], a.shape[1] - b.shape[1]))])
        return np.vstack([a, b])

    data = load_data(dataset=dataset, which="piecewise")
    data = add_kraehenbuehl_features(data, which="train_30px")
    data = add_kraehenbuehl_features(data, which="train")
    data = add_edges(data)
    if latent:
        data = add_top_node(data)
    descs = np.load("/home/user/amueller/checkout/superpixel_crf/"
                    "global_probs_%s.npy" % dataset)
    X = []
    for x, glob_desc in zip(data.X, descs):
        if latent:
            x_ = padded_vstack([x[0],
                                np.repeat(sigm(glob_desc)[np.newaxis, 1:],
                                          x[2], axis=0)])
        else:
            x_ = np.hstack([x[0], np.repeat(sigm(glob_desc)[np.newaxis, :],
                                            x[0].shape[0], axis=0)])
            # add features for latent node
        if len(x) == 3:
            X.append((x_, x[1], x[2]))
        else:
            X.append((x_, x[1]))

    return DataBunch(X, data.Y, data.file_names, data.superpixels)
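
A quick usage sketch for the loader above (the helpers, sigm, and the hard-coded .npy path are assumed to be available in this project's environment; this only illustrates how the returned DataBunch is typically consumed):

# Load training data with the per-image global probabilities appended to
# every superpixel's feature vector; latent=False keeps the flat graphs.
data = load_data_global_probs(dataset="train", latent=False)

# Each entry of data.X pairs the augmented node features with the graph
# edges; data.Y holds the corresponding superpixel labels.
for x, y in zip(data.X[:3], data.Y[:3]):
    print(x[0].shape, len(y))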
Code Example #2
def with_aureliens_potentials_svm(test=False):
    data = load_data('train', independent=True)
    data = add_kraehenbuehl_features(data)
    features = [x[0] for x in data.X]
    y = np.hstack(data.Y)

    if test:
        data_ = load_data('val', independent=True)
        data_ = add_kraehenbuehl_features(data_)
        features.extend([x[0] for x in data_.X])  # features from the newly loaded validation data
        y = np.hstack([y, np.hstack(data_.Y)])

    new_features_flat = np.vstack(features)
    from sklearn.svm import LinearSVC
    print("training svm")
    svm = LinearSVC(C=.001, dual=False, class_weight='auto')
    svm.fit(new_features_flat[y != 21], y[y != 21])
    print(svm.score(new_features_flat[y != 21], y[y != 21]))
    print("evaluating")
    eval_on_pixels(data, [svm.predict(x) for x in features])

    if test:
        print("test data")
        data_val = load_data('test', independent=True)
    else:
        data_val = load_data('val', independent=True)

    data_val = add_kraehenbuehl_features(data_val)
    features_val = [x[0] for x in data_val.X]
    eval_on_pixels(data_val, [svm.predict(x) for x in features_val])
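
The void label 21 is masked out before fitting, and rare classes are up-weighted via class_weight. Below is a self-contained sketch of that pattern on synthetic data; note that class_weight='auto' was later renamed 'balanced' in scikit-learn, so the spelling depends on the installed version:

import numpy as np
from sklearn.svm import LinearSVC

# Synthetic stand-ins for the per-superpixel unary features and labels.
rng = np.random.RandomState(0)
features = rng.rand(200, 10)
labels = rng.randint(0, 22, size=200)  # label 21 marks "void"

mask = labels != 21  # drop void superpixels, as in the function above
svm = LinearSVC(C=0.001, dual=False, class_weight='balanced')
svm.fit(features[mask], labels[mask])
print(svm.score(features[mask], labels[mask]))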
Code Example #3
File: msrc_svm.py Project: scutan90/segmentation-1
def train_svm(test=False, C=0.01, gamma=.1, grid=False):
    which = "piecewise"

    data_train = load_data(which=which)
    data_train = add_kraehenbuehl_features(data_train, which="train_30px")
    data_train = add_kraehenbuehl_features(data_train, which="train")
    data_train_novoid = discard_void(data_train, 21)
    if grid and test:
        raise ValueError("Don't you dare grid-search on the test-set!")

    svm = LinearSVC(C=C, class_weight='auto', multi_class='crammer_singer',
                    dual=False)
    #svm = LogisticRegression(C=C, class_weight='auto')
    data_val = load_data('val', which=which)
    data_val = add_kraehenbuehl_features(data_val, which="train_30px")
    data_val = add_kraehenbuehl_features(data_val, which="train")
    data_val_novoid = discard_void(data_val, 21)

    if grid:
        n_samples_train = len(np.hstack(data_train_novoid.Y))
        n_samples_val = len(np.hstack(data_val_novoid.Y))
        cv = SimpleSplitCV(n_samples_train, n_samples_val)
        data_trainval = concatenate_datasets(data_train_novoid,
                                             data_val_novoid)

        from sklearn.grid_search import GridSearchCV
        #from sklearn.grid_search import RandomizedSearchCV
        #from scipy.stats import expon, gamma
        #param_grid = {'C': 10. ** np.arange(1, 4), 'gamma': 10. **
                      #np.arange(-3, 1)}
        param_grid = {'C': 10. ** np.arange(-6, 2)}
        scorer = PixelwiseScorer(data=data_val)
        grid = GridSearchCV(svm, param_grid=param_grid, verbose=10, n_jobs=-1,
                            cv=cv, scoring=scorer, refit=False)
        grid.fit(np.vstack(data_trainval.X),
                 np.hstack(data_trainval.Y))
        print(grid.best_params_)
        print(grid.best_score_)
    else:
        print(svm)
        if test:
            data_train_novoid = concatenate_datasets(data_train_novoid,
                                                     data_val_novoid)

        print(np.vstack(data_train_novoid.X).shape)
        svm.fit(np.vstack(data_train_novoid.X), np.hstack(data_train_novoid.Y))
        if test:
            data_test = load_data("test", which=which)
        else:
            data_test = load_data("val", which=which)
        data_test = add_kraehenbuehl_features(data_test, which="train_30px")
        data_test = add_kraehenbuehl_features(data_test, which="train")
        scorer = PixelwiseScorer(data=data_test)
        scorer(svm, None, None)

    return svm
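
SimpleSplitCV is project-internal; judging from its arguments it yields a single predefined train/validation split over the concatenated data. A sketch of that idea with scikit-learn's PredefinedSplit (an assumption about the behaviour, not the actual implementation):

import numpy as np
from sklearn.model_selection import PredefinedSplit

def simple_split_cv(n_samples_train, n_samples_val):
    # -1 marks samples that stay in the training fold; fold id 0 marks the
    # validation samples, so exactly one train/val split is produced.
    test_fold = np.concatenate([np.full(n_samples_train, -1),
                                np.zeros(n_samples_val)])
    return PredefinedSplit(test_fold)

cv = simple_split_cv(1000, 200)
train_idx, val_idx = next(iter(cv.split()))
print(len(train_idx), len(val_idx))  # 1000 200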
Code Example #4
def load_data_global_probs(dataset="train", latent=False):
    def padded_vstack(blub):
        a, b = blub
        if a.shape[0] > b.shape[0]:
            b = np.hstack([b, np.zeros((b.shape[0], a.shape[1] - b.shape[1]))])
        return np.vstack([a, b])

    data = load_data(dataset=dataset, which="piecewise")
    data = add_kraehenbuehl_features(data, which="train_30px")
    data = add_kraehenbuehl_features(data, which="train")
    data = add_edges(data)
    if latent:
        data = add_top_node(data)
    descs = np.load("/home/user/amueller/checkout/superpixel_crf/"
                    "global_probs_%s.npy" % dataset)
    X = []
    for x, glob_desc in zip(data.X, descs):
        if latent:
            x_ = padded_vstack([
                x[0],
                np.repeat(sigm(glob_desc)[np.newaxis, 1:], x[2], axis=0)
            ])
        else:
            x_ = np.hstack([
                x[0],
                np.repeat(sigm(glob_desc)[np.newaxis, :],
                          x[0].shape[0],
                          axis=0)
            ])
            # add features for latent node
        if len(x) == 3:
            X.append((x_, x[1], x[2]))
        else:
            X.append((x_, x[1]))

    return DataBunch(X, data.Y, data.file_names, data.superpixels)
Code Example #5
File: msrc_crf.py Project: scutan90/segmentation-1
def main(C=1, test=False):
    # load training data
    #independent = True
    independent = False
    data_train = load_data(which="piecewise")
    data_train = add_edges(data_train,
                           independent=independent,
                           fully_connected=True)
    data_train = add_kraehenbuehl_features(data_train, which="train_30px")
    data_train = add_kraehenbuehl_features(data_train, which="train")

    #data_train = load_data_global_probs()

    if not independent:
        data_train = add_edge_features(data_train)

    data_train = discard_void(data_train, 21)

    if test:
        data_val = load_data("val", which="piecewise_train")
        data_val = add_edges(data_val, independent=independent)
        data_val = add_kraehenbuehl_features(data_val, which="train_30px")
        data_val = add_kraehenbuehl_features(data_val, which="train")
        data_val = add_edge_features(data_val)
        data_val = discard_void(data_val, 21)
        data_train = concatenate_datasets(data_train, data_val)

        #X_.extend(data_val.X)
        #Y_.extend(data_val.Y)

    n_states = 21
    print("number of samples: %s" % len(data_train.X))
    class_weights = 1. / np.bincount(np.hstack(data_train.Y))
    #class_weights[21] = 0
    class_weights *= 21. / np.sum(class_weights)
    #class_weights = np.ones(n_states)
    print(class_weights)
    #model = crfs.GraphCRF(n_states=n_states,
    #n_features=data_train.X[0][0].shape[1],
    #inference_method='qpbo', class_weight=class_weights)
    model = crfs.EdgeFeatureGraphCRF(n_states=n_states,
                                     n_features=data_train.X[0][0].shape[1],
                                     inference_method='qpbo',
                                     class_weight=class_weights,
                                     n_edge_features=3,
                                     symmetric_edge_features=[0, 1],
                                     antisymmetric_edge_features=[2])
    experiment_name = "fully_connected_%f" % C
    #warm_start = True
    warm_start = False
    ssvm = learners.OneSlackSSVM(model,
                                 verbose=2,
                                 C=C,
                                 max_iter=100000,
                                 n_jobs=-1,
                                 tol=0.0001,
                                 show_loss_every=50,
                                 inference_cache=50,
                                 cache_tol='auto',
                                 logger=SaveLogger(experiment_name + ".pickle",
                                                   save_every=100),
                                 inactive_threshold=1e-5,
                                 break_on_bad=False,
                                 inactive_window=50,
                                 switch_to_ad3=False)
    #ssvm = learners.SubgradientSSVM(
    #model, verbose=3, C=C, max_iter=10000, n_jobs=-1, show_loss_every=10,
    #logger=SaveLogger(experiment_name + ".pickle", save_every=10),
    #momentum=0, learning_rate=0.001, decay_exponent=1)

    if warm_start:
        ssvm = SaveLogger(experiment_name + ".pickle").load()
        ssvm.logger = SaveLogger(file_name=experiment_name + "_refit.pickle",
                                 save_every=10)
        ssvm.learning_rate = 0.000001
        #ssvm.model.inference_method = 'ad3'
        #ssvm.n_jobs = 1

    ssvm.fit(data_train.X, data_train.Y, warm_start=warm_start)
    print("fit finished!")
    return
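
For readers unfamiliar with pystruct's data layout: each sample passed to EdgeFeatureGraphCRF is a tuple (node_features, edges, edge_features), and the label array has one entry per node. A toy sketch, assuming pystruct and the qpbo inference backend used above are installed:

import numpy as np
from pystruct.models import EdgeFeatureGraphCRF
from pystruct.learners import OneSlackSSVM

def make_sample(label, rng):
    # A 3-node chain graph with 2-d node features and one edge feature.
    node_features = rng.rand(3, 2) + label
    edges = np.array([[0, 1], [1, 2]])
    edge_features = np.ones((2, 1))
    return (node_features, edges, edge_features), np.full(3, label, dtype=int)

rng = np.random.RandomState(0)
samples = [make_sample(label, rng) for label in (0, 1, 0, 1)]
X = [s[0] for s in samples]
Y = [s[1] for s in samples]

model = EdgeFeatureGraphCRF(n_states=2, n_features=2, n_edge_features=1,
                            inference_method='qpbo')
ssvm = OneSlackSSVM(model, C=0.1, max_iter=100)
ssvm.fit(X, Y)
print(ssvm.predict(X))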
Code Example #6
def svm_on_segments(C=.1, learning_rate=.001, subgradient=True):
    # load and prepare data
    lateral = True
    latent = True
    test = False
    #data_train = load_data(which="piecewise")
    #data_train = add_edges(data_train, independent=False)
    #data_train = add_kraehenbuehl_features(data_train, which="train_30px")
    #data_train = add_kraehenbuehl_features(data_train, which="train")
    #if lateral:
        #data_train = add_edge_features(data_train)
    data_train = load_data_global_probs(latent=latent)
    X_org_ = data_train.X
    #data_train = make_hierarchical_data(data_train, lateral=lateral,
                                        #latent=latent, latent_lateral=True)
    data_train = discard_void(data_train, 21, latent_features=True)
    X_, Y_ = data_train.X, data_train.Y
    # remove edges
    if not lateral:
        X_org_ = [(x[0], np.zeros((0, 2), dtype=np.int)) for x in X_org_]

    if test:
        data_val = load_data('val', which="piecewise")
        data_val = add_edges(data_val, independent=False)
        data_val = add_kraehenbuehl_features(data_val)
        data_val = make_hierarchical_data(data_val, lateral=lateral,
                                          latent=latent)
        data_val = discard_void(data_val, 21)

        X_.extend(data_val.X)
        Y_.extend(data_val.Y)

    n_states = 21
    class_weights = 1. / np.bincount(np.hstack(Y_))
    class_weights *= 21. / np.sum(class_weights)
    experiment_name = ("latent5_features_C%f_top_node" % C)
    logger = SaveLogger(experiment_name + ".pickle", save_every=10)
    if latent:
        model = LatentNodeCRF(n_labels=n_states,
                              n_features=data_train.X[0][0].shape[1],
                              n_hidden_states=5, inference_method='qpbo' if
                              lateral else 'dai', class_weight=class_weights,
                              latent_node_features=True)
        if subgradient:
            ssvm = learners.LatentSubgradientSSVM(
                model, C=C, verbose=1, show_loss_every=10, logger=logger,
                n_jobs=-1, learning_rate=learning_rate, decay_exponent=1,
                momentum=0., max_iter=100000)
        else:
            latent_logger = SaveLogger("lssvm_" + experiment_name +
                                       "_%d.pickle", save_every=1)
            base_ssvm = learners.OneSlackSSVM(
                model, verbose=2, C=C, max_iter=100000, n_jobs=-1, tol=0.001,
                show_loss_every=200, inference_cache=50, logger=logger,
                cache_tol='auto', inactive_threshold=1e-5, break_on_bad=False,
                switch_to_ad3=True)
            ssvm = learners.LatentSSVM(base_ssvm, logger=latent_logger)
        warm_start = False
        if warm_start:
            ssvm = logger.load()
            ssvm.logger = SaveLogger(experiment_name + "_retrain.pickle",
                                     save_every=10)
            ssvm.max_iter = 100000
            ssvm.learning_rate = 0.00001
            ssvm.momentum = 0
    else:
        #model = GraphCRF(n_states=n_states,
                         #n_features=data_train.X[0][0].shape[1],
                         #inference_method='qpbo' if lateral else 'dai',
                         #class_weight=class_weights)
        model = EdgeFeatureGraphCRF(n_states=n_states,
                                    n_features=data_train.X[0][0].shape[1],
                                    inference_method='qpbo' if lateral else
                                    'dai', class_weight=class_weights,
                                    n_edge_features=4,
                                    symmetric_edge_features=[0, 1],
                                    antisymmetric_edge_features=[2])
        ssvm = learners.OneSlackSSVM(
            model, verbose=2, C=C, max_iter=100000, n_jobs=-1,
            tol=0.0001, show_loss_every=200, inference_cache=50, logger=logger,
            cache_tol='auto', inactive_threshold=1e-5, break_on_bad=False)

    #ssvm = logger.load()

    X_, Y_ = shuffle(X_, Y_)
    #ssvm.fit(data_train.X, data_train.Y)
    #ssvm.fit(X_, Y_, warm_start=warm_start)
    ssvm.fit(X_, Y_)
    print("fit finished!")
Code Example #7
File: msrc_crf.py Project: amueller/segmentation
def main(C=1, test=False):
    # load training data
    #independent = True
    independent = False
    data_train = load_data(which="piecewise")
    data_train = add_edges(data_train, independent=independent,
                           fully_connected=True)
    data_train = add_kraehenbuehl_features(data_train, which="train_30px")
    data_train = add_kraehenbuehl_features(data_train, which="train")

    #data_train = load_data_global_probs()

    if not independent:
        data_train = add_edge_features(data_train)

    data_train = discard_void(data_train, 21)

    if test:
        data_val = load_data("val", which="piecewise_train")
        data_val = add_edges(data_val, independent=independent)
        data_val = add_kraehenbuehl_features(data_val, which="train_30px")
        data_val = add_kraehenbuehl_features(data_val, which="train")
        data_val = add_edge_features(data_val)
        data_val = discard_void(data_val, 21)
        data_train = concatenate_datasets(data_train, data_val)

        #X_.extend(data_val.X)
        #Y_.extend(data_val.Y)

    n_states = 21
    print("number of samples: %s" % len(data_train.X))
    class_weights = 1. / np.bincount(np.hstack(data_train.Y))
    #class_weights[21] = 0
    class_weights *= 21. / np.sum(class_weights)
    #class_weights = np.ones(n_states)
    print(class_weights)
    #model = crfs.GraphCRF(n_states=n_states,
                          #n_features=data_train.X[0][0].shape[1],
                          #inference_method='qpbo', class_weight=class_weights)
    model = crfs.EdgeFeatureGraphCRF(n_states=n_states,
                                     n_features=data_train.X[0][0].shape[1],
                                     inference_method='qpbo',
                                     class_weight=class_weights,
                                     n_edge_features=3,
                                     symmetric_edge_features=[0, 1],
                                     antisymmetric_edge_features=[2])
    experiment_name = "fully_connected_%f" % C
    #warm_start = True
    warm_start = False
    ssvm = learners.OneSlackSSVM(
        model, verbose=2, C=C, max_iter=100000, n_jobs=-1,
        tol=0.0001, show_loss_every=50, inference_cache=50, cache_tol='auto',
        logger=SaveLogger(experiment_name + ".pickle", save_every=100),
        inactive_threshold=1e-5, break_on_bad=False, inactive_window=50,
        switch_to_ad3=False)
    #ssvm = learners.SubgradientSSVM(
        #model, verbose=3, C=C, max_iter=10000, n_jobs=-1, show_loss_every=10,
        #logger=SaveLogger(experiment_name + ".pickle", save_every=10),
        #momentum=0, learning_rate=0.001, decay_exponent=1)

    if warm_start:
        ssvm = SaveLogger(experiment_name + ".pickle").load()
        ssvm.logger = SaveLogger(
            file_name=experiment_name + "_refit.pickle",
            save_every=10)
        ssvm.learning_rate = 0.000001
        #ssvm.model.inference_method = 'ad3'
        #ssvm.n_jobs = 1

    ssvm.fit(data_train.X, data_train.Y, warm_start=warm_start)
    print("fit finished!")
    return
Code Example #8
def svm_on_segments(C=.1, learning_rate=.001, subgradient=True):
    # load and prepare data
    lateral = True
    latent = True
    test = False
    #data_train = load_data(which="piecewise")
    #data_train = add_edges(data_train, independent=False)
    #data_train = add_kraehenbuehl_features(data_train, which="train_30px")
    #data_train = add_kraehenbuehl_features(data_train, which="train")
    #if lateral:
    #data_train = add_edge_features(data_train)
    data_train = load_data_global_probs(latent=latent)
    X_org_ = data_train.X
    #data_train = make_hierarchical_data(data_train, lateral=lateral,
    #latent=latent, latent_lateral=True)
    data_train = discard_void(data_train, 21, latent_features=True)
    X_, Y_ = data_train.X, data_train.Y
    # remove edges
    if not lateral:
        X_org_ = [(x[0], np.zeros((0, 2), dtype=np.int)) for x in X_org_]

    if test:
        data_val = load_data('val', which="piecewise")
        data_val = add_edges(data_val, independent=False)
        data_val = add_kraehenbuehl_features(data_val)
        data_val = make_hierarchical_data(data_val,
                                          lateral=lateral,
                                          latent=latent)
        data_val = discard_void(data_val, 21)

        X_.extend(data_val.X)
        Y_.extend(data_val.Y)

    n_states = 21
    class_weights = 1. / np.bincount(np.hstack(Y_))
    class_weights *= 21. / np.sum(class_weights)
    experiment_name = ("latent5_features_C%f_top_node" % C)
    logger = SaveLogger(experiment_name + ".pickle", save_every=10)
    if latent:
        model = LatentNodeCRF(n_labels=n_states,
                              n_features=data_train.X[0][0].shape[1],
                              n_hidden_states=5,
                              inference_method='qpbo' if lateral else 'dai',
                              class_weight=class_weights,
                              latent_node_features=True)
        if subgradient:
            ssvm = learners.LatentSubgradientSSVM(model,
                                                  C=C,
                                                  verbose=1,
                                                  show_loss_every=10,
                                                  logger=logger,
                                                  n_jobs=-1,
                                                  learning_rate=learning_rate,
                                                  decay_exponent=1,
                                                  momentum=0.,
                                                  max_iter=100000)
        else:
            latent_logger = SaveLogger("lssvm_" + experiment_name +
                                       "_%d.pickle",
                                       save_every=1)
            base_ssvm = learners.OneSlackSSVM(model,
                                              verbose=2,
                                              C=C,
                                              max_iter=100000,
                                              n_jobs=-1,
                                              tol=0.001,
                                              show_loss_every=200,
                                              inference_cache=50,
                                              logger=logger,
                                              cache_tol='auto',
                                              inactive_threshold=1e-5,
                                              break_on_bad=False,
                                              switch_to_ad3=True)
            ssvm = learners.LatentSSVM(base_ssvm, logger=latent_logger)
        warm_start = False
        if warm_start:
            ssvm = logger.load()
            ssvm.logger = SaveLogger(experiment_name + "_retrain.pickle",
                                     save_every=10)
            ssvm.max_iter = 100000
            ssvm.learning_rate = 0.00001
            ssvm.momentum = 0
    else:
        #model = GraphCRF(n_states=n_states,
        #n_features=data_train.X[0][0].shape[1],
        #inference_method='qpbo' if lateral else 'dai',
        #class_weight=class_weights)
        model = EdgeFeatureGraphCRF(
            n_states=n_states,
            n_features=data_train.X[0][0].shape[1],
            inference_method='qpbo' if lateral else 'dai',
            class_weight=class_weights,
            n_edge_features=4,
            symmetric_edge_features=[0, 1],
            antisymmetric_edge_features=[2])
        ssvm = learners.OneSlackSSVM(model,
                                     verbose=2,
                                     C=C,
                                     max_iter=100000,
                                     n_jobs=-1,
                                     tol=0.0001,
                                     show_loss_every=200,
                                     inference_cache=50,
                                     logger=logger,
                                     cache_tol='auto',
                                     inactive_threshold=1e-5,
                                     break_on_bad=False)

    #ssvm = logger.load()

    X_, Y_ = shuffle(X_, Y_)
    #ssvm.fit(data_train.X, data_train.Y)
    #ssvm.fit(X_, Y_, warm_start=warm_start)
    ssvm.fit(X_, Y_)
    print("fit finished!")