Example #1
# Imports assumed for this example (the dataset helpers such as
# NYUSegmentation, load_nyu, add_edges, add_edge_features and discard_void
# come from the surrounding segmentation project, not from pystruct, so their
# module paths may differ):
import numpy as np

from pystruct import learners
import pystruct.models as crfs
from pystruct.utils import SaveLogger


def main(C=1):
    dataset = NYUSegmentation()
    # load training data
    data_train = load_nyu(n_sp=500, sp='rgbd')
    data_train = add_edges(data_train)
    data_train = add_edge_features(dataset, data_train, depth_diff=True, normal_angles=True)

    data_train = discard_void(dataset, data_train)

    n_states = 4.
    print("number of samples: %s" % len(data_train.X))
    class_weights = 1. / np.bincount(np.hstack(data_train.Y))
    class_weights *= n_states / np.sum(class_weights)
    #class_weights = np.ones(n_states)
    print(class_weights)
    #model = crfs.GraphCRF(n_states=n_states,
                          #n_features=data_train.X[0][0].shape[1],
                          #inference_method='qpbo', class_weight=class_weights)
    model = crfs.EdgeFeatureGraphCRF(inference_method='qpbo',
                                     class_weight=class_weights,
                                     n_edge_features=5,
                                     symmetric_edge_features=[0, 1])
    experiment_name = "rgbd_test%f" % C
    ssvm = learners.OneSlackSSVM(
        model, verbose=2, C=C, max_iter=100000, n_jobs=-1,
        tol=0.001, show_loss_every=100, inference_cache=50, cache_tol='auto',
        logger=SaveLogger(experiment_name + ".pickle", save_every=100),
        inactive_threshold=1e-5, break_on_bad=False, inactive_window=50,
        switch_to=("ad3", {'branch_and_bound':True}))

    ssvm.fit(data_train.X, data_train.Y)
    print("fit finished!")
    return
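
The inverse-frequency class weights above are rescaled so that they sum to the number of states, which keeps the average weight at 1 while up-weighting rare classes. A minimal, self-contained sketch of that computation on a made-up label vector (not the NYU data):

import numpy as np

# hypothetical flattened superpixel labels for a 4-class problem
y_all = np.array([0, 0, 0, 1, 1, 2, 3, 3, 3, 3])
n_states = 4

class_weights = 1. / np.bincount(y_all)             # inverse class frequency
class_weights *= n_states / np.sum(class_weights)   # rescale to sum to n_states
print(class_weights)   # rare classes get weights > 1, frequent ones < 1
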
Example #2
def svm_on_segments(C=.1, learning_rate=.001, subgradient=False):
    data_file = "data_train_XY.pickle"
    ds = PascalSegmentation()
    if os.path.exists(data_file):
        X_, Y_ = cPickle.load(open(data_file, 'rb'))
    else:
        # load and prepare data
        data_train = load_pascal("train", sp_type="cpmc")
        data_train = make_cpmc_hierarchy(ds, data_train)
        data_train = discard_void(ds, data_train)
        X_, Y_ = data_train.X, data_train.Y
        cPickle.dump((X_, Y_), open(data_file, 'wb'), -1)

    class_weights = 1. / np.bincount(np.hstack(Y_))
    class_weights *= 21. / np.sum(class_weights)
    experiment_name = ("latent_25_cpmc_%f_qpbo_n_slack_blub3" % C)
    logger = SaveLogger(experiment_name + ".pickle", save_every=10)
    model = LatentNodeCRF(n_hidden_states=25,
                          inference_method='qpbo',
                          class_weight=class_weights,
                          latent_node_features=False)
    if subgradient:
        ssvm = learners.LatentSubgradientSSVM(
            model, C=C, verbose=1, show_loss_every=10, logger=logger,
            n_jobs=-1, learning_rate=learning_rate, decay_exponent=1,
            momentum=0., max_iter=100000, decay_t0=100)
    else:
        latent_logger = SaveLogger("lssvm_" + experiment_name +
                                   "_%d.pickle", save_every=1)
        #base_ssvm = learners.OneSlackSSVM(
            #model, verbose=2, C=C, max_iter=100, n_jobs=-1, tol=0.001,
            #show_loss_every=200, inference_cache=50, logger=logger,
            #cache_tol='auto', inactive_threshold=1e-5, break_on_bad=False,
            #switch_to=('ogm', {'alg': 'dd'}))
        base_ssvm = learners.NSlackSSVM(
            model, verbose=4, C=C, n_jobs=-1, tol=0.1,
            show_loss_every=20, logger=logger, inactive_threshold=1e-8,
            break_on_bad=False, batch_size=36, inactive_window=10,
            switch_to=('ad3', {'branch_and_bound': True}))
        ssvm = learners.LatentSSVM(base_ssvm, logger=latent_logger,
                                   latent_iter=3)
    #warm_start = True
    warm_start = False
    if warm_start:
        ssvm = logger.load()
        ssvm.logger = SaveLogger(experiment_name + "_retrain.pickle",
                                 save_every=10)
        ssvm.max_iter = 10000
        ssvm.decay_exponent = 1
        #ssvm.decay_t0 = 1000
        #ssvm.learning_rate = 0.00001
        #ssvm.momentum = 0

    X_, Y_ = shuffle(X_, Y_)
    #ssvm.fit(data_train.X, data_train.Y)
    ssvm.fit(X_, Y_)
    #H_init = [np.hstack([y, np.random.randint(21, 26)]) for y in Y_]
    #ssvm.fit(X_, Y_, H_init=H_init)
    print("fit finished!")
Example #3
def main(C=1, test=False):
    ds = PascalSegmentation()
    # load training data
    edge_type = "pairwise"
    if test:
        which = "train"
    else:
        which = "kTrain"
    data_train = load_pascal(which=which, sp_type="cpmc")

    data_train = add_edges(data_train, edge_type)
    data_train = add_edge_features(ds, data_train)
    data_train = discard_void(ds, data_train, ds.void_label)

    print("number of samples: %s" % len(data_train.X))
    class_weights = 1. / np.bincount(np.hstack(data_train.Y))
    class_weights *= 21. / np.sum(class_weights)
    print(class_weights)
    #model = crfs.GraphCRF(n_states=n_states,
                          #n_features=data_train.X[0][0].shape[1],
                          #inference_method='qpbo', class_weight=class_weights)
    model = crfs.EdgeFeatureGraphCRF(inference_method='qpbo',
                                     class_weight=class_weights,
                                     symmetric_edge_features=[0, 1],
                                     antisymmetric_edge_features=[2])
    experiment_name = "cpmc_edge_features_trainval_new_%f" % C
    #warm_start = True
    warm_start = False
    ssvm = learners.OneSlackSSVM(
        model, verbose=2, C=C, max_iter=100000, n_jobs=-1,
        tol=0.0001, show_loss_every=50, inference_cache=50, cache_tol='auto',
        logger=SaveLogger(experiment_name + ".pickle", save_every=100),
        inactive_threshold=1e-5, break_on_bad=False, inactive_window=50,
        switch_to=None)
    #ssvm = learners.SubgradientSSVM(
        #model, verbose=3, C=C, max_iter=10000, n_jobs=-1, show_loss_every=10,
        #logger=SaveLogger(experiment_name + ".pickle", save_every=10),
        #momentum=0, learning_rate=0.1, decay_exponent=1, decay_t0=100)

    if warm_start:
        ssvm = SaveLogger(experiment_name + ".pickle").load()
        ssvm.logger = SaveLogger(
            file_name=experiment_name + "_refit.pickle",
            save_every=10)
        #ssvm.learning_rate = 0.000001

        ssvm.model.inference_method = 'ad3bb'
        #ssvm.n_jobs = 1

    ssvm.fit(data_train.X, data_train.Y, warm_start=warm_start)
    return  # NOTE: early return; the evaluation code below is never reached

    print("fit finished!")
    if test:
        data_val = load_pascal('val')
    else:
        data_val = load_pascal('kVal')

    data_val = add_edges(data_val, edge_type)
    data_val = add_edge_features(ds, data_val, more_colors=True)
    eval_on_sp(ds, data_val, ssvm.predict(data_val.X), print_results=True)
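
In the EdgeFeatureGraphCRF above, symmetric_edge_features=[0, 1] marks edge-feature dimensions whose pairwise potentials are constrained to be symmetric in the two node labels, while antisymmetric_edge_features=[2] constrains the third dimension's potentials to be antisymmetric, the natural choice for a signed quantity such as a vertical offset. The actual features produced by add_edge_features are project-specific; a hypothetical sketch of a matching 3-dimensional edge feature (constant bias, color similarity, signed vertical offset):

import numpy as np


def make_edge_features(mean_colors, centroids, edges):
    """Toy edge features: [bias, color similarity, signed vertical offset]."""
    feats = []
    for i, j in edges:
        color_sim = np.exp(-np.linalg.norm(mean_colors[i] - mean_colors[j]))
        dy = centroids[i][1] - centroids[j][1]  # sign flips if the edge is reversed
        feats.append([1.0, color_sim, dy])
    return np.array(feats)


edges = np.array([[0, 1], [1, 2]])
mean_colors = np.random.rand(3, 3)  # RGB means of 3 superpixels
centroids = np.random.rand(3, 2)    # (x, y) centroids in [0, 1]
print(make_edge_features(mean_colors, centroids, edges))
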
Example #4
def svm_on_segments(C=.1, learning_rate=.001, subgradient=True):
    # load and prepare data
    lateral = True
    latent = True
    test = False
    #data_train = load_data(which="piecewise")
    #data_train = add_edges(data_train, independent=False)
    #data_train = add_kraehenbuehl_features(data_train, which="train_30px")
    #data_train = add_kraehenbuehl_features(data_train, which="train")
    #if lateral:
        #data_train = add_edge_features(data_train)
    data_train = load_data_global_probs(latent=latent)
    X_org_ = data_train.X
    #data_train = make_hierarchical_data(data_train, lateral=lateral,
                                        #latent=latent, latent_lateral=True)
    data_train = discard_void(data_train, 21, latent_features=True)
    X_, Y_ = data_train.X, data_train.Y
    # remove edges
    if not lateral:
        X_org_ = [(x[0], np.zeros((0, 2), dtype=np.int)) for x in X_org_]

    if test:
        data_val = load_data('val', which="piecewise")
        data_val = add_edges(data_val, independent=False)
        data_val = add_kraehenbuehl_features(data_val)
        data_val = make_hierarchical_data(data_val, lateral=lateral,
                                          latent=latent)
        data_val = discard_void(data_val, 21)

        X_.extend(data_val.X)
        Y_.extend(data_val.Y)

    n_states = 21
    class_weights = 1. / np.bincount(np.hstack(Y_))
    class_weights *= 21. / np.sum(class_weights)
    experiment_name = ("latent5_features_C%f_top_node" % C)
    logger = SaveLogger(experiment_name + ".pickle", save_every=10)
    if latent:
        model = LatentNodeCRF(n_labels=n_states,
                              n_features=data_train.X[0][0].shape[1],
                              n_hidden_states=5,
                              inference_method='qpbo' if lateral else 'dai',
                              class_weight=class_weights,
                              latent_node_features=True)
        if subgradient:
            ssvm = learners.LatentSubgradientSSVM(
                model, C=C, verbose=1, show_loss_every=10, logger=logger,
                n_jobs=-1, learning_rate=learning_rate, decay_exponent=1,
                momentum=0., max_iter=100000)
        else:
            latent_logger = SaveLogger("lssvm_" + experiment_name +
                                       "_%d.pickle", save_every=1)
            base_ssvm = learners.OneSlackSSVM(
                model, verbose=2, C=C, max_iter=100000, n_jobs=-1, tol=0.001,
                show_loss_every=200, inference_cache=50, logger=logger,
                cache_tol='auto', inactive_threshold=1e-5, break_on_bad=False,
                switch_to_ad3=True)
            ssvm = learners.LatentSSVM(base_ssvm, logger=latent_logger)
        warm_start = False
        if warm_start:
            ssvm = logger.load()
            ssvm.logger = SaveLogger(experiment_name + "_retrain.pickle",
                                     save_every=10)
            ssvm.max_iter = 100000
            ssvm.learning_rate = 0.00001
            ssvm.momentum = 0
    else:
        #model = GraphCRF(n_states=n_states,
                         #n_features=data_train.X[0][0].shape[1],
                         #inference_method='qpbo' if lateral else 'dai',
                         #class_weight=class_weights)
        model = EdgeFeatureGraphCRF(n_states=n_states,
                                    n_features=data_train.X[0][0].shape[1],
                                    inference_method='qpbo' if lateral else 'dai',
                                    class_weight=class_weights,
                                    n_edge_features=4,
                                    symmetric_edge_features=[0, 1],
                                    antisymmetric_edge_features=[2])
        ssvm = learners.OneSlackSSVM(
            model, verbose=2, C=C, max_iter=100000, n_jobs=-1,
            tol=0.0001, show_loss_every=200, inference_cache=50, logger=logger,
            cache_tol='auto', inactive_threshold=1e-5, break_on_bad=False)

    #ssvm = logger.load()

    X_, Y_ = shuffle(X_, Y_)
    #ssvm.fit(data_train.X, data_train.Y)
    #ssvm.fit(X_, Y_, warm_start=warm_start)
    ssvm.fit(X_, Y_)
    print("fit finished!")
Example #5
def svm_on_segments(C=.1, learning_rate=.001, subgradient=True):
    # load and prepare data
    lateral = True
    latent = True
    test = False
    #data_train = load_data(which="piecewise")
    #data_train = add_edges(data_train, independent=False)
    #data_train = add_kraehenbuehl_features(data_train, which="train_30px")
    #data_train = add_kraehenbuehl_features(data_train, which="train")
    #if lateral:
    #data_train = add_edge_features(data_train)
    data_train = load_data_global_probs(latent=latent)
    X_org_ = data_train.X
    #data_train = make_hierarchical_data(data_train, lateral=lateral,
    #latent=latent, latent_lateral=True)
    data_train = discard_void(data_train, 21, latent_features=True)
    X_, Y_ = data_train.X, data_train.Y
    # remove edges
    if not lateral:
        X_org_ = [(x[0], np.zeros((0, 2), dtype=np.int)) for x in X_org_]

    if test:
        data_val = load_data('val', which="piecewise")
        data_val = add_edges(data_val, independent=False)
        data_val = add_kraehenbuehl_features(data_val)
        data_val = make_hierarchical_data(data_val,
                                          lateral=lateral,
                                          latent=latent)
        data_val = discard_void(data_val, 21)

        X_.extend(data_val.X)
        Y_.extend(data_val.Y)

    n_states = 21
    class_weights = 1. / np.bincount(np.hstack(Y_))
    class_weights *= 21. / np.sum(class_weights)
    experiment_name = ("latent5_features_C%f_top_node" % C)
    logger = SaveLogger(experiment_name + ".pickle", save_every=10)
    if latent:
        model = LatentNodeCRF(n_labels=n_states,
                              n_features=data_train.X[0][0].shape[1],
                              n_hidden_states=5,
                              inference_method='qpbo' if lateral else 'dai',
                              class_weight=class_weights,
                              latent_node_features=True)
        if subgradient:
            ssvm = learners.LatentSubgradientSSVM(model,
                                                  C=C,
                                                  verbose=1,
                                                  show_loss_every=10,
                                                  logger=logger,
                                                  n_jobs=-1,
                                                  learning_rate=learning_rate,
                                                  decay_exponent=1,
                                                  momentum=0.,
                                                  max_iter=100000)
        else:
            latent_logger = SaveLogger("lssvm_" + experiment_name +
                                       "_%d.pickle",
                                       save_every=1)
            base_ssvm = learners.OneSlackSSVM(model,
                                              verbose=2,
                                              C=C,
                                              max_iter=100000,
                                              n_jobs=-1,
                                              tol=0.001,
                                              show_loss_every=200,
                                              inference_cache=50,
                                              logger=logger,
                                              cache_tol='auto',
                                              inactive_threshold=1e-5,
                                              break_on_bad=False,
                                              switch_to_ad3=True)
            ssvm = learners.LatentSSVM(base_ssvm, logger=latent_logger)
        warm_start = False
        if warm_start:
            ssvm = logger.load()
            ssvm.logger = SaveLogger(experiment_name + "_retrain.pickle",
                                     save_every=10)
            ssvm.max_iter = 100000
            ssvm.learning_rate = 0.00001
            ssvm.momentum = 0
    else:
        #model = GraphCRF(n_states=n_states,
        #n_features=data_train.X[0][0].shape[1],
        #inference_method='qpbo' if lateral else 'dai',
        #class_weight=class_weights)
        model = EdgeFeatureGraphCRF(
            n_states=n_states,
            n_features=data_train.X[0][0].shape[1],
            inference_method='qpbo' if lateral else 'dai',
            class_weight=class_weights,
            n_edge_features=4,
            symmetric_edge_features=[0, 1],
            antisymmetric_edge_features=[2])
        ssvm = learners.OneSlackSSVM(model,
                                     verbose=2,
                                     C=C,
                                     max_iter=100000,
                                     n_jobs=-1,
                                     tol=0.0001,
                                     show_loss_every=200,
                                     inference_cache=50,
                                     logger=logger,
                                     cache_tol='auto',
                                     inactive_threshold=1e-5,
                                     break_on_bad=False)

    #ssvm = logger.load()

    X_, Y_ = shuffle(X_, Y_)
    #ssvm.fit(data_train.X, data_train.Y)
    #ssvm.fit(X_, Y_, warm_start=warm_start)
    ssvm.fit(X_, Y_)
    print("fit finished!")
Example #6
def svm_on_segments(C=.1, learning_rate=.001, subgradient=False):
    data_file = "data_train_XY.pickle"
    ds = PascalSegmentation()
    if os.path.exists(data_file):
        X_, Y_ = cPickle.load(open(data_file, 'rb'))
    else:
        # load and prepare data
        data_train = load_pascal("train", sp_type="cpmc")
        data_train = make_cpmc_hierarchy(ds, data_train)
        data_train = discard_void(ds, data_train)
        X_, Y_ = data_train.X, data_train.Y
        cPickle.dump((X_, Y_), open(data_file, 'wb'), -1)

    class_weights = 1. / np.bincount(np.hstack(Y_))
    class_weights *= 21. / np.sum(class_weights)
    experiment_name = ("latent_25_cpmc_%f_qpbo_n_slack_blub3" % C)
    logger = SaveLogger(experiment_name + ".pickle", save_every=10)
    model = LatentNodeCRF(n_hidden_states=25,
                          inference_method='qpbo',
                          class_weight=class_weights,
                          latent_node_features=False)
    if subgradient:
        ssvm = learners.LatentSubgradientSSVM(model,
                                              C=C,
                                              verbose=1,
                                              show_loss_every=10,
                                              logger=logger,
                                              n_jobs=-1,
                                              learning_rate=learning_rate,
                                              decay_exponent=1,
                                              momentum=0.,
                                              max_iter=100000,
                                              decay_t0=100)
    else:
        latent_logger = SaveLogger("lssvm_" + experiment_name + "_%d.pickle",
                                   save_every=1)
        #base_ssvm = learners.OneSlackSSVM(
        #model, verbose=2, C=C, max_iter=100, n_jobs=-1, tol=0.001,
        #show_loss_every=200, inference_cache=50, logger=logger,
        #cache_tol='auto', inactive_threshold=1e-5, break_on_bad=False,
        #switch_to=('ogm', {'alg': 'dd'}))
        base_ssvm = learners.NSlackSSVM(model,
                                        verbose=4,
                                        C=C,
                                        n_jobs=-1,
                                        tol=0.1,
                                        show_loss_every=20,
                                        logger=logger,
                                        inactive_threshold=1e-8,
                                        break_on_bad=False,
                                        batch_size=36,
                                        inactive_window=10,
                                        switch_to=('ad3', {
                                            'branch_and_bound': True
                                        }))
        ssvm = learners.LatentSSVM(base_ssvm,
                                   logger=latent_logger,
                                   latent_iter=3)
    #warm_start = True
    warm_start = False
    if warm_start:
        ssvm = logger.load()
        ssvm.logger = SaveLogger(experiment_name + "_retrain.pickle",
                                 save_every=10)
        ssvm.max_iter = 10000
        ssvm.decay_exponent = 1
        #ssvm.decay_t0 = 1000
        #ssvm.learning_rate = 0.00001
        #ssvm.momentum = 0

    X_, Y_ = shuffle(X_, Y_)
    #ssvm.fit(data_train.X, data_train.Y)
    ssvm.fit(X_, Y_)
    #H_init = [np.hstack([y, np.random.randint(21, 26)]) for y in Y_]
    #ssvm.fit(X_, Y_, H_init=H_init)
    print("fit finished!")