Example #1
import numpy as np
from sklearn.svm import LinearSVC

# NYUSegmentation, load_nyu, get_sp_normals and eval_on_sp are helpers
# from the surrounding NYU depth-segmentation project.
def train_svm(C=0.1, grid=False):
    ds = NYUSegmentation()
    data_train = load_nyu("train", n_sp=500, sp='rgbd')
    # class_weight='auto' was renamed to 'balanced' in scikit-learn 0.17
    svm = LinearSVC(C=C, dual=False, class_weight='balanced')
    # Disabled experiment: append mean superpixel surface normals
    # (scaled by 0.1) as extra features.
    #N_train = []
    #for f, sp in zip(data_train.file_names, data_train.superpixels):
        #normals = ds.get_pointcloud_normals(f)[:, :, 3:]
        #mean_normals = get_sp_normals(normals, sp)
        #N_train.append(mean_normals * .1)
    #N_flat_train = np.vstack(N_train)

    X, y = np.vstack(data_train.X), np.hstack(data_train.Y)
    #X = np.hstack([X, N_flat_train])
    svm.fit(X, y)
    print(svm.score(X, y))
    eval_on_sp(ds, data_train, [svm.predict(x) for x in data_train.X],
               print_results=True)

    data_val = load_nyu("val", n_sp=500, sp='rgbd')
    #N_val = []
    #for f, sp in zip(data_val.file_names, data_val.superpixels):
        #normals = ds.get_pointcloud_normals(f)[:, :, 3:]
        #mean_normals = get_sp_normals(normals, sp)
        #N_val.append(mean_normals * .1)
    eval_on_sp(ds, data_val, [svm.predict(x) for x in data_val.X],
               print_results=True)
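
The grid flag above is accepted but never used. A minimal sketch of what a
search over C could look like on the same flattened features, using
scikit-learn's GridSearchCV (grid_search_svm is a hypothetical helper, not
part of the project):

from sklearn.model_selection import GridSearchCV
from sklearn.svm import LinearSVC

def grid_search_svm(X, y):
    # X, y as built above via np.vstack(data_train.X) / np.hstack(data_train.Y).
    search = GridSearchCV(LinearSVC(dual=False, class_weight='balanced'),
                          param_grid={'C': [0.01, 0.1, 1, 10]},
                          n_jobs=-1)
    search.fit(X, y)
    return search.best_estimator_, search.best_params_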
Example #2
import numpy as np
from pystruct import learners
import pystruct.models as crfs
from pystruct.utils import SaveLogger

# NYUSegmentation, load_nyu, add_edges, add_edge_features and discard_void
# are helpers from the surrounding project.
def main(C=1):
    dataset = NYUSegmentation()
    # load training data (split made explicit, as in the other examples)
    data_train = load_nyu("train", n_sp=500, sp='rgbd')
    data_train = add_edges(data_train)
    data_train = add_edge_features(dataset, data_train, depth_diff=True, normal_angles=True)

    data_train = discard_void(dataset, data_train)

    n_states = 4
    print("number of samples: %s" % len(data_train.X))
    # inverse-frequency class weights, rescaled to sum to n_states
    class_weights = 1. / np.bincount(np.hstack(data_train.Y))
    class_weights *= n_states / np.sum(class_weights)
    #class_weights = np.ones(n_states)
    print(class_weights)
    #model = crfs.GraphCRF(n_states=n_states,
                          #n_features=data_train.X[0][0].shape[1],
                          #inference_method='qpbo', class_weight=class_weights)
    model = crfs.EdgeFeatureGraphCRF(inference_method='qpbo',
                                     class_weight=class_weights,
                                     n_edge_features=5,
                                     symmetric_edge_features=[0, 1])
    experiment_name = "rgbd_test%f" % C
    ssvm = learners.OneSlackSSVM(
        model, verbose=2, C=C, max_iter=100000, n_jobs=-1,
        tol=0.001, show_loss_every=100, inference_cache=50, cache_tol='auto',
        logger=SaveLogger(experiment_name + ".pickle", save_every=100),
        inactive_threshold=1e-5, break_on_bad=False, inactive_window=50,
        switch_to=("ad3", {'branch_and_bound':True}))

    ssvm.fit(data_train.X, data_train.Y)
    print("fit finished!")
    return
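
After training, the model written by SaveLogger can be reloaded for
inference. A minimal sketch of evaluating on the validation split, assuming
it is prepared with the same edge features (evaluate is a hypothetical
helper; SaveLogger.load() is pystruct's loading API):

from pystruct.utils import SaveLogger

def evaluate(C=1):
    # Reload the learner saved by the logger above.
    ssvm = SaveLogger("rgbd_test%f.pickle" % C).load()
    dataset = NYUSegmentation()
    data_val = load_nyu("val", n_sp=500, sp='rgbd')
    data_val = add_edges(data_val)
    data_val = add_edge_features(dataset, data_val, depth_diff=True,
                                 normal_angles=True)
    predictions = ssvm.predict(data_val.X)
    eval_on_sp(dataset, data_val, predictions, print_results=True)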
Example #3
import numpy as np
from pystruct import learners
import pystruct.models as crfs
from pystruct.utils import SaveLogger

# NYUSegmentation, load_nyu, add_edges, add_edge_features,
# make_hierarchical_data and discard_void are project helpers.
def main(C=1):
    dataset = NYUSegmentation()
    # load training data
    data_train = load_nyu('train', n_sp=500, sp='rgbd')
    data_train = add_edges(data_train)
    data_train = add_edge_features(dataset,
                                   data_train,
                                   depth_diff=True,
                                   normal_angles=True)
    data_train = make_hierarchical_data(dataset, data_train)
    data_train = discard_void(dataset, data_train)

    n_states = 4
    print("number of samples: %s" % len(data_train.X))
    # inverse-frequency class weights, rescaled to sum to n_states
    class_weights = 1. / np.bincount(np.hstack(data_train.Y))
    class_weights *= n_states / np.sum(class_weights)
    #class_weights = np.ones(n_states)
    print(class_weights)
    #model = crfs.GraphCRF(n_states=n_states,
    #                      n_features=data_train.X[0][0].shape[1],
    #                      inference_method='qpbo', class_weight=class_weights)
    model = crfs.EdgeFeatureLatentNodeCRF(n_hidden_states=5,
                                          n_edge_features=5,
                                          inference_method='qpbo',
                                          class_weight=class_weights,
                                          symmetric_edge_features=[0, 1],
                                          latent_node_features=False,
                                          n_labels=4)
    experiment_name = "rgbd_normal_angles_fold1_strong_reweight%f" % C
    base_ssvm = learners.OneSlackSSVM(model,
                                      verbose=2,
                                      C=C,
                                      max_iter=100000,
                                      n_jobs=1,
                                      tol=0.001,
                                      show_loss_every=100,
                                      inference_cache=50,
                                      cache_tol='auto',
                                      logger=SaveLogger(experiment_name +
                                                        ".pickle",
                                                        save_every=100),
                                      inactive_threshold=1e-5,
                                      break_on_bad=False,
                                      inactive_window=50,
                                      switch_to=("ad3", {
                                          'branch_and_bound': True
                                      }))
    latent_logger = SaveLogger("lssvm_" + experiment_name + "_%d.pickle",
                               save_every=1)
    ssvm = learners.LatentSSVM(base_ssvm, logger=latent_logger, latent_iter=3)

    ssvm.fit(data_train.X, data_train.Y)
    print("fit finished!")
    return
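
The latent logger writes one pickle per latent iteration; in pystruct's
SaveLogger, a %d in the file name is replaced by the iteration number. A
minimal sketch of reloading a given iteration (load_latent_iteration is a
hypothetical helper; the file naming assumes the experiment_name above):

from pystruct.utils import SaveLogger

def load_latent_iteration(C=1, iteration=2):
    name = "rgbd_normal_angles_fold1_strong_reweight%f" % C
    # e.g. lssvm_rgbd_normal_angles_fold1_strong_reweight1.000000_2.pickle
    return SaveLogger("lssvm_" + name + "_%d.pickle" % iteration).load()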
Example #4
import numpy as np

# NYUSegmentation, load_nyu and eval_on_sp are project helpers; data.X is
# assumed to hold per-superpixel class scores, so argmax gives a label.
def eval_sp_prediction():
    dataset = NYUSegmentation()
    data = load_nyu('val', n_sp=500, sp='rgbd')
    predictions = [np.argmax(x, axis=-1) for x in data.X]
    #predictions = data.Y
    hamming, jaccard = eval_on_sp(dataset, data, predictions,
                                  print_results=True)
    return hamming, jaccard
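
eval_on_sp presumably reports per-superpixel accuracy ("hamming") and the
mean per-class intersection-over-union ("jaccard"). A minimal sketch of the
two metrics on flat label arrays (compute_scores is a hypothetical stand-in,
not the project's implementation):

import numpy as np

def compute_scores(y_true, y_pred, n_classes):
    # Hamming score: fraction of superpixels labeled correctly.
    hamming = np.mean(y_true == y_pred)
    # Jaccard: intersection over union, computed per class and averaged.
    ious = []
    for c in range(n_classes):
        intersection = np.sum((y_true == c) & (y_pred == c))
        union = np.sum((y_true == c) | (y_pred == c))
        if union > 0:
            ious.append(intersection / union)
    return hamming, np.mean(ious)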