import numpy as np

from sklearn.svm import LinearSVC
from sklearn.utils import shuffle
from sklearn.kernel_approximation import AdditiveChi2Sampler
# The three imports below follow the pre-0.18 scikit-learn layout this code
# targets (Scorer and class_weight='auto' were later replaced by make_scorer
# and class_weight='balanced').
from sklearn.cross_validation import LeavePLabelOut
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import recall_score, Scorer

from pystruct import learners
import pystruct.models as crfs
from pystruct.utils import SaveLogger

from IPython.core.debugger import Tracer
tracer = Tracer()

# Project-local helpers (PascalSegmentation, NYUSegmentation, load_pascal,
# load_nyu, eval_on_sp, add_edges, add_edge_features, discard_void,
# add_global_descriptor, SiftBOW, slic_n, get_sp_normals) are assumed
# importable from the surrounding repository; their module paths depend on
# the repo layout.


# Unary SVM on precomputed Pascal superpixel features, with an optional
# grid search over C using image-grouped cross-validation.
def train_svm(C=0.1, grid=False):
    ds = PascalSegmentation()
    svm = LinearSVC(C=C, dual=False, class_weight='auto')

    if grid:
        data_train = load_pascal("kTrain")
        X, y = shuffle(data_train.X, data_train.Y)
        # prepare leave-one-label-out by assigning labels to images
        image_indicators = np.hstack([np.repeat(i, len(x))
                                      for i, x in enumerate(X)])
        # go down to only 5 "folds"
        labels = image_indicators % 5
        X, y = np.vstack(X), np.hstack(y)

        cv = LeavePLabelOut(labels=labels, p=1)
        param_grid = {'C': 10. ** np.arange(-3, 3)}
        scorer = Scorer(recall_score, average="macro")
        grid_search = GridSearchCV(svm, param_grid=param_grid, cv=cv,
                                   verbose=10, scoring=scorer, n_jobs=-1)
        grid_search.fit(X, y)
    else:
        data_train = load_pascal("train")
        X, y = np.vstack(data_train.X), np.hstack(data_train.Y)
        svm.fit(X, y)
        print(svm.score(X, y))
        eval_on_sp(ds, data_train, [svm.predict(x) for x in data_train.X],
                   print_results=True)
        data_val = load_pascal("val")
        eval_on_sp(ds, data_val, [svm.predict(x) for x in data_val.X],
                   print_results=True)
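
def _demo_image_grouped_cv_labels():
    """Illustrative sketch (not part of the original script): shows how the
    grid-search branch above builds its cross-validation labels. Every
    superpixel gets the index of its source image, and folding those indices
    mod 5 yields five CV labels, so LeavePLabelOut(labels, p=1) produces five
    folds while all superpixels of one image stay on the same side of each
    split. The toy feature arrays are hypothetical stand-ins for data_train.X.
    """
    X = [np.zeros((n, 3)) for n in (4, 2, 3, 5, 1, 2, 6)]
    image_indicators = np.hstack([np.repeat(i, len(x))
                                  for i, x in enumerate(X)])
    labels = image_indicators % 5
    # [0 0 0 0 1 1 2 2 2 3 3 3 3 3 4 0 0 1 1 1 1 1 1]
    print(labels)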

# Variant for the NYU depth dataset (originally a separate script).
def train_svm(C=0.1, grid=False):
    ds = NYUSegmentation()
    data_train = load_nyu("train", n_sp=500, sp='rgbd')
    svm = LinearSVC(C=C, dual=False, class_weight='auto')

    # Disabled experiment: append (down-weighted) per-superpixel surface
    # normals from the point cloud to the appearance features.
    #N_train = []
    #for f, sp in zip(data_train.file_names, data_train.superpixels):
        #normals = ds.get_pointcloud_normals(f)[:, :, 3:]
        #mean_normals = get_sp_normals(normals, sp)
        #N_train.append(mean_normals * .1)
    #N_flat_train = np.vstack(N_train)

    X, y = np.vstack(data_train.X), np.hstack(data_train.Y)
    #X = np.hstack([X, N_flat_train])
    svm.fit(X, y)
    print(svm.score(X, y))
    eval_on_sp(ds, data_train, [svm.predict(x) for x in data_train.X],
               print_results=True)

    data_val = load_nyu("val", n_sp=500, sp='rgbd')
    #N_val = []
    #for f, sp in zip(data_val.file_names, data_val.superpixels):
        #normals = ds.get_pointcloud_normals(f)[:, :, 3:]
        #mean_normals = get_sp_normals(normals, sp)
        #N_val.append(mean_normals * .1)
    eval_on_sp(ds, data_val, [svm.predict(x) for x in data_val.X],
               print_results=True)
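
def get_sp_normals_sketch(normals, superpixels):
    """Hypothetical sketch (the real get_sp_normals lives elsewhere in the
    repository): average the per-pixel normal vectors inside each superpixel
    and re-normalize to unit length, matching how the commented-out block
    above consumes it (one 3-vector per superpixel).
    """
    flat_sp = superpixels.ravel()
    flat_normals = normals.reshape(-1, 3)
    # Accumulate normal vectors per superpixel id.
    sums = np.zeros((superpixels.max() + 1, 3))
    np.add.at(sums, flat_sp, flat_normals)
    # Re-normalize; the epsilon guards against empty superpixels.
    lengths = np.linalg.norm(sums, axis=1, keepdims=True)
    return sums / np.maximum(lengths, 1e-12)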

# Variant that computes SIFT bag-of-words features from scratch and maps
# them through an approximate chi-squared kernel before the linear SVM.
def train_svm(C=0.1, grid=False):
    pascal = PascalSegmentation()
    files_train = pascal.get_split("kTrain")
    superpixels = [slic_n(pascal.get_image(f), n_superpixels=100,
                          compactness=10) for f in files_train]
    bow = SiftBOW(pascal, n_words=1000, color_sift=True)
    data_train = bow.fit_transform(files_train, superpixels)
    data_train = add_global_descriptor(data_train)

    svm = LinearSVC(C=C, dual=False, class_weight='auto')
    chi2 = AdditiveChi2Sampler()

    X, y = np.vstack(data_train.X), np.hstack(data_train.Y)
    X = chi2.fit_transform(X)
    svm.fit(X, y)
    print(svm.score(X, y))
    eval_on_sp(pascal, data_train,
               [svm.predict(chi2.transform(x)) for x in data_train.X],
               print_results=True)

    files_val = pascal.get_split("kVal")
    superpixels_val = [slic_n(pascal.get_image(f), n_superpixels=100,
                              compactness=10) for f in files_val]
    data_val = bow.transform(files_val, superpixels_val)
    data_val = add_global_descriptor(data_val)
    eval_on_sp(pascal, data_val,
               [svm.predict(chi2.transform(x)) for x in data_val.X],
               print_results=True)
    tracer()
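
def _demo_chi2_feature_map():
    """Illustrative check (not part of the original script): the function
    above maps bag-of-words histograms through AdditiveChi2Sampler so that a
    linear SVM can act like an additive-chi2-kernel SVM. Dot products of the
    mapped features approximate k(x, y) = sum_i 2 * x_i * y_i / (x_i + y_i)
    (Vedaldi & Zisserman), up to a sampling error that shrinks as
    sample_steps grows.
    """
    rng = np.random.RandomState(0)
    # Strictly positive, histogram-like toy features.
    X = rng.uniform(0.1, 1.0, size=(5, 10))

    X_mapped = AdditiveChi2Sampler(sample_steps=2).fit_transform(X)

    # Exact additive chi2 kernel for comparison.
    K = np.zeros((5, 5))
    for i in range(5):
        for j in range(5):
            K[i, j] = np.sum(2 * X[i] * X[j] / (X[i] + X[j]))

    # Discrepancy between the explicit feature map and the exact kernel.
    print(np.abs(X_mapped.dot(X_mapped.T) - K).max())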

# Pairwise CRF on CPMC superpixels, trained with a one-slack structural SVM.
def main(C=1, test=False):
    ds = PascalSegmentation()
    # load training data
    edge_type = "pairwise"
    if test:
        which = "train"
    else:
        which = "kTrain"
    data_train = load_pascal(which=which, sp_type="cpmc")

    data_train = add_edges(data_train, edge_type)
    data_train = add_edge_features(ds, data_train)
    data_train = discard_void(ds, data_train, ds.void_label)

    print("number of samples: %s" % len(data_train.X))
    # Inverse-frequency class weights, rescaled to sum to the 21 Pascal
    # classes so the overall loss scale stays comparable to uniform weights.
    class_weights = 1. / np.bincount(np.hstack(data_train.Y))
    class_weights *= 21. / np.sum(class_weights)
    print(class_weights)

    #model = crfs.GraphCRF(n_states=n_states,
                          #n_features=data_train.X[0][0].shape[1],
                          #inference_method='qpbo',
                          #class_weight=class_weights)
    model = crfs.EdgeFeatureGraphCRF(inference_method='qpbo',
                                     class_weight=class_weights,
                                     symmetric_edge_features=[0, 1],
                                     antisymmetric_edge_features=[2])
    experiment_name = "cpmc_edge_features_trainval_new_%f" % C
    #warm_start = True
    warm_start = False
    ssvm = learners.OneSlackSSVM(
        model, verbose=2, C=C, max_iter=100000, n_jobs=-1, tol=0.0001,
        show_loss_every=50, inference_cache=50, cache_tol='auto',
        logger=SaveLogger(experiment_name + ".pickle", save_every=100),
        inactive_threshold=1e-5, break_on_bad=False, inactive_window=50,
        switch_to=None)
    #ssvm = learners.SubgradientSSVM(
        #model, verbose=3, C=C, max_iter=10000, n_jobs=-1,
        #show_loss_every=10,
        #logger=SaveLogger(experiment_name + ".pickle", save_every=10),
        #momentum=0, learning_rate=0.1, decay_exponent=1, decay_t0=100)

    if warm_start:
        # Resume from the pickled learner and refit with exact inference.
        ssvm = SaveLogger(experiment_name + ".pickle").load()
        ssvm.logger = SaveLogger(file_name=experiment_name + "_refit.pickle",
                                 save_every=10)
        #ssvm.learning_rate = 0.000001
        ssvm.model.inference_method = 'ad3bb'
        #ssvm.n_jobs = 1

    ssvm.fit(data_train.X, data_train.Y, warm_start=warm_start)
    print("fit finished!")

    if test:
        data_val = load_pascal('val')
    else:
        data_val = load_pascal('kVal')
    data_val = add_edges(data_val, edge_type)
    data_val = add_edge_features(ds, data_val, more_colors=True)
    eval_on_sp(ds, data_val, ssvm.predict(data_val.X), print_results=True)
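
def _demo_class_weights():
    """Toy illustration (not in the original script) of the class-weight
    normalization used above: inverse-frequency weights rescaled so they sum
    to 21, the number of Pascal VOC classes (background + 20 objects). Rare
    classes get large weights; on perfectly balanced labels every class would
    get weight 1.0.
    """
    # Imbalanced toy labels: class k occurs (k + 1) * 10 times.
    y = np.repeat(np.arange(21), np.arange(1, 22) * 10)
    class_weights = 1. / np.bincount(y)
    class_weights *= 21. / np.sum(class_weights)
    print(class_weights.sum())                     # 21.0
    print(class_weights[0], class_weights[-1])     # rarest class weighs most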

def eval_spixel_best_possible():
    # Upper bound: score the ground-truth superpixel labels themselves to
    # see the best accuracy these superpixels could possibly achieve.
    data = load_pascal('kTrain', sp_type='cpmc')
    pascal = PascalSegmentation()
    hamming, jaccard = eval_on_sp(pascal, data, data.Y, print_results=True)

def eval_sp_prediction():
    # The dataset argument was missing here; added for consistency with
    # every other eval_on_sp call in these scripts.
    pascal = PascalSegmentation()
    data = load_pascal('val')
    # Unary-only prediction: pick the highest-scoring class per superpixel.
    predictions = [np.argmax(x, axis=-1) for x in data.X]
    hamming, jaccard = eval_on_sp(pascal, data, predictions,
                                  print_results=True)
    tracer()

# NYU variant of the evaluation above (originally a separate script).
def eval_sp_prediction():
    dataset = NYUSegmentation()
    data = load_nyu('val', n_sp=500, sp='rgbd')
    predictions = [np.argmax(x, axis=-1) for x in data.X]
    #predictions = data.Y
    hamming, jaccard = eval_on_sp(dataset, data, predictions,
                                  print_results=True)