def main(C=1, test=False):
    """Train an EdgeFeatureGraphCRF on Pascal CPMC superpixel graphs and
    evaluate it on the matching held-out split.

    Parameters
    ----------
    C : float, default=1
        SSVM regularization parameter; also baked into the experiment /
        pickle file name.
    test : bool, default=False
        If True, train on the full "train" split and evaluate on "val";
        otherwise use the "kTrain"/"kVal" cross-validation splits.
    """
    ds = PascalSegmentation()
    edge_type = "pairwise"
    # Load training data, build pairwise superpixel graphs with edge
    # features, and drop void-labeled superpixels from training.
    if test:
        which = "train"
    else:
        which = "kTrain"
    data_train = load_pascal(which=which, sp_type="cpmc")
    data_train = add_edges(data_train, edge_type)
    data_train = add_edge_features(ds, data_train)
    data_train = discard_void(ds, data_train, ds.void_label)
    print("number of samples: %s" % len(data_train.X))

    # Inverse-frequency class weights, rescaled so they sum to 21
    # (presumably the number of Pascal classes -- TODO confirm).
    class_weights = 1. / np.bincount(np.hstack(data_train.Y))
    class_weights *= 21. / np.sum(class_weights)
    print(class_weights)

    # Edge features 0 and 1 are declared symmetric, feature 2
    # antisymmetric; this must match the layout add_edge_features emits.
    model = crfs.EdgeFeatureGraphCRF(inference_method='qpbo',
                                     class_weight=class_weights,
                                     symmetric_edge_features=[0, 1],
                                     antisymmetric_edge_features=[2])
    experiment_name = "cpmc_edge_features_trainval_new_%f" % C
    warm_start = False
    ssvm = learners.OneSlackSSVM(
        model, verbose=2, C=C, max_iter=100000, n_jobs=-1,
        tol=0.0001, show_loss_every=50, inference_cache=50, cache_tol='auto',
        logger=SaveLogger(experiment_name + ".pickle", save_every=100),
        inactive_threshold=1e-5, break_on_bad=False, inactive_window=50,
        switch_to=None)

    if warm_start:
        # Resume the previously pickled learner; refit logs go to a
        # separate file and inference switches to exact ad3 branch&bound.
        ssvm = SaveLogger(experiment_name + ".pickle").load()
        ssvm.logger = SaveLogger(
            file_name=experiment_name + "_refit.pickle", save_every=10)
        ssvm.model.inference_method = 'ad3bb'

    ssvm.fit(data_train.X, data_train.Y, warm_start=warm_start)
    # BUG FIX: a stray bare ``return`` used to sit here, making the
    # print below and the whole validation evaluation unreachable.
    print("fit finished!")

    # Evaluate superpixel accuracy on the held-out split.
    if test:
        data_val = load_pascal('val')
    else:
        data_val = load_pascal('kVal')
    data_val = add_edges(data_val, edge_type)
    data_val = add_edge_features(ds, data_val, more_colors=True)
    eval_on_sp(ds, data_val, ssvm.predict(data_val.X), print_results=True)
def main():
    """Analyze a pickled SSVM model from the command line.

    Usage (reconstructed from the argv accesses below -- confirm):
        script MODEL_PICKLE [acc|plot] [msrc|pascal|nyu] [PLOT_FOLDER]

    Prints iteration count, objective, and (when cached constraints are
    available) the primal/dual gap, then either reports train/val
    accuracy ("acc", the default) or writes qualitative result plots
    ("plot").
    """
    argv = sys.argv
    print("loading %s ..." % argv[1])
    ssvm = SaveLogger(file_name=argv[1]).load()
    # Older pickles store the model under ``problem``; normalize that.
    if hasattr(ssvm, 'problem'):
        ssvm.model = ssvm.problem
    print(ssvm)
    # Unwrap meta-learners (e.g. latent SSVM) to reach the inner SSVM.
    if hasattr(ssvm, 'base_ssvm'):
        ssvm = ssvm.base_ssvm
    print("Iterations: %d" % len(ssvm.objective_curve_))
    print("Objective: %f" % ssvm.objective_curve_[-1])
    inference_run = None
    if hasattr(ssvm, 'cached_constraint_'):
        # Mask out iterations whose constraint came from the cache, so
        # the gap is computed from a real inference run.
        inference_run = ~np.array(ssvm.cached_constraint_)
        print("Gap: %f" %
              (np.array(ssvm.primal_objective_curve_)[inference_run][-1] -
               ssvm.objective_curve_[-1]))

    # Default mode is "acc"; default dataset is "nyu".
    if len(argv) <= 2:
        argv.append("acc")
    if len(argv) <= 3:
        dataset = 'nyu'
    else:
        dataset = argv[3]

    if argv[2] == 'acc':
        # Serial prediction; presumably to keep memory/inference caches
        # well-behaved -- TODO confirm.
        ssvm.n_jobs = 1

        for data_str, title in zip(["train", "val"],
                                   ["TRAINING SET", "VALIDATION SET"]):
            print(title)
            edge_type = "pairwise"

            # Load the requested split of the requested dataset.
            if dataset == 'msrc':
                ds = MSRC21Dataset()
                data = msrc_helpers.load_data(data_str,
                                              which="piecewise_new")
                #data = add_kraehenbuehl_features(data, which="train_30px")
                data = msrc_helpers.add_kraehenbuehl_features(data,
                                                              which="train")
            elif dataset == 'pascal':
                ds = PascalSegmentation()
                data = pascal_helpers.load_pascal(data_str, sp_type="cpmc")
                #data = pascal_helpers.load_pascal(data_str)
            elif dataset == 'nyu':
                ds = NYUSegmentation()
                data = nyu_helpers.load_nyu(data_str, n_sp=500, sp='rgbd')
            else:
                # NOTE(review): "Excepted" looks like a typo for
                # "Expected" (runtime string, left unchanged here).
                raise ValueError("Excepted dataset to be 'nyu', 'pascal' or"
                                 " 'msrc', got %s." % dataset)

            # Prepare the graph structure the pickled model expects.
            if type(ssvm.model).__name__ == "LatentNodeCRF":
                print("making data hierarchical")
                data = pascal_helpers.make_cpmc_hierarchy(ds, data)
                #data = make_hierarchical_data(
                #ds, data, lateral=True, latent=True, latent_lateral=False,
                #add_edge_features=False)
            else:
                data = add_edges(data, edge_type)

            if type(ssvm.model).__name__ == 'EdgeFeatureGraphCRF':
                data = add_edge_features(ds, data, depth_diff=True,
                                         normal_angles=True)

            if type(ssvm.model).__name__ == "EdgeFeatureLatentNodeCRF":
                data = add_edge_features(ds, data)
                data = make_hierarchical_data(
                    ds, data, lateral=True, latent=True,
                    latent_lateral=False, add_edge_features=True)
            #ssvm.model.inference_method = "qpbo"
            Y_pred = ssvm.predict(data.X)

            # Latent models predict latent states; map them to labels.
            if isinstance(ssvm.model, LatentNodeCRF):
                Y_pred = [ssvm.model.label_from_latent(h) for h in Y_pred]
            Y_flat = np.hstack(data.Y)

            # Superpixel accuracy, ignoring void-labeled superpixels.
            print("superpixel accuracy: %.2f"
                  % (np.mean((np.hstack(Y_pred) ==
                              Y_flat)[Y_flat != ds.void_label]) * 100))

            if dataset == 'msrc':
                res = msrc_helpers.eval_on_pixels(data, Y_pred,
                                                  print_results=True)
                print("global: %.2f, average: %.2f" % (res['global'] * 100,
                                                       res['average'] * 100))
                #msrc_helpers.plot_confusion_matrix(res['confusion'])
            else:
                hamming, jaccard = eval_on_sp(ds, data, Y_pred,
                                              print_results=True)
                print("Jaccard: %.2f, Hamming: %.2f" % (jaccard.mean(),
                                                        hamming.mean()))

        plt.show()

    elif argv[2] == 'plot':
        data_str = 'val'
        if len(argv) <= 4:
            raise ValueError("Need a folder name for plotting.")
        # NOTE(review): unlike the 'acc' branch there is no ``else`` for
        # an unknown dataset here, so ``data``/``ds`` would be unbound --
        # relies on the dataset name being valid.
        if dataset == "msrc":
            ds = MSRC21Dataset()
            data = msrc_helpers.load_data(data_str, which="piecewise")
            data = add_edges(data, independent=False)
            data = msrc_helpers.add_kraehenbuehl_features(
                data, which="train_30px")
            data = msrc_helpers.add_kraehenbuehl_features(
                data, which="train")
        elif dataset == "pascal":
            ds = PascalSegmentation()
            data = pascal_helpers.load_pascal("val")
            data = add_edges(data)
        elif dataset == "nyu":
            ds = NYUSegmentation()
            data = nyu_helpers.load_nyu("test")
            data = add_edges(data)

        if type(ssvm.model).__name__ == 'EdgeFeatureGraphCRF':
            data = add_edge_features(ds, data, depth_diff=True,
                                     normal_angles=True)
        Y_pred = ssvm.predict(data.X)
        plot_results(ds, data, Y_pred, argv[4])
def main(C=1, test=False):
    """Train an EdgeFeatureGraphCRF on Pascal CPMC superpixel graphs.

    Parameters
    ----------
    C : float, default=1
        SSVM regularization strength; also embedded in the pickle name.
    test : bool, default=False
        True -> train on "train" and evaluate on "val"; False -> use the
        "kTrain"/"kVal" cross-validation splits.
    """
    ds = PascalSegmentation()
    # load training data
    edge_type = "pairwise"
    if test:
        which = "train"
    else:
        which = "kTrain"
    # Build pairwise superpixel graphs with edge features and drop
    # void-labeled superpixels from training.
    data_train = load_pascal(which=which, sp_type="cpmc")
    data_train = add_edges(data_train, edge_type)
    data_train = add_edge_features(ds, data_train)
    data_train = discard_void(ds, data_train, ds.void_label)
    print("number of samples: %s" % len(data_train.X))
    # Inverse-frequency class weights, rescaled to sum to 21 (presumably
    # the number of Pascal classes -- TODO confirm).
    class_weights = 1. / np.bincount(np.hstack(data_train.Y))
    class_weights *= 21. / np.sum(class_weights)
    print(class_weights)
    #model = crfs.GraphCRF(n_states=n_states,
    #n_features=data_train.X[0][0].shape[1],
    #inference_method='qpbo', class_weight=class_weights)
    # Edge features 0/1 are symmetric, feature 2 antisymmetric; this
    # must match the layout produced by add_edge_features.
    model = crfs.EdgeFeatureGraphCRF(inference_method='qpbo',
                                     class_weight=class_weights,
                                     symmetric_edge_features=[0, 1],
                                     antisymmetric_edge_features=[2])
    experiment_name = "cpmc_edge_features_trainval_new_%f" % C
    #warm_start = True
    warm_start = False
    ssvm = learners.OneSlackSSVM(
        model, verbose=2, C=C, max_iter=100000, n_jobs=-1,
        tol=0.0001, show_loss_every=50, inference_cache=50, cache_tol='auto',
        logger=SaveLogger(experiment_name + ".pickle", save_every=100),
        inactive_threshold=1e-5, break_on_bad=False, inactive_window=50,
        switch_to=None)
    #ssvm = learners.SubgradientSSVM(
    #model, verbose=3, C=C, max_iter=10000, n_jobs=-1, show_loss_every=10,
    #logger=SaveLogger(experiment_name + ".pickle", save_every=10),
    #momentum=0, learning_rate=0.1, decay_exponent=1, decay_t0=100)

    if warm_start:
        # Resume a previously pickled learner; refit logs go to a
        # separate file and inference switches to exact ad3 B&B.
        ssvm = SaveLogger(experiment_name + ".pickle").load()
        ssvm.logger = SaveLogger(
            file_name=experiment_name + "_refit.pickle", save_every=10)
        #ssvm.learning_rate = 0.000001
        ssvm.model.inference_method = 'ad3bb'
        #ssvm.n_jobs = 1

    ssvm.fit(data_train.X, data_train.Y, warm_start=warm_start)
    return
    # NOTE(review): the bare ``return`` above makes everything below
    # unreachable -- the validation evaluation never runs. Presumably a
    # leftover debugging shortcut; confirm intent before removing it.
    print("fit finished!")

    if test:
        data_val = load_pascal('val')
    else:
        data_val = load_pascal('kVal')
    data_val = add_edges(data_val, edge_type)
    data_val = add_edge_features(ds, data_val, more_colors=True)
    eval_on_sp(ds, data_val, ssvm.predict(data_val.X), print_results=True)
def main():
    """Contrast two pickled SSVM segmentation models image by image.

    Usage (from the argv accesses): script MODEL_A MODEL_B OUT_FOLDER.
    Model A predicts on plain pairwise edge features, model B on edge
    features augmented with depth differences. For every NYU validation
    image where the two predictions differ by at least two points of
    pixel score, a comparison figure is written to OUT_FOLDER.
    """
    argv = sys.argv
    print("loading %s ..." % argv[1])
    ssvm_a = SaveLogger(file_name=argv[1]).load()
    ssvm_b = SaveLogger(file_name=argv[2]).load()

    data_str = 'val'
    if len(argv) <= 3:
        raise ValueError("Need a folder name for plotting.")
    print("loading data...")
    data = load_nyu(data_str, n_sp=500)
    dataset = NYUSegmentation()
    print("done")

    # Identical pairwise graphs for both models; only the second gets
    # depth-difference edge features.
    data_a = add_edges(data, kind="pairwise")
    data_b = add_edges(data, kind="pairwise")
    data_a = add_edge_features(dataset, data_a)
    data_b = add_edge_features(dataset, data_b, depth_diff=True)
    preds_a = ssvm_a.predict(data_a.X)
    preds_b = ssvm_b.predict(data_b.X)

    folder = argv[3]
    if not os.path.exists(folder):
        os.mkdir(folder)

    np.random.seed(0)
    for fname, sps, ya, yb in zip(data.file_names, data.superpixels,
                                  preds_a, preds_b):
        # Nothing to show when the models agree on every superpixel.
        if np.all(ya == yb):
            continue
        gt = dataset.get_ground_truth(fname)
        score_a = eval_on_pixels(dataset, [gt], [ya[sps]],
                                 print_results=False)[0]
        score_a = np.mean(score_a[np.isfinite(score_a)])
        score_b = eval_on_pixels(dataset, [gt], [yb[sps]],
                                 print_results=False)[0]
        score_b = np.mean(score_b[np.isfinite(score_b)])
        # Skip images without a meaningful quality gap.
        if np.abs(score_a - score_b) < 2:
            continue

        image = dataset.get_image(fname)
        fig, axes = plt.subplots(2, 3, figsize=(12, 6))
        # Top-left: highlight regions where the predictions disagree.
        axes[0, 0].imshow(image)
        axes[0, 0].imshow((ya != yb)[sps], vmin=0, vmax=1, alpha=.7)
        # Top-middle: ground truth overlaid on the image.
        axes[0, 1].set_title("ground truth")
        axes[0, 1].imshow(image)
        axes[0, 1].imshow(gt, alpha=.7, cmap=dataset.cmap, vmin=0,
                          vmax=dataset.cmap.N)
        # Bottom row: each model's prediction, titled with its score.
        axes[1, 0].set_title("%.2f" % score_a)
        axes[1, 0].imshow(image)
        axes[1, 0].imshow(ya[sps], vmin=0, vmax=dataset.cmap.N, alpha=.7,
                          cmap=dataset.cmap)
        axes[1, 1].set_title("%.2f" % score_b)
        axes[1, 1].imshow(image)
        axes[1, 1].imshow(yb[sps], alpha=.7, cmap=dataset.cmap, vmin=0,
                          vmax=dataset.cmap.N)
        # Top-right: legend strip of every non-void class present.
        present = np.unique(np.hstack([ya, yb, np.unique(gt)]))
        present = np.array([label for label in present
                            if label != dataset.void_label])
        axes[0, 2].imshow(present[:, np.newaxis], interpolation='nearest',
                          cmap=dataset.cmap, vmin=0, vmax=dataset.cmap.N)
        for row, label in enumerate(present):
            axes[0, 2].text(1, row, dataset.classes[label])
        for ax in axes.ravel():
            ax.set_xticks(())
            ax.set_yticks(())
        axes[1, 2].set_visible(False)
        fig.savefig(folder + "/%s.png" % fname, bbox_inches="tight")
        plt.close(fig)