def test_logging():
    # `inference_method` is expected to be defined at module level.
    iris = load_iris()
    X, y = iris.data, iris.target
    # np.int was removed from NumPy; plain int is equivalent here.
    X_ = [(np.atleast_2d(x), np.empty((0, 2), dtype=int)) for x in X]
    Y = y.reshape(-1, 1)
    X_train, X_test, y_train, y_test = train_test_split(X_, Y, random_state=1)

    _, file_name = mkstemp()

    pbl = GraphCRF(n_features=4, n_states=3, inference_method=inference_method)
    logger = SaveLogger(file_name)
    svm = NSlackSSVM(pbl, C=100, n_jobs=1, logger=logger)
    svm.fit(X_train, y_train)

    score_current = svm.score(X_test, y_test)
    score_auto_saved = logger.load().score(X_test, y_test)

    alt_file_name = file_name + "alt"
    logger.save(svm, alt_file_name)
    logger.file_name = alt_file_name
    logger.load()
    score_manual_saved = logger.load().score(X_test, y_test)

    assert_less(.97, score_current)
    assert_less(.97, score_auto_saved)
    assert_less(.97, score_manual_saved)
    assert_almost_equal(score_auto_saved, score_manual_saved)

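# All of the snippets in this section revolve around pystruct's SaveLogger.
# A minimal sketch of its save/load round trip in isolation, assuming only
# that SaveLogger pickles whatever object it is given (the dict below is a
# stand-in for a fitted learner, not part of the original code):
from tempfile import mkstemp

from pystruct.utils import SaveLogger

_, path = mkstemp()
logger = SaveLogger(path, save_every=10)  # learners invoke the logger during
                                          # fit(), every `save_every` iterations
logger.save({"w": [0.0, 1.0]}, path)      # explicit save, as in test_logging above
restored = logger.load()                  # unpickles from logger.file_name
print(restored)                           # -> {'w': [0.0, 1.0]}
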
def plot_results():
    data = load_data("val", independent=False)
    data = make_hierarchical_data(data, lateral=False, latent=True)
    logger = SaveLogger("test_latent_2.0001.pickle", save_every=100)
    ssvm = logger.load()
    plot_results_hierarchy(data, ssvm.predict(data.X),
                           folder="latent_results_val_50_states_no_lateral")

def test_n_slack_svm_as_crf_pickling():
    iris = load_iris()
    X, y = iris.data, iris.target
    X_ = [(np.atleast_2d(x), np.empty((0, 2), dtype=int)) for x in X]
    Y = y.reshape(-1, 1)
    X_train, X_test, y_train, y_test = train_test_split(X_, Y, random_state=1)
    _, file_name = mkstemp()

    pbl = GraphCRF(n_features=4, n_states=3, inference_method='lp')
    logger = SaveLogger(file_name)
    svm = NSlackSSVM(pbl, C=100, n_jobs=1, logger=logger)
    svm.fit(X_train, y_train)

    assert_less(.97, svm.score(X_test, y_test))
    assert_less(.97, logger.load().score(X_test, y_test))

def test_svm_as_crf_pickling_batch():
    iris = load_iris()
    X, y = iris.data, iris.target
    X_ = [(np.atleast_2d(x), np.empty((0, 2), dtype=int)) for x in X]
    Y = y.reshape(-1, 1)
    X_train, X_test, y_train, y_test = train_test_split(X_, Y, random_state=1)
    _, file_name = mkstemp()

    pbl = GraphCRF(n_features=4, n_states=3, inference_method='unary')
    logger = SaveLogger(file_name)
    svm = FrankWolfeSSVM(pbl, C=10, logger=logger, max_iter=50,
                         batch_mode=False)
    svm.fit(X_train, y_train)

    assert_less(.97, svm.score(X_test, y_test))
    assert_less(.97, logger.load().score(X_test, y_test))

def test_subgradient_svm_as_crf_pickling():
    iris = load_iris()
    X, y = iris.data, iris.target
    X_ = [(np.atleast_2d(x), np.empty((0, 2), dtype=int)) for x in X]
    Y = y.reshape(-1, 1)
    X_train, X_test, y_train, y_test = train_test_split(X_, Y, random_state=1)
    _, file_name = mkstemp()

    pbl = GraphCRF(n_features=4, n_states=3, inference_method='lp')
    logger = SaveLogger(file_name, verbose=1)
    svm = SubgradientSSVM(pbl, verbose=0, C=100, n_jobs=1, logger=logger,
                          max_iter=50, momentum=0, learning_rate=0.01)
    svm.fit(X_train, y_train)

    assert_less(.97, svm.score(X_test, y_test))
    assert_less(.97, logger.load().score(X_test, y_test))

def svm_on_segments(C=.1, learning_rate=.001, subgradient=False):
    data_file = "data_train_XY.pickle"
    ds = PascalSegmentation()
    if os.path.exists(data_file):
        # pickle files must be opened in binary mode
        X_, Y_ = cPickle.load(open(data_file, 'rb'))
    else:
        # load and prepare data
        data_train = load_pascal("train", sp_type="cpmc")
        data_train = make_cpmc_hierarchy(ds, data_train)
        data_train = discard_void(ds, data_train)
        X_, Y_ = data_train.X, data_train.Y
        cPickle.dump((X_, Y_), open(data_file, 'wb'), -1)

    # inverse-frequency class weights, rescaled to sum to 21 (the number of
    # Pascal VOC classes)
    class_weights = 1. / np.bincount(np.hstack(Y_))
    class_weights *= 21. / np.sum(class_weights)

    experiment_name = "latent_25_cpmc_%f_qpbo_n_slack_blub3" % C
    logger = SaveLogger(experiment_name + ".pickle", save_every=10)
    model = LatentNodeCRF(n_hidden_states=25, inference_method='qpbo',
                          class_weight=class_weights,
                          latent_node_features=False)
    if subgradient:
        ssvm = learners.LatentSubgradientSSVM(
            model, C=C, verbose=1, show_loss_every=10, logger=logger,
            n_jobs=-1, learning_rate=learning_rate, decay_exponent=1,
            momentum=0., max_iter=100000, decay_t0=100)
    else:
        latent_logger = SaveLogger("lssvm_" + experiment_name + "_%d.pickle",
                                   save_every=1)
        #base_ssvm = learners.OneSlackSSVM(
        #    model, verbose=2, C=C, max_iter=100, n_jobs=-1, tol=0.001,
        #    show_loss_every=200, inference_cache=50, logger=logger,
        #    cache_tol='auto', inactive_threshold=1e-5, break_on_bad=False,
        #    switch_to=('ogm', {'alg': 'dd'}))
        base_ssvm = learners.NSlackSSVM(
            model, verbose=4, C=C, n_jobs=-1, tol=0.1, show_loss_every=20,
            logger=logger, inactive_threshold=1e-8, break_on_bad=False,
            batch_size=36, inactive_window=10,
            switch_to=('ad3', {'branch_and_bound': True}))
        ssvm = learners.LatentSSVM(base_ssvm, logger=latent_logger,
                                   latent_iter=3)

    #warm_start = True
    warm_start = False
    if warm_start:
        ssvm = logger.load()
        ssvm.logger = SaveLogger(experiment_name + "_retrain.pickle",
                                 save_every=10)
        ssvm.max_iter = 10000
        ssvm.decay_exponent = 1
        #ssvm.decay_t0 = 1000
        #ssvm.learning_rate = 0.00001
        #ssvm.momentum = 0

    X_, Y_ = shuffle(X_, Y_)
    #ssvm.fit(data_train.X, data_train.Y)
    ssvm.fit(X_, Y_)
    #H_init = [np.hstack([y, np.random.randint(21, 26)]) for y in Y_]
    #ssvm.fit(X_, Y_, H_init=H_init)
    print("fit finished!")

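# The class-weight computation above is inverse label frequency, rescaled so
# the weights sum to the number of classes (21 for Pascal VOC). A small
# worked example with three made-up classes:
import numpy as np

Y_ = [np.array([0, 0, 0, 1]), np.array([1, 2])]  # hypothetical labels
counts = np.bincount(np.hstack(Y_))              # [3, 2, 1]
class_weights = 1. / counts                      # [0.333, 0.5, 1.0]
class_weights *= 3. / np.sum(class_weights)      # rescale so the sum is 3
print(class_weights)                             # -> [0.545, 0.818, 1.636]
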
def svm_on_segments(C=.1, learning_rate=.001, subgradient=True):
    # load and prepare data
    lateral = True
    latent = True
    test = False
    #data_train = load_data(which="piecewise")
    #data_train = add_edges(data_train, independent=False)
    #data_train = add_kraehenbuehl_features(data_train, which="train_30px")
    #data_train = add_kraehenbuehl_features(data_train, which="train")
    #if lateral:
    #    data_train = add_edge_features(data_train)
    data_train = load_data_global_probs(latent=latent)
    X_org_ = data_train.X
    #data_train = make_hierarchical_data(data_train, lateral=lateral,
    #                                    latent=latent, latent_lateral=True)
    data_train = discard_void(data_train, 21, latent_features=True)
    X_, Y_ = data_train.X, data_train.Y

    # remove edges
    if not lateral:
        X_org_ = [(x[0], np.zeros((0, 2), dtype=int)) for x in X_org_]

    if test:
        data_val = load_data('val', which="piecewise")
        data_val = add_edges(data_val, independent=False)
        data_val = add_kraehenbuehl_features(data_val)
        data_val = make_hierarchical_data(data_val, lateral=lateral,
                                          latent=latent)
        data_val = discard_void(data_val, 21)
        X_.extend(data_val.X)
        Y_.extend(data_val.Y)

    n_states = 21
    class_weights = 1. / np.bincount(np.hstack(Y_))
    class_weights *= 21. / np.sum(class_weights)

    experiment_name = "latent5_features_C%f_top_node" % C
    logger = SaveLogger(experiment_name + ".pickle", save_every=10)
    if latent:
        model = LatentNodeCRF(n_labels=n_states,
                              n_features=data_train.X[0][0].shape[1],
                              n_hidden_states=5,
                              inference_method='qpbo' if lateral else 'dai',
                              class_weight=class_weights,
                              latent_node_features=True)
        if subgradient:
            ssvm = learners.LatentSubgradientSSVM(
                model, C=C, verbose=1, show_loss_every=10, logger=logger,
                n_jobs=-1, learning_rate=learning_rate, decay_exponent=1,
                momentum=0., max_iter=100000)
        else:
            latent_logger = SaveLogger("lssvm_" + experiment_name +
                                       "_%d.pickle", save_every=1)
            base_ssvm = learners.OneSlackSSVM(
                model, verbose=2, C=C, max_iter=100000, n_jobs=-1, tol=0.001,
                show_loss_every=200, inference_cache=50, logger=logger,
                cache_tol='auto', inactive_threshold=1e-5,
                break_on_bad=False, switch_to_ad3=True)
            ssvm = learners.LatentSSVM(base_ssvm, logger=latent_logger)
        warm_start = False
        if warm_start:
            ssvm = logger.load()
            ssvm.logger = SaveLogger(experiment_name + "_retrain.pickle",
                                     save_every=10)
            ssvm.max_iter = 100000
            ssvm.learning_rate = 0.00001
            ssvm.momentum = 0
    else:
        #model = GraphCRF(n_states=n_states,
        #                 n_features=data_train.X[0][0].shape[1],
        #                 inference_method='qpbo' if lateral else 'dai',
        #                 class_weight=class_weights)
        model = EdgeFeatureGraphCRF(n_states=n_states,
                                    n_features=data_train.X[0][0].shape[1],
                                    inference_method='qpbo' if lateral
                                    else 'dai',
                                    class_weight=class_weights,
                                    n_edge_features=4,
                                    symmetric_edge_features=[0, 1],
                                    antisymmetric_edge_features=[2])
        ssvm = learners.OneSlackSSVM(
            model, verbose=2, C=C, max_iter=100000, n_jobs=-1, tol=0.0001,
            show_loss_every=200, inference_cache=50, logger=logger,
            cache_tol='auto', inactive_threshold=1e-5, break_on_bad=False)
        #ssvm = logger.load()

    X_, Y_ = shuffle(X_, Y_)
    #ssvm.fit(data_train.X, data_train.Y)
    #ssvm.fit(X_, Y_, warm_start=warm_start)
    ssvm.fit(X_, Y_)
    print("fit finished!")

int(overlap))
output_folder = '../output_testset'

# read all images from input folder
#pages_data = bookfunctions.get_pages_and_data_from_book(input_folder)
pages_data = bookfunctions.get_pages_and_data_from_folder(input_folder)
#pages_data = pages_data[379:381]
true_labels = bookfunctions.get_all_labels(pages_data, number_of_blocks,
                                           overlap)
features = bookfunctions.get_features_from_pages_data(pages_data,
                                                      number_of_blocks,
                                                      overlap, svm_path)

# put features in ssvm
logger = SaveLogger(ssvm_path)
ssvm = logger.load()

predicted_labels = np.array(ssvm.predict(features))
prfs = precision_recall_fscore_support(true_labels.flatten(),
                                       predicted_labels.flatten())
cm = confusion_matrix(true_labels.flatten(), predicted_labels.flatten())
# NOTE: the original snippet breaks off after "Support:"; the tail of the
# format string and the argument tuple below are a plausible reconstruction.
precision, recall, fscore, support = prfs
print("""
Precision:
    Image: %f
    Text: %f
Recall:
    Image: %f
    Text: %f
Fscore:
    Image: %f
    Text: %f
Support:
    Image: %d
    Text: %d
""" % (precision[0], precision[1], recall[0], recall[1],
       fscore[0], fscore[1], support[0], support[1]))

import numpy  # needed below; its import was cut off in the original extract

from pystruct.learners import (LatentSSVM, OneSlackSSVM, SubgradientSSVM,
                               FrankWolfeSSVM)
from pystruct.utils import make_grid_edges, SaveLogger
from skimage import img_as_ubyte
from matplotlib import pyplot as plt
from sklearn.metrics import (accuracy_score, precision_score, recall_score,
                             f1_score)
from skimage.transform import resize
from skimage.filters import threshold_otsu

# Pixel Classes - Black, White
horsePixelClasses = [numpy.array([0., 0., 0.]), numpy.array([1., 1., 1.])]

# Load trained Model
horseModelLogger = SaveLogger('save/imagesegmentation-horse-hog_96_lbp.model',
                              save_every=1)
horseCRF = horseModelLogger.load()


######################################
# Compute S_0 score
######################################
def foregroundQualityScore(a, b):
    TP = TN = FP = FN = 0.0
    for i in range(0, len(a)):
        if a[i] == b[i]:
            if a[i] == 0:
                TN += 1
            else:
                TP += 1
        else:
            # The original snippet breaks off at this `else`; the remaining
            # two cases are filled in the standard way (a = truth, b = pred).
            if a[i] == 0:
                FP += 1  # predicted foreground where truth is background
            else:
                FN += 1  # predicted background where truth is foreground
    # S_0 foreground-quality (overlap) score: TP / (TP + FP + FN)
    return TP / (TP + FP + FN)

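# A quick sanity check of the score above on toy masks, given the completion
# sketched in the function (flattened binary labels; values made up):
a = [1, 1, 0, 0, 1]  # ground truth: 3 foreground, 2 background pixels
b = [1, 0, 0, 1, 1]  # prediction: one missed pixel, one false alarm
# TP = 2, FN = 1, FP = 1  ->  S_0 = 2 / (2 + 1 + 1) = 0.5
print(foregroundQualityScore(a, b))  # 0.5
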
# Script to predict edge map

# Importing Libraries
from pystruct.utils import SaveLogger
import scipy.io as sio

# Getting parameters
params = sio.loadmat('/home/dell/Desktop/sarvaswa/objectness-release-v2.2/Trial_Pascal/testSet/exp1/salprop-v1.0/matpy/params.mat')
matpyfiles = params['params'][0]['matpyfiles'][0]
featureFile = matpyfiles['featureFile'][0][0][0]
edgesFile = matpyfiles['edgesFile'][0][0][0]
predictFile = matpyfiles['predictFile'][0][0][0]
modelFile = params['params'][0]['modelFileCRF'][0][0]

# Loading required files
modelLogger = SaveLogger(modelFile)
crf = modelLogger.load()
feat = sio.loadmat(featureFile)
feat = feat['feat']
edges = sio.loadmat(edgesFile)
edges = edges['edges'] - 1  # MATLAB edge indices are 1-based; shift to 0-based

# Make prediction
inData = [(feat, edges)]
prediction = crf.predict(inData)

# Save prediction
sio.savemat(predictFile, mdict={'prediction': prediction})

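# The (feat, edges) pair handed to crf.predict above follows pystruct's
# GraphCRF input convention (assuming the pickled model is a GraphCRF-style
# model): an (n_nodes, n_features) float array plus an (n_edges, 2) integer
# array of node-index pairs. A shape-only sketch with dummy data (sizes here
# are made up):
import numpy as np

n_nodes, n_features = 5, 3
feat = np.random.rand(n_nodes, n_features)          # node features
edges = np.array([[0, 1], [1, 2], [2, 3], [3, 4]])  # 0-based node pairs
inData = [(feat, edges)]                            # one graph per list entry
# prediction = crf.predict(inData)  # -> list with one (n_nodes,) label array
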
depth = load_data(usc_shadow_path + "Image Pairs", "_z")
gt = load_data(usc_shadow_path + "Ground Truth", "_gt")

# X, Y = generate_crosses_explicit(n_samples=50, noise=10)
X = rgb
Y = gt / 255

crf = GridCRF(neighborhood=4, inference_method='ad3')
# clf = ssvm.OneSlackSSVM(model=crf, C=100, inference_cache=10,
#                         tol=.1, verbose=True, max_iter=10,
#                         logger=SaveLogger("ShadowLogger_%d", save_every=1,
#                                           verbose=1))
# clf.fit(X, Y)
logger = SaveLogger('ShadowLogger_9')
clf = logger.load()
Y_pred = np.array(clf.predict(X))
print("overall accuracy (training set): %f" % clf.score(X, Y))

# plot one example
for i in range(5):
    x, y, y_pred = X[0, :, :, i], Y[0, :, :, i], Y_pred[i]
    y_pred = y_pred.reshape(x.shape[:2])
    fig, plots = plt.subplots(1, 4, figsize=(12, 4))
    plots[0].matshow(y)
    plots[0].set_title("ground truth")
    plots[1].matshow(x)
    plots[1].set_title("input")
    plots[2].matshow(y_pred)
    plots[2].set_title("prediction")