from data_pkg import data_fns as df
from functionals_pkg import feature_fns as ff
import numpy as np
import pickle
import os

print "TRAINING : "

path_videos = '/usr/local/data/sejacob/ANOMALY/data/UCSD/UCSD_Anomaly_Dataset.v1p2/UCSDped1/Train'
train_test = 'Train'

# Build spatio-temporal cuboids (11 x 11 x 5) from the training videos.
list_cuboids, all_cuboids = df.make_cuboids_of_videos(path_videos, train_test, 11, 11, 5)

# Normalise the stacked cuboids with the training mean and standard deviation.
mean = all_cuboids.mean(axis=0)
std = all_cuboids.std(axis=0)
all_cuboids = (all_cuboids - mean) / std

# Persist the normalisation statistics and the normalised cuboids for the test-side scripts.
np.save(os.path.join('data_stored', 'cuboid_train_mean.npy'), mean)
np.save(os.path.join('data_stored', 'cuboid_train_std.npy'), std)
np.save(os.path.join('data_stored', 'all_cuboids_normed.npy'), all_cuboids)

with open(os.path.join('data_stored', 'list_cuboids.pkl'), 'wb') as f:
    pickle.dump(list_cuboids, f)

# Extract local features (length 13) from the per-video cuboid lists, using the stored statistics.
len_local_feats = 13
feats_local = ff.make_all_local_feats(len_local_feats, all_cuboids=list_cuboids, mean=mean, std=std)
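# --------------------------------------------------------------------------
# Minimal sketch (not part of the training script): how the saved statistics
# can be reloaded and applied to a new batch of cuboids. Reusing the training
# mean/std, rather than recomputing them on test data, keeps train and test
# cuboids on the same scale, which is why the test-side script below passes
# mean_data and std_data into the anomaly-setting step. The helper name
# `normalise_cuboids` and the `eps` guard are illustrative assumptions.
# --------------------------------------------------------------------------
import os
import numpy as np

mean = np.load(os.path.join('data_stored', 'cuboid_train_mean.npy'))
std = np.load(os.path.join('data_stored', 'cuboid_train_std.npy'))

def normalise_cuboids(cuboids, mean, std, eps=1e-8):
    """Apply the stored training mean/std to a new batch of cuboids.

    `eps` guards against division by zero for constant pixels; the original
    script divides by `std` directly.
    """
    return (np.asarray(cuboids, dtype=np.float64) - mean) / (std + eps)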
print "LOADING CUBOIDS AND FEATS" all_cuboids_normed = np.load(os.path.join('data_stored_temporal', 'all_cuboids_normed.npy')) all_local_feats_normed = np.load(os.path.join('data_stored_temporal', 'all_local_feats_normed.npy')) mean_data = np.load(os.path.join('data_stored_temporal', 'cuboid_train_mean.npy')) std_data = np.load(os.path.join('data_stored_temporal', 'cuboid_train_std.npy')) path_videos = '/usr/local/data/sejacob/ANOMALY/data/UCSD/UCSD_Anomaly_Dataset.v1p2/UCSDped1/Test' train_test = 'Test' size_axis = 12 n_frames = 5 list_cuboids_test, _, _ = df.make_cuboids_of_videos(path_videos, train_test, size_axis, size_axis, n_frames) print "GET LOCAL FEATURE THRESHOLDS" alpha_local, theta_local, mean_local, cov_inv_local = models.make_thresholds(all_local_feats_normed, 0.3) del(all_local_feats_normed) print "#######################" print "LOCAL FEATURES" print "#######################" print "ALPHA_LOCAL:", alpha_local, " THETA_LOCAL:", theta_local print "#######################" thresholds_local = [alpha_local, theta_local] print "SETTING LOCAL ANOMS" list_cuboids_test = ff.set_anomaly_status_local_temporal(list_cuboids_test, thresholds_local, mean_local, cov_inv_local, mean_data, std_data) print "SAVING LOCAL ANOMS" with open(os.path.join('data_stored_temporal', 'list_cuboids_test_local.pkl'), 'wb') as f:
print "$$$$$$$$$$$$$$$$$$$$$$$" print "FITTING THE KMEANS OBJECT" print "$$$$$$$$$$$$$$$$$$$$$$$" kmeans_obj = KMeans(n_clusters=int(metric['-n']), verbose=1,n_jobs=-1) kmeans_obj.fit(all_global_feats) print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" print "LOADING THE TRAIN LIST CUBOIDS:" print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" train = 'Train' list_cuboids_train, _, _ = df.make_cuboids_of_videos(path_videos_train, train, size_axis, size_axis, n_frames) print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" print "CREATE DICTIONARY:" print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" dictionary = ff.make_dictionary(list_cuboids_train, kmeans_obj, model,mean_data, std_data,mean_feats,std_feats) del(list_cuboids_train) print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" print "NUMBER OF DICTIONARY ENTRIES:", len(dictionary) print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" print "Making rows into tuples" dictionary = [tuple(row) for row in dictionary]