import logging
from os import listdir, mkdir
from os.path import exists

# `utils`, `setup`, and `device` are provided by the surrounding module.


def init(args):
    if not exists(args.dir_path + '/particlenet/'):
        mkdir(args.dir_path + '/particlenet/')

    # register the ParticleNet subdirectories on the args object,
    # creating them on disk if needed
    args_dict = vars(args)
    dirs = ['cmodels', 'closses', 'cargs', 'couts']
    for dir in dirs:
        args_dict[dir + '_path'] = args.dir_path + '/particlenet/' + dir + '/'
        if not exists(args_dict[dir + '_path']):
            mkdir(args_dict[dir + '_path'])
    args = utils.objectview(args_dict)

    args.datasets_path = args.dir_path + '/datasets/'
    args.outs_path = args.dir_path + '/outs/'

    setup.init_logging(args)

    prev_models = [f[:-4] for f in listdir(args.cargs_path)]  # strip the ".txt" suffix

    if args.name in prev_models:
        logging.info("name already used")
        # if not args.load_model:
        #     sys.exit()
    else:
        try:
            mkdir(args.closses_path + args.name)
        except FileExistsError:
            logging.debug("losses dir exists")

        try:
            mkdir(args.cmodels_path + args.name)
        except FileExistsError:
            logging.debug("models dir exists")

    if not args.load_model:
        # fresh run: save the full args dict for reproducibility
        with open(args.cargs_path + args.name + ".txt", "w+") as f:
            f.write(str(vars(args)))
    else:
        # resuming: restore the saved args, but keep the current epoch settings
        temp = args.start_epoch, args.num_epochs
        with open(args.cargs_path + args.name + ".txt", "r") as f:
            args_dict = vars(args)
            load_args_dict = eval(f.read())
            for key in load_args_dict:
                args_dict[key] = load_args_dict[key]
            args = utils.objectview(args_dict)
        args.load_model = True
        args.start_epoch, args.num_epochs = temp

    args.device = device
    return args
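# Hedged usage sketch (not from the source): `init` expects an argparse-style
# namespace with at least these fields; the names and values here are
# illustrative assumptions.
from argparse import Namespace

args = init(Namespace(dir_path=".", name="test_run", load_model=False,
                      start_epoch=-1, num_epochs=100))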
def load_args(args):
    if args.load_model:
        if args.start_epoch == -1:
            # -1 means "resume from the latest saved epoch"
            prev_models = [
                int(f[:-3].split('_')[-1])
                for f in listdir(args.models_path + args.name + '/')
            ]
            if len(prev_models):
                args.start_epoch = max(prev_models)
            else:
                logging.debug("No model to load from")
                args.start_epoch = 0
                args.load_model = False

        if args.start_epoch == 0:
            args.load_model = False
    else:
        args.start_epoch = 0

    if not args.load_model:
        with open(args.args_path + args.name + ".txt", "w+") as f:
            f.write(str(vars(args)))
    elif not args.override_args:
        temp = args.start_epoch, args.num_epochs
        with open(args.args_path + args.name + ".txt", "r") as f:
            args_dict = vars(args)
            load_args_dict = eval(f.read())
            for key in load_args_dict:
                args_dict[key] = load_args_dict[key]
            args = utils.objectview(args_dict)
        args.load_model = True
        args.start_epoch, args.num_epochs = temp

    return args
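# Minimal illustration (checkpoint naming assumed, e.g. "G_120.pt") of the
# "start_epoch == -1 -> resume from latest" convention in load_args above:
files = ['G_40.pt', 'G_80.pt', 'G_120.pt']
print(max(int(f[:-3].split('_')[-1]) for f in files))  # -> 120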
def init_project_dirs(args):
    if args.dir_path == "":
        if args.n:
            args.dir_path = "/graphganvol/mnist_graph_gan/jets"
        elif args.lx:
            args.dir_path = "/eos/user/r/rkansal/mnist_graph_gan/jets"
        else:
            args.dir_path = dirname(realpath(__file__))

    args_dict = vars(args)
    dirs = [
        'models', 'losses', 'args', 'figs', 'datasets', 'err', 'evaluation',
        'outs', 'noise'
    ]
    for dir in dirs:
        args_dict[dir + '_path'] = args.dir_path + '/' + dir + '/'
        if not exists(args_dict[dir + '_path']):
            mkdir(args_dict[dir + '_path'])
    args = utils.objectview(args_dict)

    return args
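# Hedged sketch of how these setup helpers compose (the call order is an
# assumption, not confirmed by the source): project-level dirs first, then the
# experiment-specific args handling.
args = init_project_dirs(args)
args = load_args(args)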
import numpy as np
import torch
from torch.distributions.normal import Normal
from torch.utils.data import DataLoader

import utils
from jets_dataset import JetsDataset  # repo-local module (name assumed)

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# realw1m = [0.00584264, 0.00556786, 0.0014096]
# realw1std = [0.00214083, 0.00204827, 0.00051136]

batch_size = 128
normal_dist = Normal(torch.tensor(0.).to(device), torch.tensor(0.2).to(device))

dir = './'
# dir = '/graphganvol/mnist_graph_gan/jets/'

args = utils.objectview({
    'dataset_path': dir + 'datasets/',
    'num_hits': 30,
    'coords': 'polarrel',
    'latent_node_size': 32,
    'clabels': 0,
    'jets': 'g',
    'norm': 1,
    'mask': False
})
X = JetsDataset(args)
labels = X[:][1]
# X_loaded = DataLoader(X, shuffle=True, batch_size=32, pin_memory=True)
X = X[:][0]
N = len(X)

rng = np.random.default_rng()
num_samples = 100000
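# Hedged sketch (assumed usage): with normal_dist as the per-node latent prior,
# generator input noise for one batch would be sampled as
noise = normal_dist.sample((batch_size, args.num_hits, args.latent_node_size))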
from torch.utils.data import TensorDataset
from tqdm import tqdm

import evaluation

# saved args files for the three trained models (gluon, top, light-quark jets)
args_txt = {
    'g': 'args/218_g30_mask_c_dea_no_pos_diffs.txt',
    't': 'args/206_t30_mask_c_lrx2_dea_no_pos_diffs.txt',
    'q': 'args/230_q30_mask_c_lrx05_dea_no_pos_diffs.txt'
}
samples_dict = {'g': {}, 't': {}, 'q': {}}

for dataset in samples_dict.keys():
    print(dataset)

    args = eval(open(args_txt[dataset]).read())
    args['device'] = torch.device('cuda')
    args['datasets_path'] = './datasets/'
    args['fpnd_batch_size'] = 512
    args['evaluation_path'] = './evaluation/'
    args = utils.objectview(args)

    # load the pretrained ParticleNet classifier and the real-sample
    # activation statistics
    C, mu2, sigma2 = evaluation.load(args)

    X = JetsDataset(args, train=False)
    rng = np.random.default_rng()
    X_loaded = DataLoader(TensorDataset(torch.tensor(X[:50000][0])), batch_size=256)

    C.eval()
    for i, gen_jets in tqdm(enumerate(X_loaded), total=len(X_loaded)):
        gen_jets = gen_jets[0]
        if args.mask:
            # fourth feature is the mask: zero out masked particles,
            # keep the three kinematic features
            mask = gen_jets[:, :, 3:4] >= 0
            gen_jets = (gen_jets * mask)[:, :, :3]
        if i == 0:
            ...  # remainder of the cell is missing in the source
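# Hedged sketch (the standard Fréchet/FID formula, not necessarily the exact
# code behind the FPND score downstream of evaluation.load): the distance
# between Gaussians N(mu1, sigma1) and N(mu2, sigma2) fitted to ParticleNet
# activations.
from scipy import linalg


def frechet_distance(mu1, sigma1, mu2, sigma2):
    covmean = linalg.sqrtm(sigma1 @ sigma2)  # matrix square root of the product
    if np.iscomplexobj(covmean):
        covmean = covmean.real  # drop tiny imaginary parts from numerical error
    diff = mu1 - mu2
    return diff @ diff + np.trace(sigma1 + sigma2 - 2 * covmean)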
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

batch_size = 128
normal_dist = Normal(torch.tensor(0.).to(device), torch.tensor(0.2).to(device))

# dir = './'
dir = '/graphganvol/mnist_graph_gan/jets/'

args = {
    'dataset_path': dir + 'datasets/',
    'num_hits': 30,
    'coords': 'polarrel',
    'latent_node_size': 32
}
X = JetsDataset(utils.objectview(args))
N = len(X)

name = '7_batch_size_128_coords_polarrel'
full_path = dir + 'models/' + name + '/'

rng = np.random.default_rng()
num_samples = np.array([100, 1000, 10000])
num_batches = np.array(100000 / num_samples, dtype=int)
num_batches

epochs = 980
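# Worked out: for a fixed total of 100,000 generated jets,
# 100000 / [100, 1000, 10000] -> [1000, 100, 10] batches per sample size.
assert list(num_batches) == [1000, 100, 10]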
s1 = s1[:-2] + "]"  # swap the trailing ", " for a closing bracket
s2 = s2[:-2] + "]"
print(s1)
print(s2)

# indices of the 20 models with the smallest W1 norms, scaled by 5
# (presumably the epoch save interval)
np.argsort(np.linalg.norm(w1m[:, :3], axis=1))[:20] * 5

# realw1m = [0.00584264, 0.00556786, 0.0014096]
# realw1std = [0.00214083, 0.00204827, 0.00051136]

batch_size = 128
normal_dist = Normal(torch.tensor(0.).to(device), torch.tensor(0.2).to(device))

dir = './'

# this manually built args object is superseded by the saved args file below
args = utils.objectview({
    'datasets_path': dir + 'datasets/',
    'figs_path': dir + 'figs/' + str(model),
    'node_feat_size': 3,
    'num_hits': 30,
    'coords': 'polarrel',
    'latent_node_size': 32,
    'clabels': 1,
    'jets': 'g',
    'norm': 1,
    'mask': False,
    'mask_manual': False,
    'real_only': False,
    'mask_feat': False
})

args = eval(open("./args/" + "179_t30_graphcnngan_knn_20.txt").read())
args['device'] = device
args['datasets_path'] = dir + 'datasets/'
# args['mask_feat'] = False
# args['mask_learn'] = False
# args['mask_c'] = False
args['figs_path'] = dir + 'figs/' + str(model) + '/' + str(epoch)
args = utils.objectview(args)
args

X = JetsDataset(args)
loadX = DataLoader(X, batch_size=128)
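# Worked example of the trimming above: the trailing ", " left over from
# string building is replaced by "]":
s = "[0.1, 0.2, "
assert s[:-2] + "]" == "[0.1, 0.2]"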
def init(args):
    torch.manual_seed(4)
    torch.autograd.set_detect_anomaly(True)

    args.model_path = args.dir_path + '/models/'
    args.losses_path = args.dir_path + '/losses/'
    args.args_path = args.dir_path + '/args/'
    args.figs_path = args.dir_path + '/figs/'
    args.dataset_path = args.dir_path + '/datasets/'
    args.err_path = args.dir_path + '/err/'
    args.eval_path = args.dir_path + '/evaluation/'
    args.noise_path = args.dir_path + '/noise/'

    if not exists(args.model_path):
        mkdir(args.model_path)
    if not exists(args.losses_path):
        mkdir(args.losses_path)
    if not exists(args.args_path):
        mkdir(args.args_path)
    if not exists(args.figs_path):
        mkdir(args.figs_path)
    if not exists(args.err_path):
        mkdir(args.err_path)
    # if not exists(args.noise_path):
    #     mkdir(args.noise_path)
    if not exists(args.dataset_path):
        mkdir(args.dataset_path)

    prev_models = [f[:-4] for f in listdir(args.args_path)]  # strip the ".txt" suffix

    if args.name in prev_models:
        print("name already used")
        # if not args.load_model:
        #     sys.exit()
    else:
        mkdir(args.losses_path + args.name)
        mkdir(args.model_path + args.name)
        mkdir(args.figs_path + args.name)

    if args.load_model:
        if args.start_epoch == -1:
            # -1 means "resume from the latest saved epoch"
            prev_models = [
                int(f[:-3].split('_')[-1])
                for f in listdir(args.model_path + args.name + '/')
            ]
            if len(prev_models):
                args.start_epoch = max(prev_models)
            else:
                print("No model to load from")
                args.start_epoch = 0
                args.load_model = False

        if args.start_epoch == 0:
            args.load_model = False
    else:
        args.start_epoch = 0

    if not args.load_model:
        with open(args.args_path + args.name + ".txt", "w+") as f:
            f.write(str(vars(args)))
    elif not args.override_args:
        temp = args.start_epoch, args.num_epochs
        with open(args.args_path + args.name + ".txt", "r") as f:
            args_dict = vars(args)
            load_args_dict = eval(f.read())
            for key in load_args_dict:
                args_dict[key] = load_args_dict[key]
            args = utils.objectview(args_dict)
        args.load_model = True
        args.start_epoch, args.num_epochs = temp

    args.device = device
    return args
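# Hedged side note, not the repo's behavior: because init writes the args file
# via str(vars(args)) before attaching non-literal fields like the torch
# device, ast.literal_eval should be a safer drop-in for eval when reading it
# back, assuming the saved dict contains only Python literals.
import ast

with open(args_path + name + ".txt", "r") as f:  # args_path, name: illustrative
    load_args_dict = ast.literal_eval(f.read())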
import evaluation
import torch
from torch.utils.data import DataLoader

import utils
from jets_dataset import JetsDataset  # repo-local module (name assumed)

num_samples = 50000

# collect unnormalized real jets (and their masks) for each jet class
samples_dict = {}
for dataset in ['g', 't', 'q']:
    args = utils.objectview({
        'datasets_path': 'datasets/',
        'ttsplit': 0.7,
        'node_feat_size': 3,
        'num_hits': 30,
        'coords': 'polarrel',
        'dataset': 'jets',
        'clabels': 0,
        'jets': dataset,
        'norm': 1,
        'mask': True,
        'real_only': False
    })
    X = JetsDataset(args, train=False)
    X = X[:][0]
    X_rn, mask_real = utils.unnorm_data(args, X[:num_samples].cpu().detach().numpy(), real=True)
    samples_dict[dataset] = (X_rn, mask_real)

efps = {}
for dataset in samples_dict.keys():
    ...  # remainder of the cell is missing in the source
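# Hedged sketch of what the truncated loop above presumably fills in: energy
# flow polynomials (EFPs) per jet class via the energyflow package. The EFPSet
# selection, the coordinate order of X_rn, and the column reordering are all
# assumptions, not read from the source.
import energyflow as ef

efpset = ef.EFPSet('n==4', 'd==4', 'p==1', measure='hadr', beta=1,
                   normed=None, coords='ptyphim')
for dataset, (X_rn, mask_real) in samples_dict.items():
    # assuming particle features are ordered (eta, phi, pt): reorder to
    # (pt, y, phi) as the 'ptyphim' measure expects; zero-pt padding is inert
    jets = X_rn[..., [2, 0, 1]]
    efps[dataset] = efpset.batch_compute(jets)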
def init(args):
    torch.manual_seed(4)
    torch.autograd.set_detect_anomaly(True)

    args.model_path = args.dir_path + '/models/'
    args.losses_path = args.dir_path + '/losses/'
    args.args_path = args.dir_path + '/args/'
    args.figs_path = args.dir_path + '/figs/'
    args.dataset_path = args.dir_path + '/raw/' if not args.sparse_mnist else args.dir_path + '/mnist_dataset/'
    args.err_path = args.dir_path + '/err/'
    args.eval_path = args.dir_path + '/evaluation/'
    args.noise_path = args.dir_path + '/noise/'

    if not exists(args.model_path):
        mkdir(args.model_path)
    if not exists(args.losses_path):
        mkdir(args.losses_path)
    if not exists(args.args_path):
        mkdir(args.args_path)
    if not exists(args.figs_path):
        mkdir(args.figs_path)
    if not exists(args.err_path):
        mkdir(args.err_path)
    if not exists(args.noise_path):
        mkdir(args.noise_path)

    if not exists(args.dataset_path):
        mkdir(args.dataset_path)
        print("Downloading dataset")

        if not args.sparse_mnist:
            # MNIST superpixels graphs
            import tarfile
            try:  # python2
                from urllib import urlretrieve
            except ImportError:  # python3
                from urllib.request import urlretrieve

            # url = 'http://ls7-www.cs.uni-dortmund.de/cvpr_geometric_dl/mnist_superpixels.tar.gz'
            url = 'https://ls7-www.cs.tu-dortmund.de/fileadmin/ls7-www/misc/cvpr/mnist_superpixels.tar.gz'
            file_tmp = urlretrieve(url)[0]
            tar = tarfile.open(file_tmp)
            tar.extractall(args.dataset_path)
        else:
            # sparse MNIST from CSVs
            import requests
            r = requests.get('https://pjreddie.com/media/files/mnist_train.csv', allow_redirects=True)
            open(args.dataset_path + 'mnist_train.csv', 'wb').write(r.content)
            r = requests.get('https://pjreddie.com/media/files/mnist_test.csv', allow_redirects=True)
            open(args.dataset_path + 'mnist_test.csv', 'wb').write(r.content)

        print("Downloaded dataset")

    prev_models = [f[:-4] for f in listdir(args.args_path)]  # strip the ".txt" suffix

    if args.name in prev_models:
        print("name already used")
        # if not args.load_model:
        #     sys.exit()
    else:
        mkdir(args.losses_path + args.name)
        mkdir(args.model_path + args.name)
        mkdir(args.figs_path + args.name)

    if args.load_model:
        if args.start_epoch == -1:
            # -1 means "resume from the latest saved epoch"
            prev_models = [
                int(f[:-3].split('_')[-1])
                for f in listdir(args.model_path + args.name + '/')
            ]
            if len(prev_models):
                args.start_epoch = max(prev_models)
            else:
                print("No model to load from")
                args.start_epoch = 0
                args.load_model = False
    else:
        args.start_epoch = 0

    if not args.load_model:
        with open(args.args_path + args.name + ".txt", "w+") as f:
            f.write(str(vars(args)))
    elif not args.override_args:
        temp = args.start_epoch, args.num_epochs
        with open(args.args_path + args.name + ".txt", "r") as f:
            args_dict = vars(args)
            load_args_dict = eval(f.read())
            for key in load_args_dict:
                args_dict[key] = load_args_dict[key]
            args = utils.objectview(args_dict)
        args.load_model = True
        args.start_epoch, args.num_epochs = temp

    args.device = device
    return args
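# Hedged side note: for large files, requests supports streaming the download
# instead of holding the full body in memory; a sketch of that alternative,
# assuming the same URLs as above.
import requests

with requests.get('https://pjreddie.com/media/files/mnist_train.csv', stream=True) as r:
    r.raise_for_status()
    with open('mnist_train.csv', 'wb') as f:
        for chunk in r.iter_content(chunk_size=1 << 20):  # 1 MiB chunks
            f.write(chunk)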
    plt.ylabel('True label')
    plt.xlim(-1, len(target_names))
    plt.ylim(-1, len(target_names))
    plt.xlabel('Predicted label\naccuracy={:0.4f}; misclass={:0.4f}'.format(accuracy, misclass))
    plt.tight_layout()
    plt.savefig(fname + '.pdf')
    plt.close(fig)
    return fig, ax


args = utils.objectview({
    'mask': False,
    'datasets_path': dir_path + 'datasets/',
    'node_feat_size': 3,
    'num_hits': 30
})
test_dataset = JetsClassifierDataset(args, train=False)
print("dataset loaded")
test_loaded = DataLoader(test_dataset)

C = ParticleNet(args.num_hits, args.node_feat_size, num_classes=5, device=device).to(device)
C.load_state_dict(
    torch.load(dir_path + 'particlenet/cmodels/c5_pnet_adam/C_18.pt',
               map_location=device))  # map_location kwarg assumed to close the truncated call
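# Hedged sketch (not from the source): accumulating test-set predictions to
# feed the confusion-matrix plot above. Assumes C(x) returns per-class logits
# and that JetsClassifierDataset yields (jet, label) pairs.
import numpy as np

C.eval()
preds, labels = [], []
with torch.no_grad():
    for x, y in test_loaded:
        logits = C(x.to(device))
        preds.append(logits.argmax(dim=1).cpu().numpy())
        labels.append(y.numpy())
preds = np.concatenate(preds)
labels = np.concatenate(labels)
accuracy = (preds == labels).mean()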