# --- Reconstructed from a whitespace-mangled paste: the original source had
# --- its newlines stripped, which made the whole section a single comment
# --- line. Statement boundaries restored from the inline comment markers.

# data controls, can go up to 2000000 total for full dataset
train, val, test = 75000, 10000, 15000
# train, val, test = 1000000, 200000, 200000

# network architecture parameters
Phi_sizes, F_sizes = (100, 100, 128), (100, 100, 100)
# Phi_sizes, F_sizes = (100, 100, 256), (100, 100, 100)

# network training parameters
num_epoch = 5
batch_size = 500

###############################################################################

# load data: total sample size is the sum of the three split sizes
X, y = qg_jets.load(train + val + test)

# ignore pid information: keep only the first 3 per-particle features
# (variable names below suggest columns are (pt, y, phi) — confirm
# against the qg_jets dataset documentation)
X = X[:, :, :3]

# convert labels to categorical one-hot vectors (2 classes: quark/gluon)
Y = to_categorical(y, num_classes=2)

print('Loaded quark and gluon jets')

# preprocess by centering jets and normalizing pts, in place, per jet:
#  - mask selects real particles (pt > 0); zero rows are padding
#  - subtract the pt-weighted (y, phi) centroid from real particles
#  - divide pts by the jet's total pt so they sum to 1
for x in X:
    mask = x[:, 0] > 0
    yphi_avg = np.average(x[mask, 1:3], weights=x[mask, 0], axis=0)
    x[mask, 1:3] -= yphi_avg
    x[mask, 0] /= x[:, 0].sum()
# --- Reconstructed from a whitespace-mangled paste: the original source had
# --- its newlines stripped, which made the whole section a single comment
# --- line. Statement boundaries restored from the inline comment markers.

# data controls
num_data = 100000
val_frac, test_frac = 0.1, 0.15

# network architecture parameters
ppm_sizes = (100, 100)
dense_sizes = (100, 100)

# network training parameters
num_epoch = 5
batch_size = 100

################################################################################

# load data
X, y = qg_jets.load(num_data=num_data)

# ignore pid information: keep only the first 3 per-particle features
# (variable names below suggest columns are (pt, y, phi) — confirm
# against the qg_jets dataset documentation)
X = X[:, :, :3]

# convert labels to categorical one-hot vectors (2 classes: quark/gluon)
Y = to_categorical(y, num_classes=2)

print()
print('Loaded quark and gluon jets')

# preprocess by centering jets, in place, per jet:
#  - mask selects real particles (pt > 0); zero rows are padding
#  - subtract the pt-weighted (y, phi) centroid from real particles
# NOTE(review): unlike the sibling setup section, no pt-normalization
# line (x[mask, 0] /= x[:, 0].sum()) is visible here — it may appear
# later in the file; confirm before assuming pts are normalized.
for x in X:
    mask = x[:, 0] > 0
    yphi_avg = np.average(x[mask, 1:3], weights=x[mask, 0], axis=0)
    x[mask, 1:3] -= yphi_avg