# infoset = np.append(np.load(datadir + 'DS3_pad/' + 'track_infoset.npy'),
#                     np.load(datadir + 'DS4_pad/' + 'track_infoset.npy'), axis=0)

dataname = 'DS3/'
ocdbdir = DEFAULTS.ocdbdir
datadir = DEFAULTS.datadir + dataname
plotdir = DEFAULTS.plotdir

X = np.load(datadir + 'tracklet_dataset.npy') / 1024   # normalise raw ADC values
infoset = np.load(datadir + 'tracklet_infoset.npy')
print("Loaded: %s \n" % datadir)

(X, infoset), (Xv, valid_infoset), (Xt, test_infoset) = DATA.TVT_split_(X, infoset)
T = infoset[:, 0].astype(int)          # column 0 is the e/pi label
Tv = valid_infoset[:, 0].astype(int)
Tt = test_infoset[:, 0].astype(int)
I = infoset
Iv = valid_infoset
It = test_infoset

(cs_1, cs_2, d1_1, d1_2) = (8, 16, 256, 128)   # conv filter and dense layer sizes
dropout = 0.45
stamp = datetime.datetime.now().strftime("%d-%m-%H%M%S") + "_"
mname = "track_V_C_chamber_dropout%.2f" % dropout
tensorboard, csvlogger = LOG.logger_(dataname, stamp, mname)

input = Input(shape=X.shape[1:], name="X-in")
from tensorflow.keras.layers import Input, Dense, Dropout, Flatten, Conv2D, MaxPool2D, concatenate, GaussianNoise

matplotlib.rcParams.update({'font.size': 16})
matplotlib.rcParams['text.usetex'] = True

dataname = 'all_tracks_6_tracklets_even_chamber_calib/'
ocdbdir = DEFAULTS.ocdbdir
datadir = DEFAULTS.datadir + dataname
plotdir = DEFAULTS.plotdir

# one row per tracklet: reshape tracks into 17x24 pad images and repeat the
# track-level info six times (six tracklet layers per track)
dataset = np.load(datadir + 'track_dataset.npy').reshape(-1, 17, 24, 1)
infoset = np.repeat(np.load(datadir + 'track_infoset.npy'), 6, axis=0)
print("Loaded: %s \n" % datadir)

columns = DEFAULTS.info_cols_tracklet_ + DEFAULTS.ocdb_cols1 + DEFAULTS.ocdb_cols2
(X, infoset), (Xv, valid_infoset), (Xt, test_infoset) = DATA.TVT_split_(dataset, infoset)
T = infoset[:, 0].astype(int)
Tv = valid_infoset[:, 0].astype(int)
Tt = test_infoset[:, 0].astype(int)

tracklet_pid_model = custom_models.ComplexConvTrackletPID()
tracklet_pid_model.compile(
    optimizer=tf.train.AdamOptimizer(learning_rate=0.001),
    loss='binary_crossentropy',
    metrics=[ML.pion_con],
)
history = tracklet_pid_model.fit(
    X, T,
    # remaining fit arguments assumed (original cell truncated here):
    epochs=100, batch_size=2**10,
    validation_data=(Xv, Tv),
)
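# ML.pion_con's implementation is not part of this cell; below is a NumPy
# sketch of the usual definition -- the fraction of pions accepted at the
# threshold that keeps a given electron efficiency. The function name and the
# 0.9 working point are assumptions, not the project's actual code:
def pion_contamination(y_true, p_pred, elec_eff=0.9):
    thresh = np.quantile(p_pred[y_true == 1], 1.0 - elec_eff)   # keep `elec_eff` of electrons
    return np.mean(p_pred[y_true == 0] >= thresh)               # pion efficiency at that threshold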
from TOOLS import PLOT, METRICS, DATA, DEFAULTS

matplotlib.rcParams.update({'font.size': 16})
matplotlib.rcParams['text.usetex'] = True

from keras.models import load_model
from keras.utils import CustomObjectScope
from keras.initializers import glorot_uniform
from scipy.signal import convolve2d

directory = DEFAULTS.datadir + 'DS2/'
raw_data = np.load(directory + '0_tracks.npy')
raw_info = np.load(directory + '0_info_set.npy')
print("Loaded: %s" % directory)

sumtracks = [np.sum(raw_info[:, 6] == i) for i in range(7)]   # sanity check

dataset, infoset, coord = DATA.process_tracklet_(raw_data, raw_info)
X, y = DATA.shuffle_(dataset / 1024, infoset[:, 0])
plt.imshow(X[0][:, :, 0])

modldir = "saved_models/"
conv_sizes1 = [16]
conv_sizes2 = [64]
dense_sizes1 = [1024]
dense_sizes2 = [256]
for conv_size1 in conv_sizes1:
    for conv_size2 in conv_sizes2:
        for dense_size1 in dense_sizes1:
            for dense_size2 in dense_sizes2:
                mname = "conv-%d-%d-filters-dense-%d-%d-nodes-" % (
                    conv_size1, conv_size2, dense_size1, dense_size2)
# X = np.append(np.load(datadir + 'DS3/' + 'track_dataset.npy'),
#               np.load(datadir + 'DS4/' + 'track_dataset.npy'), axis=0)
# infoset = np.append(np.load(datadir + 'DS3/' + 'track_infoset.npy'),
#                     np.load(datadir + 'DS4/' + 'track_infoset.npy'), axis=0)

dataname = 'DS3/'
ocdbdir = DEFAULTS.ocdbdir
datadir = DEFAULTS.datadir + dataname
plotdir = DEFAULTS.plotdir

X = np.load(datadir + 'track_dataset.npy') / 1024
infoset = np.load(datadir + 'track_infoset.npy')
print("Loaded: %s \n" % datadir)

(X, infoset), (Xv, valid_infoset) = DATA.TV_split_(X, infoset)
T = infoset[:, 0].astype(int)
Tv = valid_infoset[:, 0].astype(int)
I = infoset
Iv = valid_infoset

(cs_1, cs_2, d1_1, d1_2) = (8, 16, 128, 64)
dropout = 0.45
stamp = datetime.datetime.now().strftime("%d-%m-%H%M%S") + "_"
mname = "track_V_U_"
tensorboard, csvlogger = LOG.logger_(dataname, stamp, mname)

input = Input(shape=X.shape[1:], name="X-in")
x = Conv2D(cs_1, [2, 3], activation='relu', padding='same')(input)
x = MaxPool2D([2, 2], 2, padding='valid')(x)
import numpy as np
import pandas as pd
from TOOLS import DEFAULTS, DATA

datadir = DEFAULTS.datadir + 'DS1/'   # '000%d/all/' % run_no
ocdbdir = DEFAULTS.ocdbdir
raw_data = np.load(datadir + '0_tracks.npy')
raw_info = np.load(datadir + '0_info_set.npy')

# raw_info column slices: det (14:20), row (20:26), col (26:32), presence (32:38)
dataset, infoset, coordinates = DATA.process_track_(raw_data, raw_info)
dataset, infoset, ocdbinfo = DATA.ocdb_track_(dataset, infoset, coordinates, DEFAULTS.ocdbdir)

info_cols = ["label", "nsigmae", "nsigmap", "PT", "{dE}/{dx}", "Momenta [GeV]",
             "$\\eta$", "$\\theta$", "$\\phi$", "run_no", "event", "V0trackID", "track"]

# def calib_track_(dataset, infoset, ...):
#     gainglob = np.zeros((dataset.shape[0], 17, 24))   # gain for chambers
#     gainpads = np.zeros((dataset.shape[0], 17, 24))   # gain for individual pads
#     ocdbinfo = np.zeros((dataset.shape[0], chamber.values.shape[1]))
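# Illustration of the raw_info layout noted in the comment above; the slice
# bounds come from that comment, the variable names are hypothetical:
det = raw_info[:, 14:20]        # detector number per tracklet layer
row = raw_info[:, 20:26]        # pad row
col = raw_info[:, 26:32]        # pad column
presence = raw_info[:, 32:38]   # tracklet-present flag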
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Dense, Dropout, Flatten, Conv2D, MaxPool2D, concatenate, GaussianNoise

matplotlib.rcParams.update({'font.size': 16})
matplotlib.rcParams['text.usetex'] = True

dataname = 'DS3/'
ocdbdir = DEFAULTS.ocdbdir
datadir = DEFAULTS.datadir + dataname
plotdir = DEFAULTS.plotdir

X = np.load(datadir + 'tracklet_dataset.npy') / 1024
infoset = np.load(datadir + 'tracklet_infoset.npy')
print("Loaded: %s \n" % datadir)

(X, infoset), (Xv, valid_infoset) = DATA.TV_split_(X, infoset)
T = infoset[:, 0].astype(int)
Tv = valid_infoset[:, 0].astype(int)
I = infoset
Iv = valid_infoset

(cs_1, cs_2, d1_1, d1_2) = (8, 16, 128, 64)
mname = "tracklet_V_U"
stamp = datetime.datetime.now().strftime("%d-%m-%H%M%S") + "_"
tensorboard, csvlogger = LOG.logger_(dataname, stamp, mname)

input = Input(shape=X.shape[1:])   # name="X-in"
x = Conv2D(cs_1, [2, 3], activation='relu', padding='same')(input)
x = MaxPool2D([2, 2], 2, padding='valid')(x)
x = Conv2D(cs_2, [2, 3], activation='relu', padding='same')(x)
x = MaxPool2D([2, 2], 2, padding='valid')(x)
import numpy as np
import matplotlib.pyplot as plt
from TOOLS import DATA, MODELS, LOG, METRICS
import random, matplotlib, datetime, os

run_no = '000265378/'
dataname = 'test/'
directory = 'data/output/' + run_no + dataname
raw_data = np.load(directory + '0_tracks.npy')
raw_info = np.load(directory + '0_info_set.npy')
print("Loaded: %s" % directory)

sumtracks = [np.sum(raw_info[:, 6] == i) for i in range(7)]   # sanity check

dataset, infoset = DATA.process_1(raw_data, raw_info)
X, y = DATA.shuffle_(dataset / 1024, infoset[:, 0])
print("Electron occurrence: %.2f%%" % (100 * sum(y) / len(y)))
X_[str(l)] = np.append(dataset[:, 0][mask].reshape(-1, l),
                       dataset[:, 6][mask].reshape(-1, l), axis=1)

P1_ = {}
P2_ = {}
Xt_ = {}
It_ = {}
epoch = [100] * 6
batch = [2**10] * 6
pioncon1 = np.zeros(6)
Upioncon1 = np.zeros(6)
pioncon2 = np.zeros(6)
Upioncon2 = np.zeros(6)

for l in range(1, 7):
    X_[str(l)], I_[str(l)] = DATA.shuffle_(X_[str(l)], I_[str(l)])
    (X, I), (Xv, Iv), (Xt, It) = DATA.TVT_split_(X_[str(l)], I_[str(l)],
                                                 test_split=0.35, valid_split=0.15)
    Xt_[str(l)] = Xt
    It_[str(l)] = It
    T = I[:, 0].astype(int)
    Tv = Iv[:, 0].astype(int)
    Tt = It[:, 0].astype(int)
    print(X.shape)

    input = Input(shape=X[:, :l].shape[1:])
    x = Dense(36, activation='relu')(input)
    x = Dropout(0.5)(x)
    x = Dense(216, activation='relu')(x)
from sklearn import preprocessing
from tensorflow.keras import backend as K
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Dense, Dropout, Flatten, Conv2D, MaxPool2D, concatenate, GaussianNoise

matplotlib.rcParams.update({'font.size': 16})
matplotlib.rcParams['text.usetex'] = True

infoset = np.load(DEFAULTS.datadir + 'DS4/' + 'tracklet_infoset.npy')
dataset = np.load(DEFAULTS.modldir + 'P-DS4-tracklet_V_U_wbce5.npy')

# X and I are assumed to be set in an earlier cell (e.g. from the arrays above)
(I[:, 13] == 5.0).sum()
mask = I[:, 13] == 6.0          # keep only tracks with six tracklets
I = I[mask]
X = X[mask]

(X, I), (Xv, Iv), (Xt, It) = DATA.TVT_split_(X, I)
T = I[:, 0].astype(int)
Tv = Iv[:, 0].astype(int)
Tt = It[:, 0].astype(int)
A = I[:, 14:]
Av = Iv[:, 14:]
At = It[:, 14:]

input_X = Input(shape=X.shape[1:], name="info")
x = Dense(64)(input_X)
x = Dense(8)(x)
output_aux = Dense(1, activation='sigmoid')(x)
model = Model(inputs=input_X, outputs=output_aux)
# model.summary()
model.compile(optimizer=tf.train.AdamOptimizer(learning_rate=1e-3),
              loss=ML.WBCE, metrics=[ML.pion_con])
# dataname = 'all_tracks_6_tracklets_even_chamber_calib/'
# dataname = 'DS1/'
for i in range(1, 5):
    dataname = 'DS%d/' % i
    ocdbdir = DEFAULTS.ocdbdir
    datadir = DEFAULTS.datadir + dataname
    plotdir = DEFAULTS.plotdir

    dataset = np.load(datadir + '0_tracks.npy')
    infoset = np.load(datadir + '0_info_set.npy')
    # dataset, infoset = DATA.shuffle_(dataset, infoset)
    print("Loaded: %s \n" % datadir)

    dataset, infoset = DATA.process_tracklet_(dataset, infoset)
    # dataset, infoset = DATA.ocdb_tracklet_(dataset, infoset, ocdbdir)
    infoset = DATA.ocdb_expand_(infoset, ocdbdir)
    np.save(datadir + 'tracklet_dataset.npy', dataset)
    np.save(datadir + 'tracklet_infoset.npy', infoset)
    print(dataset.shape)

# for i in range(1, 5):
#     dataname = 'DS%d_pad/' % i
#     ocdbdir = DEFAULTS.ocdbdir
#     datadir = DEFAULTS.datadir + dataname
#     plotdir = DEFAULTS.plotdir
#     dataset = np.load(datadir + '0_tracks.npy')
#     infoset = np.load(datadir + '0_info_set.npy')
from tensorflow.keras import backend as K          # needed by WBCE below
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Dense, Dropout, Flatten, Conv2D, MaxPool2D, concatenate
import tensorflow as tf
import pandas as pd
from sklearn.decomposition import PCA
from sklearn import preprocessing
from tensorflow.keras.callbacks import TensorBoard, Callback

run_no = '000265378/'
dataname = 'all/'
directory = 'data/output/' + run_no + dataname
raw_data = np.load(directory + '0_tracks.npy')
raw_info = np.load(directory + '0_info_set.npy')
print('Loaded: %s' % directory)

dataset, infoset = DATA.process_1(raw_data, raw_info)
dataset, infoset = DATA.shuffle_(dataset, infoset)

def WBCE(y_true, y_pred, weight=7.0, from_logits=False):
    """Weighted binary cross-entropy, averaged over the last axis."""
    y_pred = tf.cast(y_pred, dtype='float32')
    y_true = tf.cast(y_true, y_pred.dtype)
    return K.mean(ML.weighted_binary_crossentropy(y_true, y_pred, weight=weight,
                                                  from_logits=from_logits), axis=-1)

input = Input(shape=X.shape[1:])   # name="X-in"
x = Conv2D(cs_1, [2, 3], activation='relu', padding='same')(input)
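# ML.weighted_binary_crossentropy itself is not shown in this cell; below is a
# sketch of the usual form of a weighted BCE, with the positive (electron) term
# scaled by `weight` -- an assumed implementation, not necessarily TOOLS's:
def weighted_bce_sketch(y_true, y_pred, weight=7.0):
    eps = K.epsilon()
    p = K.clip(y_pred, eps, 1.0 - eps)   # avoid log(0)
    return -(weight * y_true * K.log(p) + (1.0 - y_true) * K.log(1.0 - p))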
from tensorflow.keras import backend as K
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Dense, Dropout, Flatten, Conv2D, MaxPool2D, concatenate, GaussianNoise

matplotlib.rcParams.update({'font.size': 16})
matplotlib.rcParams['text.usetex'] = True

dataname = 'DS2/'
ocdbdir = DEFAULTS.ocdbdir
datadir = DEFAULTS.datadir + dataname
plotdir = DEFAULTS.plotdir

raw_data = np.load(datadir + '0_tracks.npy')
raw_info = np.load(datadir + '0_info_set.npy')
print("Loaded: %s \n" % datadir)

dataset, infoset = DATA.process_tracklet_(raw_data, raw_info)
(X, infoset), (Xv, valid_infoset), (Xt, test_infoset) = DATA.TVT_split_(dataset / 1024, infoset)
T = infoset[:, 0].astype(int)
Tv = valid_infoset[:, 0].astype(int)
Tt = test_infoset[:, 0].astype(int)
I = infoset
Iv = valid_infoset
It = test_infoset
# I = infoset[:, nx:ny]

(cs_1, cs_2, d1_1, d1_2) = (8, 16, 128, 64)
weights = [1 / 10, 1.0, 10.0]      # loss weightings (presumably for WBCE runs)
stamp = datetime.datetime.now().strftime("%d-%m-%H%M%S") + "_"
thresholds = np.linspace(0, 1, 1000)
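# The threshold grid above is presumably scanned over model outputs to trace
# the electron-efficiency / pion-efficiency curve. A minimal sketch; `labels`
# and `scores` are hypothetical stand-ins for test-set truth (e.g. Tt) and a
# trained model's sigmoid outputs:
def efficiency_curves(labels, scores, thresholds):
    elec = scores[labels == 1]    # electrons (label 1)
    pion = scores[labels == 0]    # pions (label 0)
    elec_eff = np.array([(elec >= t).mean() for t in thresholds])
    pion_eff = np.array([(pion >= t).mean() for t in thresholds])
    return elec_eff, pion_eff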
import pandas as pd
import seaborn as sns
import random, matplotlib
from TOOLS import DATA, PLOT, DEFAULTS

matplotlib.rcParams.update({'font.size': 16})
matplotlib.rcParams['text.usetex'] = True

ocdbdir = DEFAULTS.ocdbdir
datadir = DEFAULTS.datadir + 'DS2/'   # '000%d/all/' % run_no
plotdir = DEFAULTS.plotdir

dataset = np.load(datadir + '0_tracks.npy')
infoset = np.load(datadir + '0_info_set.npy')
print("Loaded: %s \n" % datadir)

dataset, infoset, coordinates = DATA.process_tracklet_(dataset, infoset)
dataset, infoset = DATA.ocdb_tracklet_(dataset, infoset, coordinates, DEFAULTS.ocdbdir)

ocdb_cols = ["Anode Voltage", "Drift Voltage", "Drift Velocity", "ExB"]
info_cols = ["label", "nsigmae", "nsigmap", "PT", "{dE}/{dx}", "Momenta [GeV]",
             "$\\eta$", "$\\theta$", "$\\phi$", "run_no", "event", "V0trackID", "track"]
info_cols.extend(ocdb_cols)

infoframe = pd.DataFrame(data=infoset, columns=info_cols)
infoframe.head(20)

elec_data, elec_info = DATA.elec_strip_(dataset, infoset)
pion_data, pion_info = DATA.pion_strip_(dataset, infoset)
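# A usage sketch for the frame above: how the OCDB calibration columns vary
# with run number (the groupby choice is an illustration, not from the source):
infoframe.groupby("run_no")[ocdb_cols].mean()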
X_, I_ = {}, {}    # per-multiplicity datasets, keyed by tracklet count
for l in range(1, 7):
    mask = infoset[:, 13] == l          # column 13: number of tracklets on the track
    I_[str(l)] = infoset[mask][::l, :14]
    X_[str(l)] = dataset[:, 0][mask].reshape(-1, l)

P_ = {}
Xt_ = {}
It_ = {}
epoch = [100] * 6
batch = [2**10] * 6
pioncon = np.zeros(6)
Upioncon = np.zeros(6)

for l in range(1, 7):
    # X_[str(l)], I_[str(l)] = DATA.shuffle_(X_[str(l)], I_[str(l)])
    (X, I), (Xv, Iv), (Xt, It) = DATA.TVT_split_(X_[str(l)], I_[str(l)],
                                                 test_split=0.35, valid_split=0.15)
    T = I[:, 0].astype(int)
    Tv = Iv[:, 0].astype(int)
    Tt = It[:, 0].astype(int)
    print(X.shape)

    input = Input(shape=X.shape[1:], name="info")
    x = Dense(36, activation='relu')(input)
    x = Dropout(0.5)(x)
    x = Dense(216, activation='relu')(x)
    x = Dropout(0.5)(x)
    x = Dense(36, activation='relu')(x)
    output = Dense(1, activation='sigmoid')(x)
    model = Model(inputs=input, outputs=output)
    model.compile(optimizer=tf.train.AdamOptimizer(learning_rate=1e-3),
                  loss=ML.WBCE, metrics=['accuracy'])
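    # the cell ends at compile; a minimal assumed continuation reusing the
    # epoch/batch settings above (the validation_data choice is an assumption):
    model.fit(X, T, epochs=epoch[l - 1], batch_size=batch[l - 1],
              validation_data=(Xv, Tv))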
import numpy as np
import matplotlib.pyplot as plt
from TOOLS import DATA, MODELS, LOG, METRICS
import random, matplotlib, datetime, os
from sklearn.decomposition import PCA
from sklearn import preprocessing
import pandas as pd

run_no = '000265378/'
dataname = 'even/'
directory = 'data/output/' + run_no + dataname
raw_data = np.load(directory + '0_tracks.npy')
raw_info = np.load(directory + '0_info_set.npy')
print("Loaded: %s" % directory)

raw_data, raw_info = DATA.shuffle_(raw_data, raw_info)

columns = ["label", "nsigmae", "nsigmap", "PT", "${dE}/{dx}$", "Momenta [GeV]",
           "eta", "theta", "phi", "event", "V0trackID", "track"]
cnames = ["$\\pi$", "$e$"]
colour = ['r', 'g']
styles = ['--', '-.']

T = raw_info[:, 0]
nx = 3
ny = 9
columns[nx:ny + 1]                      # the feature columns selected below
params = raw_info[:, nx:ny + 1]
parammean = params.sum(axis=0) / params.shape[0]
paramstdv = np.sqrt(np.square(params - parammean).sum(axis=0) / (params.shape[0] - 1))
I = (params - parammean)
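# parammean/paramstdv above are just the sample mean and the ddof=1 standard
# deviation written out by hand; a quick equivalence check (note that sklearn's
# preprocessing.StandardScaler would use the ddof=0 variance instead):
assert np.allclose(parammean, params.mean(axis=0))
assert np.allclose(paramstdv, params.std(axis=0, ddof=1))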