def DAQ_sim_ENCODER(sim_info, COMP):
    param = sim_info['Param']

    # Generation of iterable for pool.map: mapping function
    try:
        style = param.P['L1']['map_style']
        L1_Slice, SiPM_Matrix_I, SiPM_Matrix_O, topology = MAP.SiPM_Mapping(param.P, style)
    except KeyError:
        # JSON file doesn't include a mapping option; fall back to 'striped'
        L1_Slice, SiPM_Matrix_I, SiPM_Matrix_O, topology = MAP.SiPM_Mapping(param.P, 'striped')

    # Multiprocess pool management
    kargs = {'sim_info': sim_info, 'COMP': COMP}
    DAQ_map = partial(L1_sch_ENCODER, **kargs)

    start_time = time.time()
    # Multiprocess work: one task per L1 slice
    pool_size = mp.cpu_count()
    pool = mp.Pool(processes=pool_size)
    pool_output = pool.map(DAQ_map, L1_Slice)
    pool.close()
    pool.join()
    # Single-worker debug variant: pool_output = DAQ_map(L1_Slice[0])
    elapsed_time = time.time() - start_time
    print("SKYNET GAINED SELF-AWARENESS AFTER %d SECONDS" % elapsed_time)

    return pool_output, topology
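# --- Illustrative sketch, not part of the pipeline --------------------------
# Minimal, self-contained version of the functools.partial + Pool.map pattern
# used above. process_slice is a hypothetical stand-in for L1_sch_ENCODER;
# the only assumption is one positional slice argument plus bound kwargs.
import multiprocessing as _mp
from functools import partial as _partial

def process_slice(l1_slice, sim_info, COMP):
    # Each worker receives one L1 slice; sim_info/COMP are bound by partial
    return sum(l1_slice) + COMP

if __name__ == '__main__':
    slices = [[1, 2], [3, 4], [5, 6]]
    worker = _partial(process_slice, sim_info={}, COMP=0)
    with _mp.Pool(processes=_mp.cpu_count()) as pool:
        results = pool.map(worker, slices)   # one call per L1 slice
    print(results)  # [3, 7, 11]
# -----------------------------------------------------------------------------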
def __init__(self, path, jsonfilename, file_number, encoder_data):
    self.SIM_CONT = CFG.SIM_DATA(filename=path + jsonfilename + ".json", read=True)
    self.path = self.SIM_CONT.data['ENVIRONMENT']['path_to_files']
    self.in_file = self.SIM_CONT.data['ENVIRONMENT']['MC_file_name'] + \
                   "." + str(file_number).zfill(3) + ".pet.h5"
    self.out_file = self.SIM_CONT.data['ENVIRONMENT']['MC_out_file_name'] + \
                    "." + str(file_number).zfill(3) + ".h5"
    self.TE1 = self.SIM_CONT.data['TOFPET']['TE']
    self.TE2 = self.SIM_CONT.data['L1']['TE']
    self.time_bin = self.SIM_CONT.data['ENVIRONMENT']['time_bin']
    self.n_rows = self.SIM_CONT.data['TOPOLOGY']['n_rows']
    self.n_L1 = len(self.SIM_CONT.data['L1']['L1_mapping_I']) + \
                len(self.SIM_CONT.data['L1']['L1_mapping_O'])

    # TOPOLOGY
    self.L1, SiPM_I, SiPM_O, topology = DAQ.SiPM_Mapping(
        self.SIM_CONT.data, self.SIM_CONT.data['L1']['map_style'])

    # Vector with the L1 assignment of every SiPM
    self.sipmtoL1 = np.zeros(topology['n_sipms'], dtype='int32')
    L1_count = 0
    for i in self.L1:            # L1 modules
        for j in i:              # ASICs in this L1
            for l in j:          # SiPMs in this ASIC
                self.sipmtoL1[l] = L1_count
        L1_count += 1

    self.COMP = encoder_data
    self.waves = np.array([])
    self.tof = np.array([])
    self.extents = np.array([])
    self.sensors = np.array([])
    self.n_events = 0
    self.out_table = np.array([])
    self.out_table_tof = np.array([])
    self.data_enc = np.array([])
    self.data_recons = np.array([])
    self.sensors_t = np.array([])
    self.gamma1_i1 = np.array([])
    self.gamma2_i1 = np.array([])
    self.table = None
    self.h5file = None
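# --- Illustrative sketch, not part of the pipeline --------------------------
# How the SiPM -> L1 lookup vector above is built from the nested
# L1 -> ASIC -> SiPM mapping. The mapping below is a toy; the real one
# comes from DAQ.SiPM_Mapping.
import numpy as np

L1_map = [[[0, 1], [2, 3]],      # L1 #0: two ASICs, two SiPMs each
          [[4, 5], [6, 7]]]      # L1 #1
sipm_to_l1 = np.zeros(8, dtype='int32')
for l1_id, l1 in enumerate(L1_map):
    for asic in l1:
        for sipm in asic:
            sipm_to_l1[sipm] = l1_id
print(sipm_to_l1)  # [0 0 0 0 1 1 1 1]
# -----------------------------------------------------------------------------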
def __call__(self, sensors, data, event, ident=False, show_photons=True,
             MU_LIN=True, TH=0):
    items = []
    L1_Slice, SiPM_Matrix_I, SiPM_Matrix_O, topology = DAQ.SiPM_Mapping(
        self.data, self.data['L1']['map_style'])

    for i in list(self.w.items):
        self.w.removeItem(i)

    max_light = float(data[event, :].max())
    print(max_light)

    count = np.zeros(3) + np.array([20, 50, 50])
    cnt = 0
    # One colour per L1; the palette cycles every 12 entries
    color_map = [[1, 0, 0], [0, 1, 0], [0, 0, 1],
                 [1, 1, 0], [0, 1, 1], [1, 0, 1],
                 [2.5, 0, 0], [0, 2.5, 0], [0, 0, 2.5],
                 [2.5, 2.5, 0], [0, 2.5, 2.5], [2.5, 0, 2.5]]
    color_i = 100

    for m in L1_Slice:
        for j in m:                       # ASICs
            for k in j:                   # SiPMs
                k = k + sensors[0, 0]     # Offset to absolute sensor id (Paola's style)
                i = sensors[int(np.argwhere(sensors[:, 0] == k))]
                if data[event, int(i[0] - sensors[0, 0])] > 0:
                    data_show = data[event, int(i[0] - sensors[0, 0])]
                else:
                    data_show = 0
                self.SiPM_QT(i[1:].transpose(),
                             np.arctan2(i[2], i[1]), i[0],
                             data_show,
                             max_light,
                             id=ident,
                             show_photons=show_photons,
                             MU_LIN=MU_LIN,
                             TH=TH,
                             color2=[color_i * color_map[cnt][2],
                                     color_i * color_map[cnt][1],
                                     color_i * color_map[cnt][0]])
        if cnt < 11:
            cnt += 1
        else:
            cnt = 0
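# --- Illustrative sketch, not part of the pipeline --------------------------
# The per-SiPM row lookup above (np.argwhere inside a triple loop) scans the
# whole sensor table once per SiPM. A possible alternative, sketched here on
# toy data: build an id -> row-index dictionary once, then look rows up in O(1).
import numpy as np

sensors = np.array([[1000, 0.0, 1.0, 2.0],
                    [1001, 0.5, 0.8, 2.0],
                    [1002, 1.0, 0.0, 2.0]])
row_of = {int(s_id): row for row, s_id in enumerate(sensors[:, 0])}
k = 1001
i = sensors[row_of[k]]          # same row that np.argwhere would find
print(i)
# -----------------------------------------------------------------------------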
print("Event %d is Valid" % event) else: event_info = np.zeros(9 + self.n_sipms * 2) - 1 return event_info if __name__ == '__main__': # CONFIGURATION READING path = "/volumedisk0/home/viherbos/DAQ_data/" jsonfilename = "CUBE" SIM_CONT = conf.SIM_DATA(filename=path + jsonfilename + ".json", read=True) data = SIM_CONT.data L1_Slice, Matrix_I, Matrix_O, topo = DAQ.SiPM_Mapping( data, data['L1']['map_style']) SIPM = {'n_sipms': 3500, 'first_sipm': 1000, 'tau_sipm': [100, 15000]} # Add SiPM info in .json file # GENERAL PARAMETERS n_files = 50 time_bin = 5 cores = 20 name = "petit_ring_tof_all_tables" path = "/volumedisk0/home/paolafer/vicente/" DATA = np.array([]).reshape(0, 9 + SIPM['n_sipms'] * 2) start_time = time.time() #############################
def __call__(self):
    # Read the first config file to get n_L1 (same for all files)
    config_file = self.data_path + self.config_file[0] + ".json"
    CG = CFG.SIM_DATA(filename=config_file, read=True)
    CG = CG.data
    n_L1 = np.array(CG['L1']['L1_mapping_O']).shape[0]
    n_sipms_int = CG['TOPOLOGY']['sipm_int_row'] * CG['TOPOLOGY']['n_rows']
    n_sipms_ext = CG['TOPOLOGY']['sipm_ext_row'] * CG['TOPOLOGY']['n_rows']
    n_sipms = n_sipms_int + n_sipms_ext

    log_ETHOUT     = np.array([]).reshape(0, 2)
    log_FIFOIN     = np.array([]).reshape(0, 2)
    log_asicout    = np.array([]).reshape(0, 2)
    log_channels   = np.array([]).reshape(0, 2)
    lost_ETHOUT    = np.array([]).reshape(0, 1)
    lost_FIFOIN    = np.array([]).reshape(0, 1)
    lost_asicout   = np.array([]).reshape(0, 1)
    lost_channels  = np.array([]).reshape(0, 1)
    lost_producers = np.array([]).reshape(0, 1)
    in_time  = np.array([]).reshape(0, n_sipms)
    out_time = np.array([]).reshape(0, n_sipms)

    for i in self.config_file:
        start = i.rfind("/")
        jsonname = i[start + 1:]
        config_file2 = self.data_path + i + ".json"
        CG = CFG.SIM_DATA(filename=config_file2, read=True)
        CG = CG.data
        chain = CG['ENVIRONMENT']['out_file_name'][
            CG['ENVIRONMENT']['out_file_name'].rfind("./") + 1:]
        filename = self.data_path + chain + "_" + jsonname + ".h5"

        log_ETHOUT   = np.vstack([log_ETHOUT,
                                  np.array(pd.read_hdf(filename, key='log_ETHOUT'))])
        log_FIFOIN   = np.vstack([log_FIFOIN,
                                  np.array(pd.read_hdf(filename, key='log_FIFOIN'))])
        log_asicout  = np.vstack([log_asicout,
                                  np.array(pd.read_hdf(filename, key='log_asicout'))])
        log_channels = np.vstack([log_channels,
                                  np.array(pd.read_hdf(filename, key='log_channels'))])
        lost_ETHOUT   = np.vstack([lost_ETHOUT,
                                   np.array(pd.read_hdf(filename, key='lost_ETHOUT'))])
        lost_FIFOIN   = np.vstack([lost_FIFOIN,
                                   np.array(pd.read_hdf(filename, key='lost_FIFOIN'))])
        lost_asicout  = np.vstack([lost_asicout,
                                   np.array(pd.read_hdf(filename, key='lost_asicout'))])
        lost_channels = np.vstack([lost_channels,
                                   np.array(pd.read_hdf(filename, key='lost_channels'))])
        lost_producers = np.vstack([lost_producers,
                                    np.array(pd.read_hdf(filename, key='lost_producers'))])
        in_time  = np.vstack([in_time,  np.array(pd.read_hdf(filename, key='in_time'))])
        out_time = np.vstack([out_time, np.array(pd.read_hdf(filename, key='out_time'))])

    # Parameter computation
    latency = np.max((out_time - in_time) / 1E6, axis=1)

    # CRT
    ring_length = CG['TOPOLOGY']['sipm_ext_row']

    # Mapping function
    try:
        style = CG['L1']['map_style']
        L1_Slice, SiPM_Matrix_I, SiPM_Matrix_O, topology = MAP.SiPM_Mapping(CG, style)
    except KeyError:
        # JSON file doesn't include a mapping option; fall back to 'striped'
        L1_Slice, SiPM_Matrix_I, SiPM_Matrix_O, topology = MAP.SiPM_Mapping(CG, 'striped')

    ring_dim = SiPM_Matrix_O.shape

    # Mask out empty entries (< 1) so min/argmin only see real timestamps
    in_time_M = np.ma.MaskedArray(in_time, in_time < 1)
    gamma1_sipm = np.ma.argmin(in_time_M, axis=1)
    gamma1_tdc = np.ma.filled(np.ma.min(in_time_M, axis=1))
    print(gamma1_sipm)

    gamma2_sipm = np.zeros(gamma1_sipm.shape)
    gamma2_tdc = np.zeros(gamma1_sipm.shape)

    for i in range(CG['ENVIRONMENT']['n_events']):
        gamma1_coord = np.where(SiPM_Matrix_O == gamma1_sipm[i])
        # Roll the SiPM matrix so the opposite side of the detector lines up
        Xe = np.roll(SiPM_Matrix_O, ring_dim[1] // 4 - int(gamma1_coord[1]), axis=1)
        # Select the opposite side of the detector
        Xe_sel = Xe[:, ring_dim[1] // 2:]
        Xe_sel_1D = Xe_sel.reshape(-1)
        try:
            gamma2_tdc[i] = np.ma.min(in_time_M[i, Xe_sel_1D])
        except Exception:
            gamma2_tdc[i] = 0
        # gamma2_sipm[i] = Xe_sel_1D[np.argmin(in_time[i, Xe_sel_1D])]
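# --- Illustrative sketch, not part of the pipeline --------------------------
# The masked-array trick used above, on toy data: timestamps below 1 mean
# "no hit", so they are masked out before taking per-event min/argmin.
import numpy as np

in_time = np.array([[0., 320., 150., 0.],
                    [410., 0., 0., 290.]])
in_time_M = np.ma.MaskedArray(in_time, in_time < 1)
first_sipm = np.ma.argmin(in_time_M, axis=1)         # earliest hit per event
first_tdc = np.ma.filled(np.ma.min(in_time_M, axis=1))
print(first_sipm)   # [2 3]
print(first_tdc)    # [150. 290.]
# -----------------------------------------------------------------------------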
    # Get rid of singles
    TOF = (gamma1_tdc - gamma2_tdc) / 2
    TOF = TOF[np.logical_not(np.isnan(TOF))]
    print(TOF.shape)

    # Introduce a random sign to symmetrize the distribution
    random_sign = np.where(np.random.rand(TOF.shape[0]) > 0.5, -1, 1)
    TOF = TOF * random_sign

    print("LOST DATA PRODUCER -> CH      = %d" % lost_producers.sum())
    print("LOST DATA CHANNELS -> OUTLINK = %d" % lost_channels.sum())
    print("LOST DATA OUTLINK  -> L1      = %d" % lost_asicout.sum())
    print("LOST DATA L1A -> L1B          = %d" % lost_FIFOIN.sum())
    print("LOST DATA L1 Ethernet Link    = %d" % lost_ETHOUT.sum())

    # Worst-case FIFO occupancies as a percentage of their depth
    WC_CH_FIFO    = float(max(log_channels[:, 0]) / CG['TOFPET']['IN_FIFO_depth']) * 100
    WC_OLINK_FIFO = float(max(log_asicout[:, 0]) / CG['TOFPET']['OUT_FIFO_depth']) * 100
    WC_L1_A_FIFO  = float(max(log_FIFOIN[:, 0]) / CG['L1']['FIFO_L1a_depth']) * 100
    WC_L1_B_FIFO  = float(max(log_ETHOUT[:, 0]) / CG['L1']['FIFO_L1b_depth']) * 100

    print("\n \n BYE \n \n")

    fit = fit_library.gauss_fit()
    fig = plt.figure(figsize=(15, 10))

    ax = fig.add_subplot(431)
    fit(log_channels[:, 0], range(1, CG['TOFPET']['IN_FIFO_depth'] + 2))
    fit.plot(axis=ax, title="ASICS Channel Input analog FIFO (4)",
             xlabel="FIFO Occupancy", ylabel="Hits", res=False, fit=False)
    ax.set_yscale('log')
    ax.xaxis.set_major_locator(MaxNLocator(integer=True))
    ax.text(0.99, 0.97, "ASIC Input FIFO reached %.1f %%" % WC_CH_FIFO,
            fontsize=8, verticalalignment='top',
            horizontalalignment='right', transform=ax.transAxes)

    ax = fig.add_subplot(432)
    fit(log_asicout[:, 0], CG['TOFPET']['OUT_FIFO_depth'] // 10)
    fit.plot(axis=ax, title="ASICS Channels -> Outlink",
             xlabel="FIFO Occupancy", ylabel="Hits", res=False, fit=False)
    ax.set_yscale('log')
    ax.xaxis.set_major_locator(MaxNLocator(integer=True))
    ax.text(0.99, 0.97, "ASIC Outlink FIFO reached %.1f %%" % WC_OLINK_FIFO,
            fontsize=8, verticalalignment='top',
            horizontalalignment='right', transform=ax.transAxes)

    ax = fig.add_subplot(434)
    fit(log_FIFOIN[:, 0], CG['L1']['FIFO_L1a_depth'] // 10)
    fit.plot(axis=ax, title="ASICS -> L1A (FIFOIN)",
             xlabel="FIFO Occupancy", ylabel="Hits", res=False, fit=False)
    ax.set_yscale('log')
    ax.xaxis.set_major_locator(MaxNLocator(integer=True))
    ax.text(0.99, 0.97, "L1_A FIFO reached %.1f %%" % WC_L1_A_FIFO,
            fontsize=8, verticalalignment='top',
            horizontalalignment='right', transform=ax.transAxes)

    ax = fig.add_subplot(435)
    fit(log_ETHOUT[:, 0], CG['L1']['FIFO_L1b_depth'] // 10)
    fit.plot(axis=ax, title="L1 OUTPUT (ETHOUT)",
             xlabel="FIFO Occupancy", ylabel="Hits", res=False, fit=False)
    ax.set_yscale('log')
    ax.xaxis.set_major_locator(MaxNLocator(integer=True))
    ax.text(0.99, 0.97, "L1_B FIFO reached %.1f %%" % WC_L1_B_FIFO,
            fontsize=8, verticalalignment='top',
            horizontalalignment='right', transform=ax.transAxes)

    ax = fig.add_subplot(436)
    fit(latency, 80)
    fit.plot(axis=ax, title="Total Data Latency",
             xlabel="Latency in microseconds", ylabel="Hits", res=False, fit=False)
    ax.text(0.99, 0.8, "WORST LATENCY = %f us" % max(latency),
            fontsize=7, verticalalignment='top',
            horizontalalignment='right', transform=ax.transAxes)
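# --- Illustrative sketch, not part of the pipeline --------------------------
# The original built the random sign in two steps:
#   s = (rand > 0.5) * -1;  s = s + s + 1      ->  True -> -1, False -> +1
# This sketch checks the equivalence with the np.where form used above.
import numpy as np

r = np.random.rand(5) > 0.5
s_orig = r * -1
s_orig = s_orig + s_orig + 1
s_new = np.where(r, -1, 1)
print(np.array_equal(s_orig, s_new))   # True
# -----------------------------------------------------------------------------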
    ax.text(0.99, 0.7, "MEAN LATENCY = %f us" % np.mean(latency),
            fontsize=7, verticalalignment='top',
            horizontalalignment='right', transform=ax.transAxes)
    ax.xaxis.set_major_locator(MaxNLocator(integer=True))

    ax_text = fig.add_subplot(433)
    ax_text.text(0.05, 0.8,
                 ("LOST DATA PRODUCER -> CH      = %d\n"
                  "LOST DATA CHANNELS -> OUTLINK = %d\n"
                  "LOST DATA OUTLINK  -> L1      = %d\n"
                  "LOST DATA L1A -> L1B          = %d\n"
                  "LOST ETHERLINK                = %d\n") %
                 (lost_producers.sum(), lost_channels.sum(),
                  lost_asicout.sum(), lost_FIFOIN.sum(), lost_ETHOUT.sum()),
                 fontsize=8, verticalalignment='top',
                 horizontalalignment='left', transform=ax_text.transAxes)

    ax = fig.add_subplot(437)
    range_tof = (TOF > -2000) * (TOF < 2000)
    fit(TOF[range_tof], 100)
    fit.plot(axis=ax, title="Time of Flight",
             xlabel="Time Stamp in picoseconds", ylabel="Hits",
             res=False, fit=True)
    # FIXME: this annotation prints lost_producers.sum(), not a fitted resolution
    ax_text.text(0.05, 0.9,
                 "Time of Flight Resolution = %d\n" % lost_producers.sum(),
                 fontsize=8, verticalalignment='top',
                 horizontalalignment='left', transform=ax_text.transAxes)

    fig.tight_layout()
    plt.savefig(filename + ".pdf")
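# --- Illustrative sketch, not part of the pipeline --------------------------
# fit_library.gauss_fit is project-specific; as a self-contained stand-in,
# this extracts a Gaussian sigma / FWHM from a TOF histogram with
# scipy.optimize.curve_fit, which is what a resolution annotation would use.
import numpy as np
from scipy.optimize import curve_fit

def gauss(x, a, mu, sigma):
    return a * np.exp(-0.5 * ((x - mu) / sigma) ** 2)

tof = np.random.normal(0.0, 120.0, 10000)          # toy TOF sample, ps
hist, edges = np.histogram(tof, bins=100, range=(-2000, 2000))
centers = 0.5 * (edges[:-1] + edges[1:])
popt, _ = curve_fit(gauss, centers, hist, p0=[hist.max(), 0.0, 100.0])
fwhm = 2.355 * abs(popt[2])                        # FWHM = 2*sqrt(2*ln2)*sigma
print("sigma = %.1f ps, FWHM = %.1f ps" % (abs(popt[2]), fwhm))
# -----------------------------------------------------------------------------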
def DAQ_sim_CUBE(timing, Param):
    # Generation of iterable for pool.map: mapping function
    try:
        style = Param.P['L1']['map_style']
        L1_Slice, SiPM_Matrix_I, SiPM_Matrix_O, topology = MAP.SiPM_Mapping(Param.P, style)
    except KeyError:
        # JSON file doesn't include a mapping option; fall back to 'striped'
        L1_Slice, SiPM_Matrix_I, SiPM_Matrix_O, topology = MAP.SiPM_Mapping(Param.P, 'striped')

    start_time = time.time()
    # Multiprocess work: every task is one (L1 slice, timing, Param) tuple,
    # unpacked inside L1_exec_wrapper
    pool_size = mp.cpu_count()
    pool = mp.Pool(processes=pool_size)
    pool_output = pool.map(L1_exec_wrapper,
                           zip(L1_Slice,
                               it.repeat(timing),
                               it.repeat(Param)))
    pool.close()
    pool.join()

    # pool_output holds one {'DATA_out', 'L1_out', 'ASICS_out'} block per L1.
    # Flatten all DATA_out blocks into a single list and stack the logs.
    SIM_OUT = []
    lost_producers = np.array([]).reshape(0, 1)
    lost_channels  = np.array([]).reshape(0, 1)
    lost_outlink   = np.array([]).reshape(0, 1)
    log_channels   = np.array([]).reshape(0, 2)
    log_outlink    = np.array([]).reshape(0, 2)
    lost_FIFOIN    = np.array([]).reshape(0, 1)
    lost_ETHOUT    = np.array([]).reshape(0, 1)
    log_FIFOIN     = np.array([]).reshape(0, 2)
    log_ETHOUT     = np.array([]).reshape(0, 2)
    in_time        = np.array([]).reshape(0, 1)
    out_time       = np.array([]).reshape(0, 1)

    for L1_i in pool_output:
        for j in range(len(L1_i['DATA_out'])):
            SIM_OUT.append(L1_i['DATA_out'][j])

    # Gather log information from the ASICS layer
    for L1_i in pool_output:
        for j in L1_i['ASICS_out']:
            lost_producers = np.vstack([lost_producers, np.array(j['lost_producers'])])
            lost_channels  = np.vstack([lost_channels,  np.array(j['lost_channels'])])
            lost_outlink   = np.vstack([lost_outlink,   np.array(j['lost_outlink'])])
            log_channels   = np.vstack([log_channels,   np.array(j['log_channels'])])
            log_outlink    = np.vstack([log_outlink,    np.array(j['log_outlink'])])

    # Gather log information from the L1 layer
    for L1_i in pool_output:
        lost_FIFOIN = np.vstack([lost_FIFOIN, np.array(L1_i['L1_out']['lost_FIFOIN'])])
        lost_ETHOUT = np.vstack([lost_ETHOUT, np.array(L1_i['L1_out']['lost_ETHOUT'])])
        log_FIFOIN  = np.vstack([log_FIFOIN,  np.array(L1_i['L1_out']['log_FIFOIN'])])
        log_ETHOUT  = np.vstack([log_ETHOUT,  np.array(L1_i['L1_out']['log_ETHOUT'])])

    pool_output = {'DATA_out': SIM_OUT,
                   'L1_out': {'lost_FIFOIN': lost_FIFOIN,
                              'lost_ETHOUT': lost_ETHOUT,
                              'log_FIFOIN': log_FIFOIN,
                              'log_ETHOUT': log_ETHOUT},
                   'ASICS_out': {'lost_producers': lost_producers,
                                 'lost_channels': lost_channels,
                                 'lost_outlink': lost_outlink,
                                 'log_channels': log_channels,
                                 'log_outlink': log_outlink}}

    elapsed_time = time.time() - start_time
    print("SKYNET GAINED SELF-AWARENESS AFTER %d SECONDS" % elapsed_time)

    return pool_output, topology
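# --- Illustrative sketch, not part of the pipeline --------------------------
# The tuple-unpacking wrapper pattern used with pool.map above: pool.map
# passes a single argument per task, so the tuple is unpacked inside the
# wrapper. l1_exec / l1_exec_wrapper are hypothetical stand-ins for the
# project's L1_exec / L1_exec_wrapper.
import itertools as it
import multiprocessing as mp

def l1_exec(l1_slice, timing, param):
    return sum(l1_slice) * timing + param

def l1_exec_wrapper(args):
    return l1_exec(*args)       # unpack (l1_slice, timing, param)

if __name__ == '__main__':
    slices = [[1, 2], [3, 4]]
    with mp.Pool(2) as pool:
        out = pool.map(l1_exec_wrapper,
                       zip(slices, it.repeat(10), it.repeat(1)))
    print(out)   # [31, 71]
# -----------------------------------------------------------------------------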
out = DAQ_OUTPUT_processing_ENCODER(SIM_OUT, n_L1, n_asics, COMP)

#################### DECODER PROCESSING ############################
n_rows = CG['TOPOLOGY']['n_rows']
kwargs = {'n_rows': n_rows, 'COMP': COMP, 'TE2': CG['L1']['TE'], 'n_sensors': 0}
ENC = ET.encoder_tools(**kwargs)

# Find OFFSETs for thresholds
# TH_enc = ENC.encoder(self.L1[0],
#                      np.zeros((1, COMP['ENC_weights_A'].shape[0]), dtype='float'),
#                      0) * CG['L1']['Tenc']

L1_box, SiPM_I, SiPM_O, topology = MAP.SiPM_Mapping(CG, CG['L1']['map_style'])
data_recons = np.zeros((n_events, n_sipms), dtype='float')

for i in range(n_events):
    index_1 = 0
    for L1 in L1_box:
        # Build the L1_SiPM matrix: one column block per ASIC, n_rows high
        L1_SiPM = np.array([], dtype='int').reshape(n_rows, 0)
        for asic in L1:
            L1_SiPM = np.hstack((L1_SiPM,
                                 np.array(asic).reshape((n_rows, -1), order='F')))
        data_enc_aux = out['data'][i, :]
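# --- Illustrative sketch, not part of the pipeline --------------------------
# The L1_SiPM matrix assembly above, on toy ids: each ASIC's SiPM list is
# reshaped column-major (order='F') into an n_rows-high block, and the blocks
# are stacked horizontally. Real ids come from MAP.SiPM_Mapping.
import numpy as np

n_rows = 2
asics = [[0, 1, 2, 3], [4, 5, 6, 7]]      # two ASICs, 4 SiPMs each
L1_SiPM = np.array([], dtype='int').reshape(n_rows, 0)
for asic in asics:
    block = np.array(asic).reshape((n_rows, -1), order='F')
    L1_SiPM = np.hstack((L1_SiPM, block))
print(L1_SiPM)
# [[0 2 4 6]
#  [1 3 5 7]]
# -----------------------------------------------------------------------------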