def DBLR(pmtrwf, n_baseline=500, thr_trigger=5, discharge_length=5000,
         acum_tau=2500, acum_compress=0.01):
    """Perform Base Line Restoration (BLR) on a set of PMT raw waveforms.

    Each waveform is deconvolved with the per-PMT cleaning and BLR
    coefficients read from the PMT database (DB.DataPMT()).

    Parameters
    ----------
    pmtrwf : ndarray, shape (NPMT, PMTWL)
        Raw waveforms, one row per PMT.
    n_baseline : int
        Number of leading samples used to estimate the baseline.
    thr_trigger : float
        Threshold (over baseline) that triggers the deconvolution.
    discharge_length : int
        Accumulator discharge length, in samples.
    acum_tau, acum_compress :
        Accepted for interface compatibility with the alternative
        ``deconvolve_signal_acum_v2`` implementation; unused here.

    Returns
    -------
    (CWF, ACUM, BSL, BSLE, BSLN) : tuple of ndarrays
        Corrected waveforms, accumulators, baselines, end-of-waveform
        baselines and baseline noise RMS (one entry per PMT).
    """
    DataPMT = DB.DataPMT()
    NPMT, PMTWL = pmtrwf.shape
    CWF = np.empty(pmtrwf.shape)
    ACUM = np.empty(pmtrwf.shape)
    BSL = np.empty(NPMT)
    BSLE = np.empty(NPMT)
    BSLN = np.empty(NPMT)

    for pmt in range(NPMT):
        # Bug fix: forward the n_baseline argument instead of the
        # hard-coded literal 500, which silently ignored the caller's value.
        signal_r, acum, baseline, baseline_end, noise_rms = \
            cblr.deconvolve_signal_acum(
                pmtrwf[pmt],
                n_baseline=n_baseline,
                coef_clean=DataPMT.coeff_c[pmt],
                coef_blr=DataPMT.coeff_blr[pmt],
                thr_trigger=thr_trigger,
                acum_discharge_length=discharge_length)

        CWF[pmt] = signal_r
        ACUM[pmt] = acum
        BSL[pmt] = baseline
        BSLE[pmt] = baseline_end
        BSLN[pmt] = noise_rms

    return CWF, ACUM, BSL, BSLE, BSLN
def __init__(self, gain=FEE_GAIN, c2=C2, c1=C1, r1=R1, zin=Zin,
             fsample=f_sample, flpf1=f_LPF1, flpf2=f_LPF2,
             noise_FEEPMB_rms=NOISE_I, noise_DAQ_rms=NOISE_DAQ, lsb=LSB):
    """Build the front-end electronics (FEE) model parameters.

    Derives analog gains, filter cut-off frequencies and their
    discrete-time (sampled) counterparts from the circuit constants,
    plus per-PMT variants taken from the PMT database.

    Parameters (defaults are module-level constants, not visible here):
    gain             -- FEE gain
    c2, c1           -- capacitances of the coupling network
    r1, zin          -- resistance and input impedance
    fsample          -- sampling frequency
    flpf1, flpf2     -- low-pass filter frequencies
    noise_FEEPMB_rms -- FEE/PMT-base noise RMS
    noise_DAQ_rms    -- DAQ noise RMS
    lsb              -- least significant bit (ADC quantization step)
    """
    # Raw circuit constants.
    self.R1 = r1
    self.Zin = zin
    self.C2 = c2
    self.C1 = c1
    self.GAIN = gain
    # Parallel combination of R1 and Zin.
    self.A1 = self.R1 * self.Zin/(self.R1 + self.Zin)  # ohms
    self.A2 = gain/self.A1  # ohms/ohms = []
    # Series resistance and effective capacitance of the coupling network.
    self.R = self.R1 + self.Zin
    self.Cr = 1. + self.C1/self.C2
    self.C = self.C1/self.Cr
    self.ZC = self.Zin/self.Cr
    # Analog filter frequencies (LHPF = high-pass, LPF1/2 = low-pass,
    # converted to angular frequency).
    self.f_sample = fsample
    self.freq_LHPF = 1./(self.R * self.C)
    self.freq_LPF1 = flpf1*2*np.pi
    self.freq_LPF2 = flpf2*2*np.pi
    # Normalized (discrete-time) frequencies for the sampled system.
    self.freq_LHPFd = self.freq_LHPF/(self.f_sample*np.pi)
    self.freq_LPF1d = self.freq_LPF1/(self.f_sample*np.pi)
    self.freq_LPF2d = self.freq_LPF2/(self.f_sample*np.pi)
    # BLR (baseline restoration) coefficient derived from the HPF pole.
    self.coeff_blr = self.freq_LHPFd*np.pi
    # Zero of the cleaning filter and its normalized coefficient.
    self.freq_zero = 1./(self.R1*self.C1)
    self.coeff_c = self.freq_zero/(self.f_sample*np.pi)
    # Per-PMT coefficients measured/stored in the database; from them the
    # effective per-PMT circuit constants are reconstructed by inverting
    # the formulas above.
    DataPMT = DB.DataPMT()
    self.coeff_blr_pmt = DataPMT.coeff_blr.values
    self.freq_LHPFd_pmt = self.coeff_blr_pmt/np.pi
    self.coeff_c_pmt = DataPMT.coeff_c.values
    self.C1_pmt = (self.coeff_blr_pmt/self.coeff_c_pmt)*(self.C2/np.pi)
    self.R1_pmt = 1./(self.coeff_c_pmt*self.C1_pmt*self.f_sample*np.pi)
    self.A1_pmt = self.R1_pmt * self.Zin/(self.R1_pmt + self.Zin)  # ohms
    self.A2_pmt = gain/self.A1_pmt  # ohms/ohms = []
    self.Cr_pmt = 1. + self.C1_pmt/self.C2
    self.ZC_pmt = self.Zin/self.Cr_pmt
    # Noise model and ADC conversion.
    self.noise_FEEPMB_rms = noise_FEEPMB_rms
    self.LSB = lsb
    # NOTE(review): assumes LSB is expressed in volts — TODO confirm.
    self.voltsToAdc = self.LSB/units.volt
    self.DAQnoise_rms = noise_DAQ_rms
def test_numberOfPMTs(self):
    """The PMT database table must contain exactly 12 PMTs."""
    data_pmt = DB.DataPMT()
    n_pmts = data_pmt.shape[0]
    self.assertEqual(n_pmts, 12)
def DOROTHEA(argv=sys.argv):
    """ DOROTHEA driver

    Reads zero-suppressed PMT/BLR/SiPM waveforms from the input HDF5
    file, converts them to photoelectrons using per-sensor calibration
    constants, builds and classifies PMaps, and writes them (plus copies
    of the MC/TWF groups when present) to the output HDF5 file.
    Configuration comes from the command-line arguments via configure().
    """
    CFP = configure(argv)
    if CFP["INFO"]:
        print(__doc__)
    FILE_IN = CFP["FILE_IN"]
    FILE_OUT = CFP["FILE_OUT"]
    COMPRESSION = CFP["COMPRESSION"]
    NEVENTS = CFP["NEVENTS"]
    logger.info("Debug level = {}".format(CFP["VERBOSITY"]))
    logger.info("Input file = {}".format(FILE_IN))
    logger.info("Output file = {}".format(FILE_OUT))
    logger.info("# events requested = {}".format(NEVENTS))
    logger.info("Compression library/level = {}".format(COMPRESSION))
    # open the input file
    with tb.open_file(FILE_IN, "r") as h5in:
        # access the PMT ZS data in file
        pmtzs_ = h5in.root.ZS.PMT
        blrzs_ = h5in.root.ZS.BLR
        sipmzs_ = h5in.root.ZS.SiPM
        # NOTE: NEVT is assigned twice; both tables are expected to hold
        # the same number of events.
        NEVT, NPMT, PMTWL = pmtzs_.shape
        NEVT, NSIPM, SIPMWL = sipmzs_.shape
        logger.info("# events in DST: {}".format(NEVT))
        logger.info("# PMTs = {}, # SiPMs = {} ".format(NPMT, NSIPM))
        logger.info("PMT WFL = {}, SiPM WFL = {}".format(PMTWL, SIPMWL))
        # Per-sensor ADC -> photoelectron conversion factors, as column
        # vectors so they broadcast over the waveform samples.
        pmtdf = DB.DataPMT()
        sipmdf = DB.DataSiPM()
        pmt_to_pes = abs(1.0 / pmtdf.adc_to_pes.reshape(NPMT, 1))
        sipm_to_pes = abs(1.0 / sipmdf.adc_to_pes.reshape(NSIPM, 1))
        # open the output file
        with tb.open_file(FILE_OUT, "w",
                          filters=tbl.filters(COMPRESSION)) as h5out:
            # create groups and copy MC data to the new file
            if "/MC" in h5in:
                mcgroup = h5out.create_group(h5out.root, "MC")
                twfgroup = h5out.create_group(h5out.root, "TWF")
                h5in.root.MC.MCTracks.copy(newparent=mcgroup)
                h5in.root.MC.FEE.copy(newparent=mcgroup)
                h5in.root.TWF.PMT.copy(newparent=twfgroup)
                h5in.root.TWF.SiPM.copy(newparent=twfgroup)
            pmapsgroup = h5out.create_group(h5out.root, "PMAPS")
            # create a table to store pmaps (rebined, linked, zs wfs)
            pmaps_ = h5out.create_table(pmapsgroup, "PMaps", PMAP,
                                        "Store for PMaps",
                                        tbl.filters(COMPRESSION))
            pmaps_blr_ = h5out.create_table(pmapsgroup, "PMapsBLR", PMAP,
                                            "Store for PMaps made with BLR",
                                            tbl.filters(COMPRESSION))
            # add index in event column
            pmaps_.cols.event.create_index()
            pmaps_blr_.cols.event.create_index()
            # LOOP
            t0 = time()
            for i in define_event_loop(CFP, NEVT):
                # Calibrated, pes-summed PMT waveform (and its BLR twin);
                # SiPM waveforms are kept per sensor.
                pmtwf = np.sum(pmtzs_[i] * pmt_to_pes, axis=0)
                blrwf = np.sum(blrzs_[i] * pmt_to_pes, axis=0)
                sipmwfs = sipmzs_[i] * sipm_to_pes
                # Build, classify and store the PMap from the ZS waveform…
                pmap = build_pmap(pmtwf, sipmwfs)
                classify_peaks(pmap, **CFP)
                tbl.store_pmap(pmap, pmaps_, i)
                # …and the same from the BLR waveform.
                pmap_blr = build_pmap(blrwf, sipmwfs)
                classify_peaks(pmap_blr, **CFP)
                tbl.store_pmap(pmap_blr, pmaps_blr_, i)
            # i is the last loop index, so i + 1 events were processed.
            t1 = time()
            dt = t1 - t0
            print("DOROTHEA has run over {} events in {} seconds".format(
                i + 1, dt))
    print("Leaving DOROTHEA. Safe travels!")