def main(argv):
    """Data-processing entry point for MJ60.

    Uses pygama's DataSet class to process runs for different data sets and
    arbitrary configuration options defined in a JSON file (./runDB.json).
    Command-line flags select which processing stages to run
    (daq_to_raw, raw_to_dsp).
    """
    # declare the DB paths once and use them everywhere below
    # (previously the literals were re-hardcoded at the call site)
    run_db, cal_db = './runDB.json', './calDB.json'

    # -- parse args --
    par = argparse.ArgumentParser(description="data processing suite for MJ60")
    arg, st, sf = par.add_argument, "store_true", "store_false"
    arg("-ds", nargs='*', action="store", help="load runs for a DS")
    arg("-r", "--run", nargs=1, help="load a single run")
    arg("-t0", "--daq_to_raw", action=st, help="run ProcessRaw on list")
    arg("-t1", "--raw_to_dsp", action=st, help="run RunDSP on list")
    arg("-t", "--test", action=st, help="test mode, don't run")
    arg("-n", "--nevt", nargs='?', default=np.inf, help="limit max num events")
    arg("-i", "--ioff", nargs='?', default=0, help="start at index [i]")
    arg("-v", "--verbose", action=st, help="set verbose output")
    arg("-o", "--ovr", action=st, help="overwrite existing files")
    arg("-m", "--nomp", action=sf, help="don't use multiprocessing")
    args = vars(par.parse_args())

    # -- standard method to declare the DataSet from cmd line --
    ds = pu.get_dataset_from_cmdline(args, run_db, cal_db)

    # -- start processing --
    if args["daq_to_raw"]:
        d2r(ds, args["ovr"], args["nevt"], args["verbose"], args["test"])

    if args["raw_to_dsp"]:
        raw_to_dsp(ds, args["ovr"], args["nevt"], args["ioff"], args["nomp"],
                   args["verbose"], args["test"])
def main(argv):
    """Data-processing entry point for CAGE.

    Uses pygama's DataSet class to process runs for different data sets and
    arbitrary configuration options defined in a JSON file (./runDB.json).
    Command-line flags select which processing stages to run
    (daq_to_raw, raw_to_dsp).
    """
    # datadir = os.environ["CAGEDATA"]
    run_db, cal_db = './runDB.json', './calDB.json'

    # -- parse args --
    par = argparse.ArgumentParser(description="data processing suite for CAGE")
    arg, st, sf = par.add_argument, "store_true", "store_false"
    arg("-ds", nargs='*', action="store", help="load runs for a DS")
    arg("-r", "--run", nargs=1, help="load a single run")
    arg("-d2r", "--daq_to_raw", action=st, help="run daq_to_raw on list")
    arg("-r2d", "--raw_to_dsp", action=st, help="run raw_to_dsp on list")
    arg("-t", "--test", action=st, help="test mode, don't run")
    arg("-n", "--nevt", nargs='?', default=np.inf, help="limit max num events")
    arg("-i", "--ioff", nargs='?', default=0, help="start at index [i]")
    arg("-o", "--ovr", action=st, help="overwrite existing files")
    arg('-v', '--verbose', default=2, type=int,
        help="Verbosity level: 0=silent, 1=basic warnings, 2=verbose output, 3=debug. Default is 2.")
    arg('-b', '--block', default=8, type=int,
        help="Number of waveforms to process simultaneously. Default is 8")
    arg('-g', '--group', default='/daqdata',
        help="Name of group in LH5 file. Default is daqdata.")

    # -- declare the DataSet --
    # parse the command line exactly once; DataSet wants a dict view of it
    # (previously parse_args() was called twice)
    args = par.parse_args()
    d_args = vars(args)
    ds = pu.get_dataset_from_cmdline(d_args, run_db, cal_db)

    # print(ds.runs)
    # pprint(ds.paths)

    # -- start processing --
    if args.daq_to_raw:
        daq_to_raw(ds, args.ovr, args.nevt, args.verbose, args.test)

    if args.raw_to_dsp:
        raw_to_dsp(ds, args.ovr, args.nevt, args.test, args.verbose,
                   args.block, args.group)
def main():
    """
    Search the DSP processor-list parameter space (trap filter, RC decay
    constants, ...) for the best energy resolution.  Each row of a grid
    DataFrame is one parameter combination; the DataFrame is the shared
    input/output of the helper routines.  The same scheme extends to looping
    over individual detectors or varying any other processor settings.
    """
    parser = argparse.ArgumentParser(description="pygama dsp optimizer")
    add, st, sf = parser.add_argument, "store_true", "store_false"
    add("-ds", nargs='*', action="store", help="load runs for a DS")
    add("-r", "--run", nargs=1, help="load a single run")
    add("-g", "--grid", action=st, help="set DSP parameters to be varied")
    add("-w", "--window", action=st, help="generate a small waveform file")
    add("-p", "--process", action=st, help="run DSP processing")
    add("-f", "--fit", action=st, help="fit outputs to peakshape function")
    add("-t", "--plot", action=st, help="find optimal parameters & make plots")
    add("-v", "--verbose", action=st, help="set verbose mode")
    opts = vars(parser.parse_args())

    ds = pu.get_dataset_from_cmdline(opts, "runDB.json", "calDB.json")
    # pprint(ds.paths)

    # I/O locations: everything lives under ~/Data/cage
    out_dir = os.path.expanduser('~') + "/Data/cage"
    f_grid = f"{out_dir}/cage_optimizer_grid.h5"
    f_tier1 = f"{out_dir}/cage_optimizer_t1.h5"
    f_tier2 = f"{out_dir}/cage_optimizer_t2.h5"
    f_opt = f"{out_dir}/cage_optimizer_data.h5"

    # -- run routines --
    if opts["grid"]:
        # set the combination of processor params to vary to optimize resolution
        set_grid(f_grid)

    if opts["window"]:
        # generate a small single-peak file w/ uncalibrated energy to reanalyze
        window_ds(ds, f_tier1)

    if opts["process"]:
        # create a file with DataFrames for each set of parameters
        process_ds(ds, f_grid, f_opt, f_tier1, f_tier2)

    if opts["fit"]:
        # fit all outputs to the peakshape function and find the best resolution
        get_fwhm(f_grid, f_opt, verbose=opts["verbose"])

    if opts["plot"]:
        # show results
        plot_fwhm(f_grid)
def main():
    """Automatic calibration of pygama DataSets.

    Command-line options to specify the DataSet are the same as in
    processing.py.  Results are saved in a JSON database (calDB.json) for
    access by other routines.
    """
    # declare the DB paths once and use them everywhere below
    # (previously the literals were re-hardcoded at the call site)
    run_db, cal_db = "runDB.json", "calDB.json"

    par = argparse.ArgumentParser(description="pygama calibration suite")
    arg, st, sf = par.add_argument, "store_true", "store_false"
    arg("-ds", nargs='*', action="store", help="load runs for a DS")
    arg("-r", "--run", nargs=1, help="load a single run")
    arg("-s", "--spec", action=st, help="print simple spectrum")
    arg("-p1", "--pass1", action=st, help="run pass-1 (linear) calibration")
    arg("-p2", "--pass2", action=st, help="run pass-2 (peakfit) calibration")
    arg("-m", "--mode", nargs=1, help="set pass-2 calibration mode")
    arg("-e", "--etype", nargs=1, help="custom energy param (default is e_ftp)")
    arg("-t", "--test", action=st, help="set verbose (testing) output")
    arg("-db", "--writeDB", action=st, help="store results in DB")
    arg("-pr", "--printDB", action=st, help="print calibration results in DB")
    args = vars(par.parse_args())

    # -- standard method to declare the DataSet from cmd line --
    ds = pu.get_dataset_from_cmdline(args, run_db, cal_db)

    # -- start calibration routines --
    etype = args["etype"][0] if args["etype"] else "e_ftp"

    if args["printDB"]:
        show_calDB(cal_db)  # print current DB status

    if args["spec"]:
        show_spectrum(ds, etype)

    if args["pass1"]:
        calibrate_pass1(ds, etype, args["writeDB"], args["test"])

    if args["pass2"]:
        cal_mode = int(args["mode"][0]) if args["mode"] else 0
        calibrate_pass2(ds, cal_mode, args["writeDB"])
def _ensure_dir(path):
    """Create directory *path* if needed, reporting whether it already existed."""
    try:
        os.mkdir(path)
    except FileExistsError:
        print("Directory '%s' already exists" % path)
    else:
        print("Directory '%s' created" % path)


def main():
    """DSP parameter optimizer with ZAC-filter support.

    To get the best energy resolution, explore the possible values of the DSP
    processor list, especially trap filter and RC decay constants.  Includes
    dedicated functions for the optimization of the ZAC filter.
    Modified by: V. D'Andrea
    """
    par = argparse.ArgumentParser(description="pygama dsp optimizer")
    arg, st, sf = par.add_argument, "store_true", "store_false"
    arg("-d", "--dir", nargs=1, action="store", help="analysis directory")
    arg("-ds", nargs='*', action="store", help="load runs for a DS")
    arg("-r", "--run", nargs=1, help="load a single run")
    arg("-g", "--grid", action=st, help="set DSP parameters to be varied")
    arg("-w", "--window", action=st, help="generate a small waveform file")
    arg("-p", "--process", action=st, help="run DSP processing")
    arg("-f", "--fit", action=st, help="fit outputs to peakshape function")
    arg("-t", "--plot", action=st, help="find optimal parameters & make plots")
    arg("-cf", "--compare", action=st, help="compare fwhm")
    arg("-c", "--case", nargs=1, help="case: 0 = trap filter, 1= ZAC filter")
    arg("-v", "--verbose", action=st, help="set verbose mode")
    args = vars(par.parse_args())

    local_dir = "."
    if args["dir"]:
        local_dir = args["dir"][0]

    # a run number is mandatory: fail with a clear message instead of a
    # TypeError on None when -r is omitted
    if not args["run"]:
        print("Error: a run number is required (-r)")
        return
    run = args["run"][0]

    # case selects the filter to optimize
    case = 0
    if args["case"]:
        case = int(args["case"][0])
    if case == 0:
        filt = "trap"
        print("Run trap optimization")
    elif case == 1:
        filt = "zac"
        print("Run ZAC optimization")
    else:
        print("Case not valid")
        return

    ds = pu.get_dataset_from_cmdline(args, f"{local_dir}/meta/runDB.json",
                                     f"{local_dir}/meta/calDB.json")
    # pprint(ds.paths)

    # set up the output directory tree: <local_dir>/cage/run<run>
    d_out = f"{local_dir}/cage"
    _ensure_dir(d_out)
    d_out = f"{d_out}/run{run}"
    _ensure_dir(d_out)

    f_tier1 = f"{d_out}/cage_optimizer_raw.h5"
    f_grid = f"{d_out}/{filt}_optimizer_grid.h5"
    f_opt = f"{d_out}/{filt}_optimizer_dsp.h5"

    # -- run routines --
    if args["grid"]:
        # set the combination of processor params to vary to optimize resolution
        set_grid(f_grid, case)

    if args["window"]:
        # generate a small single-peak file w/ uncalibrated energy to reanalyze
        window_ds(ds, f_tier1)

    if args["process"]:
        # create a file with DataFrames for each set of parameters
        process_ds(f_grid, f_opt, f_tier1, case)

    if args["fit"]:
        # fit all outputs to the peakshape function and find the best resolution
        get_fwhm(f_grid, f_opt, case, verbose=args["verbose"])

    if args["plot"]:
        # show results
        plot_fwhm(f_grid, f_opt, d_out, case)

    if args["compare"]:
        # compare fwhm results
        compare_fwhm(d_out)