Example #1
import argparse

import numpy as np

# Project-specific helpers; these import paths are assumed and may need to be
# adjusted to the actual package layout.
from SimLib import config_sim as CFG
from SimLib import HF_files as HF


def main():
    # Argument parser for config file name
    parser = argparse.ArgumentParser(description='PETALO Output Comparator.')
    parser.add_argument("-f",
                        "--json_file",
                        action="store_true",
                        help="Control File (json)")
    parser.add_argument('arg1', metavar='N', nargs='?', help='')
    parser.add_argument("-d",
                        "--directory",
                        action="store_true",
                        help="Work directory")
    parser.add_argument('arg2', metavar='N', nargs='?', help='')
    args = parser.parse_args()

    if args.json_file:
        file_name = ''.join(args.arg1)
    else:
        file_name = "sim_config"
    if args.directory:
        path = ''.join(args.arg2)
    else:
        path = "./"

    config_file = file_name + ".json"

    CG = CFG.SIM_DATA(filename=path + config_file, read=True)
    CG = CG.data
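
    # For illustration only: judging from the keys accessed below, sim_config.json
    # has roughly this shape (values are made up, unrelated fields omitted):
    #
    #   {"ENVIRONMENT": {"path_to_files":    "/data/petalo/",
    #                    "file_name":         "daq_sim",
    #                    "out_file_name":     "daq_out",
    #                    "MC_out_file_name":  "fastdaq_out"}}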

    DAQ_outfile = HF.DAQ_IO(
        path=CG['ENVIRONMENT']['path_to_files'],
        daq_filename=CG['ENVIRONMENT']['file_name'],
        ref_filename=CG['ENVIRONMENT']['file_name'] + "0.h5",
        daq_outfile=CG['ENVIRONMENT']['out_file_name'] + "_" + file_name +
        ".h5")

    FASTDAQ_outfile = HF.DAQ_IO(
        path=CG['ENVIRONMENT']['path_to_files'],
        daq_filename=CG['ENVIRONMENT']['file_name'],
        ref_filename=CG['ENVIRONMENT']['file_name'] + "0.h5",
        daq_outfile=CG['ENVIRONMENT']['MC_out_file_name'] + "_" + file_name +
        ".h5")

    data_DAQ, sensors = DAQ_outfile.read()
    data_FASTDAQ, sensors = FASTDAQ_outfile.read()

    # Compare each FASTDAQ event against the corresponding DAQ event
    for i, event_F in enumerate(data_FASTDAQ):
        if np.any(event_F != data_DAQ[i]):
            print("Event %d is different in FASTDAQ and DAQ simulation" % i)
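
A minimal, self-contained sketch of the same event-by-event comparison on
made-up arrays (names and values are illustrative only):

import numpy as np

daq_events     = [np.array([1, 2, 3]), np.array([4, 5, 6])]
fastdaq_events = [np.array([1, 2, 3]), np.array([4, 5, 7])]

for i, (event_f, event_d) in enumerate(zip(fastdaq_events, daq_events)):
    if not np.array_equal(event_f, event_d):
        print("Event %d is different in FASTDAQ and DAQ simulation" % i)
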
Example #2

    def __init__(self, path, jsonfilename, file_number, encoder_data):
        self.SIM_CONT = CFG.SIM_DATA(filename=path+jsonfilename+".json",read=True)
        self.path     = self.SIM_CONT.data['ENVIRONMENT']['path_to_files']
        self.in_file  = self.SIM_CONT.data['ENVIRONMENT']['MC_file_name']+"."+\
                        str(file_number).zfill(3)+".pet.h5"
        self.out_file = self.SIM_CONT.data['ENVIRONMENT']['MC_out_file_name']+"."+\
                        str(file_number).zfill(3)+\
                        ".h5"
        self.TE1      = self.SIM_CONT.data['TOFPET']['TE']
        self.TE2      = self.SIM_CONT.data['L1']['TE']
        self.time_bin = self.SIM_CONT.data['ENVIRONMENT']['time_bin']
        self.n_rows   = self.SIM_CONT.data['TOPOLOGY']['n_rows']
        self.n_L1     = len(self.SIM_CONT.data['L1']['L1_mapping_I'])+\
                        len(self.SIM_CONT.data['L1']['L1_mapping_O'])
        # TOPOLOGY
        self.L1, SiPM_I, SiPM_O, topology = DAQ.SiPM_Mapping(self.SIM_CONT.data,
                                                        self.SIM_CONT.data['L1']['map_style'])

        self.sipmtoL1 = np.zeros(topology['n_sipms'],dtype='int32')

        # Build a flat lookup table: sipmtoL1[sipm_index] -> L1 board number
        # (a toy version of this flattening is sketched after this snippet)
        L1_count = 0
        for i in self.L1:
            for j in i:
                for l in j:
                    self.sipmtoL1[l] = L1_count
            L1_count += 1


        self.COMP = encoder_data

        self.waves    = np.array([])
        self.tof      = np.array([])
        self.extents  = np.array([])
        self.sensors  = np.array([])
        self.n_events = 0
        self.out_table = np.array([])
        self.out_table_tof = np.array([])

        self.data_enc = np.array([])
        self.data_recons = np.array([])

        self.sensors_t = np.array([])
        self.gamma1_i1 = np.array([])
        self.gamma2_i1 = np.array([])
        self.table = None
        self.h5file = None
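
The triple loop in the constructor above flattens the hierarchical mapping
returned by DAQ.SiPM_Mapping (L1 board -> ASIC -> SiPM index) into a flat
lookup array. A small stand-alone sketch of the same idea on a toy, made-up
topology:

import numpy as np

# Hypothetical mapping: 2 L1 boards, each reading a few SiPMs through ASICs
toy_L1 = [[[0, 1], [2]],        # L1 board 0 -> SiPMs 0, 1, 2
          [[3, 4], [5, 6]]]     # L1 board 1 -> SiPMs 3, 4, 5, 6

sipm_to_L1 = np.zeros(7, dtype='int32')
for board_id, board in enumerate(toy_L1):
    for asic in board:
        for sipm in asic:
            sipm_to_L1[sipm] = board_id

print(sipm_to_L1)   # -> [0 0 0 1 1 1 1]
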
Example #3
    if args.file_n:
        file_n = int(''.join(args.arg3))
    else:
        file_n = 0
    # The photon-ID and SiPM-ID overlays are simple on/off display flags
    g_opt['photons_id'] = bool(args.show_photons)
    g_opt['sipm_id']    = bool(args.show_sipmID)

    config_file = file_name + ".json"

    SIM_CONT = conf.SIM_DATA(filename=path + config_file, read=True)

    path     = SIM_CONT.data['ENVIRONMENT']['path_to_files']
    filename = SIM_CONT.data['ENVIRONMENT']['MC_out_file_name']+'.'+str(file_n).zfill(3)
    #filename = "./VER5/DAQ_OF5mm_test_REAL"

    positions = np.array(pd.read_hdf(path+filename+".h5",key='sensors'))
    data_TE = np.array(pd.read_hdf(path+filename+".h5",key='MC_TE'), dtype = 'int32')
    data_recons = np.array(pd.read_hdf(path+filename+".h5",key='MC_recons'), dtype = 'int32')
    #data_TE = np.array(pd.read_hdf(path+filename+".h5",key='MC'), dtype = 'int32')
    #data_recons = np.array(pd.read_hdf(path+filename+".h5",key='MC'), dtype = 'int32')
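
    # Illustration only: the datasets stored in the file can be listed with
    # pandas' HDFStore before picking a key.
    with pd.HDFStore(path + filename + ".h5") as store:
        print(store.keys())   # e.g. ['/sensors', '/MC_TE', '/MC_recons']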


    Qtapp  = pg.QtGui.QApplication([])
    window = QtGui.QWidget()
    args = parser.parse_args()

    if args.json_file:
        json_file = ''.join(args.arg1)
    else:
        json_file = "test"
    if args.directory:
        path = ''.join(args.arg2)
    else:
        path="/home/viherbos/DAQ_DATA/NEUTRINOS/PETit-ring/5mm_pitch/"

    #json_file = "./VER2/test"
    #path      = "/home/viherbos/DAQ_DATA/NEUTRINOS/PETit-ring/5mm_pitch/"

    SIM_JSON     = CFG.SIM_DATA(filename=path + json_file + ".json", read=True)
    encoder_file = SIM_JSON.data['ENVIRONMENT']['AUTOENCODER_file_name']
    Tenc         = SIM_JSON.data['L1']['Tenc']

    # MATLAB AUTOENCODER DATA READING (Trick to read easily and keep names)
    # COMP={}
    # with pd.HDFStore(path + encoder_file) as store:
    #     keys = store.keys()
    # COMP = {i[1:]:np.array(pd.read_hdf(path + encoder_file,key=i[1:])) for i in keys}



    # Keras Model read hack:
    try:
        with tb.open_file(path + encoder_file[2:] + ".h5") as h5file:
            B=[]
            event_info = np.hstack([true_data, event_info])

            print("Event %d is Valid" % event)

        else:
            event_info = np.zeros(9 + self.n_sipms * 2) - 1

        return event_info


if __name__ == '__main__':

    # CONFIGURATION READING
    path = "/volumedisk0/home/viherbos/DAQ_data/"
    jsonfilename = "CUBE"
    SIM_CONT = conf.SIM_DATA(filename=path + jsonfilename + ".json", read=True)
    data = SIM_CONT.data
    L1_Slice, Matrix_I, Matrix_O, topo = DAQ.SiPM_Mapping(
        data, data['L1']['map_style'])

    SIPM = {'n_sipms': 3500, 'first_sipm': 1000, 'tau_sipm': [100, 15000]}
    # Add SiPM info in .json file

    # GENERAL PARAMETERS
    n_files = 50
    time_bin = 5
    cores = 20

    name = "petit_ring_tof_all_tables"
    path = "/volumedisk0/home/paolafer/vicente/"
Example #6
                        help="Work directory")
    parser.add_argument('arg2', metavar='N', nargs='?', help='')
    args = parser.parse_args()

    if args.json_file:
        file_name = ''.join(args.arg1)
    else:
        file_name = "sim_config"
    if args.directory:
        path = ''.join(args.arg2)
    else:
        path = "./"

    config_file = file_name + ".json"

    CG = CFG.SIM_DATA(filename=path + config_file, read=True)
    CG = CG.data
    # Read data from json file

    n_sipms_int = CG['TOPOLOGY']['sipm_int_row'] * CG['TOPOLOGY']['n_rows']
    n_sipms_ext = CG['TOPOLOGY']['sipm_ext_row'] * CG['TOPOLOGY']['n_rows']
    n_sipms = n_sipms_int + n_sipms_ext
    first_sipm = CG['TOPOLOGY']['first_sipm']

    n_files = CG['ENVIRONMENT']['n_files']
    # Number of files to group for data input
    A = HF.hdf_compose(CG['ENVIRONMENT']['path_to_files'],
                       CG['ENVIRONMENT']['file_name'], n_files, n_sipms)
    DATA, TDC, sensors, n_events = A.compose()

    # Number of events for simulation
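
HF.hdf_compose is a project-specific helper; judging only from the call above,
it stacks the waveform tables of n_files consecutive input files into a single
event array. A rough stand-alone sketch of that grouping idea with pandas (the
file-name pattern and the 'MC' key are assumptions for illustration, not the
project's actual format):

import numpy as np
import pandas as pd

def compose_files(path, base_name, n_files, key='MC'):
    # Read the same table from n_files consecutive HDF5 files and stack events
    chunks = [np.array(pd.read_hdf(path + base_name + "." + str(i).zfill(3) + ".h5",
                                   key=key))
              for i in range(n_files)]
    data = np.concatenate(chunks, axis=0)
    return data, data.shape[0]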