def read_and_store_data(self):
    """Read and store KASCADE data matching HiSPARC data

    This function looks at the HiSPARC event data in the specified
    datafile and then processes and adds KASCADE data surrounding
    those events, for later coincidence processing.

    """
    # Determine start and end timestamps from HiSPARC data
    try:
        timestamps = self.hisparc.col('timestamp')
        start = gpstime.gps_to_utc(min(timestamps))
        stop = gpstime.gps_to_utc(max(timestamps))
    except IndexError:
        raise RuntimeError("HiSPARC event table is empty")

    print "Processing data from %s to %s" % (time.ctime(start),
                                             time.ctime(stop))

    self._process_events_in_range(start, stop)
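
# Usage sketch for read_and_store_data(), kept as comments because the
# enclosing class is not shown in this module excerpt.  The class name
# KascadeProcessor and its constructor arguments below are illustrative
# assumptions, not the actual API: the only assumption taken from the code
# itself is that the instance exposes a HiSPARC event table as self.hisparc.
#
#   import tables
#
#   data = tables.openFile('kascade.h5', 'a')
#   processor = KascadeProcessor(data.root.hisparc.station_601.events,
#                                data.root.kascade.events, 'HiSparc.dat.gz')
#   processor.read_and_store_data()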
def helper(hisparc, kascade, kascadefile):
    """Helper for quickly processing KASCADE data

    This function looks at the HiSPARC event data in the specified
    datafile and then processes and adds KASCADE data surrounding
    those events, for later coincidence processing.  Also, existing
    KASCADE data rows are inspected to determine the time window.

    :param hisparc: HiSPARC event table
    :param kascade: KASCADE event table
    :param kascadefile: KASCADE data file

    Example::

        >>> import tables
        >>> import hisparc
        >>> data = tables.openFile('kascade.h5', 'a')
        >>> data.createGroup('/', 'kascade', "KASCADE data")
        /kascade (Group) 'KASCADE data'
          children := []
        >>> data.createTable('/kascade', 'events',
        ...     hisparc.containers.KascadeEvent, "KASCADE events")
        /kascade/events (Table(0,)) 'KASCADE events'
          description := {
          "run_id": Int32Col(shape=(), dflt=0, pos=0),
          "event_id": Int64Col(shape=(), dflt=0, pos=1),
          "timestamp": Time32Col(shape=(), dflt=0, pos=2),
          "nanoseconds": UInt32Col(shape=(), dflt=0, pos=3),
          "ext_timestamp": UInt64Col(shape=(), dflt=0, pos=4),
          "energy": Float64Col(shape=(), dflt=0.0, pos=5),
          "core_pos": Float64Col(shape=(2,), dflt=0.0, pos=6),
          "zenith": Float64Col(shape=(), dflt=0.0, pos=7),
          "azimuth": Float64Col(shape=(), dflt=0.0, pos=8),
          "Num_e": Float64Col(shape=(), dflt=0.0, pos=9),
          "Num_mu": Float64Col(shape=(), dflt=0.0, pos=10),
          "dens_e": Float64Col(shape=(4,), dflt=0.0, pos=11),
          "dens_mu": Float64Col(shape=(4,), dflt=0.0, pos=12),
          "P200": Float64Col(shape=(), dflt=0.0, pos=13),
          "T200": Float64Col(shape=(), dflt=0.0, pos=14)}
          byteorder := 'little'
          chunkshape := (399,)
        >>> hisparc.kascade.helper(
        ...     data.root.hisparc.cluster_kascade.station_601.events,
        ...     data.root.kascade.events, 'HiSparc.dat.gz')
        Processing data from Tue Jul 1 16:29:31 2008 to Tue Jul 1 17:15:06 2008

    """
    # Determine start and end timestamps from HiSPARC data
    try:
        ts = hisparc[:]['timestamp']
        hstart = gpstime.gps_to_utc(min(ts))
        hstop = gpstime.gps_to_utc(max(ts))
    except IndexError:
        print "There is no HiSPARC data yet."
        return

    # Determine start and stop timestamps from KASCADE data
    try:
        ts = kascade[:]['timestamp']
        kstart = min(ts)
        kstop = max(ts)
    except ValueError:
        kstart = None
        kstop = None

    if kstart is not None and hstart < kstart:
        # This should never happen
        print "WARNING: HiSPARC data found which is earlier than KASCADE"
        print "data.  Please check your timestamps:"
        print "First HiSPARC data:", hstart
        print "First KASCADE data:", kstart
        print "The timestamps should not differ by much."

    if kstart is None:
        # There is no KASCADE data yet
        start, stop = hstart, hstop
    else:
        # To avoid duplicates in the case of an aborted previous run,
        # remove rows which share the latest timestamp, because we want
        # to reprocess that timestamp
        l = kascade.getWhereList('timestamp==%d' % kstop)
        kascade.removeRows(start=l[0], stop=l[-1] + 1)
        start, stop = kstop, hstop

    print "Processing data from %s to %s" % (time.ctime(start),
                                             time.ctime(stop))

    process_events(kascadefile, kascade, start, stop)