Beispiel #1
0
 def test_incomplete_template(self):
     """from_template must raise KeyError when the 'time' field is missing."""
     n = 10
     # Hit dict built from ranges, deliberately omitting 'time'.
     hits_dict = {
         "channel_id": np.arange(n),
         "dom_id": np.arange(n),
         "tot": np.arange(n),
         "triggered": np.ones(n),
         "group_id": 0,  # event_id
     }
     with pytest.raises(KeyError):
         tab = Table.from_template(hits_dict, "Hits")
         assert tab is not None
     # Same check with explicitly typed arrays, again without 'time'.
     hits_arrays = {
         "channel_id": np.ones(n, dtype=int),
         "dom_id": np.ones(n, dtype=int),
         "tot": np.ones(n, dtype=float),
         "triggered": np.ones(n, dtype=bool),
         "group_id": np.ones(n, dtype=int),
     }
     with pytest.raises(KeyError):
         tab = Table.from_template(hits_arrays, "Hits")
         assert tab is not None
Beispiel #2
0
 def test_template(self):
     """from_template accepts both range dicts and typed-array dicts."""
     n = 10
     hit_data = dict(
         channel_id=np.arange(n),
         dom_id=np.arange(n),
         time=np.arange(n),
         tot=np.arange(n),
         triggered=np.ones(n),
         group_id=0,    # event_id
     )
     tab = Table.from_template(hit_data, 'Hits')
     assert tab.name == 'Hits'
     assert tab.split_h5 is True
     assert isinstance(tab, Table)
     typed_hits = dict(
         channel_id=np.ones(n, dtype=int),
         dom_id=np.ones(n, dtype=int),
         time=np.ones(n, dtype=float),
         tot=np.ones(n, dtype=float),
         triggered=np.ones(n, dtype=bool),
         group_id=np.ones(n, dtype=int),
     )
     tab = Table.from_template(typed_hits, 'Hits')
     assert tab.name == 'Hits'
     assert tab.split_h5 is True
     assert isinstance(tab, Table)
Beispiel #3
0
 def test_incomplete_template(self):
     """A dict lacking the 'time' field must not satisfy the Hits template."""
     n = 10
     incomplete = dict(
         channel_id=np.arange(n),
         dom_id=np.arange(n),
         tot=np.arange(n),
         triggered=np.ones(n),
         group_id=0,    # event_id
     )
     with pytest.raises(KeyError):
         tab = Table.from_template(incomplete, 'Hits')
         assert tab is not None
     incomplete_arrays = dict(
         channel_id=np.ones(n, dtype=int),
         dom_id=np.ones(n, dtype=int),
         tot=np.ones(n, dtype=float),
         triggered=np.ones(n, dtype=bool),
         group_id=np.ones(n, dtype=int),
     )
     with pytest.raises(KeyError):
         tab = Table.from_template(incomplete_arrays, 'Hits')
         assert tab is not None
Beispiel #4
0
 def test_init_from_template_with_differently_ordered_dicts(self):
     """Key order of the source dict must not influence the resulting table."""
     t1 = Table.from_template(
         {
             "frame_index": 1,
             "slice_id": 2,
             "timestamp": 3,
             "nanoseconds": 4,
             "n_frames": 5,
         },
         "TimesliceInfo",
     )
     t2 = Table.from_template(
         {
             "n_frames": 5,
             "timestamp": 3,
             "nanoseconds": 4,
             "slice_id": 2,
             "frame_index": 1,
         },
         "TimesliceInfo",
     )
     assert t1.dtype == t2.dtype
     for field in ("frame_index", "slice_id", "nanoseconds", "n_frames",
                   "timestamp"):
         assert getattr(t1, field)[0] == getattr(t2, field)[0]
Beispiel #5
0
 def test_template(self):
     """from_template builds a split 'Hits' table from both input styles."""
     n = 10
     # Try a dict of plain ranges and a dict of explicitly typed arrays.
     for hits in (
         {
             "channel_id": np.arange(n),
             "dom_id": np.arange(n),
             "time": np.arange(n),
             "tot": np.arange(n),
             "triggered": np.ones(n),
             "group_id": 0,  # event_id
         },
         {
             "channel_id": np.ones(n, dtype=int),
             "dom_id": np.ones(n, dtype=int),
             "time": np.ones(n, dtype=float),
             "tot": np.ones(n, dtype=float),
             "triggered": np.ones(n, dtype=bool),
             "group_id": np.ones(n, dtype=int),
         },
     ):
         tab = Table.from_template(hits, "Hits")
         assert tab.name == "Hits"
         assert tab.split_h5 is True
         assert isinstance(tab, Table)
Beispiel #6
0
    def process_event(self, data, blob):
        """Parse a raw DAQ event payload and attach 'Hits' and 'EventInfo'.

        Returns early (adding nothing to the blob) when the event carries
        no snapshot hits.
        """
        data_io = BytesIO(data)
        # The preamble must be consumed from the stream even though it is unused.
        preamble = DAQPreamble(file_obj=data_io)    # noqa
        event = DAQEvent(file_obj=data_io)
        header = event.header

        hits = event.snapshot_hits
        n_hits = event.n_snapshot_hits
        if n_hits == 0:
            return
        dom_ids, channel_ids, times, tots = zip(*hits)
        # Flag each snapshot hit that also appears among the triggered hits.
        triggereds = np.zeros(n_hits)
        triggered_map = {}
        for triggered_hit in event.triggered_hits:
            dom_id, pmt_id, time, tot, _ = triggered_hit
            triggered_map[(dom_id, pmt_id, time, tot)] = True
        for idx, hit in enumerate(hits):
            # NOTE(review): assumes each snapshot hit compares equal to a
            # (dom_id, pmt_id, time, tot) tuple key -- confirm the hit type.
            triggereds[idx] = hit in triggered_map

        hit_series = Table.from_template({
            'channel_id': channel_ids,
            'dom_id': dom_ids,
            'time': times,
            'tot': tots,
            'triggered': triggereds,
            'group_id': self.event_id,
        }, 'Hits')

        blob['Hits'] = hit_series

        # MC-related fields are not available from the DAQ stream and are
        # filled with zeros.
        event_info = Table.from_template(
            {
                'det_id': header.det_id,
        # 'frame_index': self.index,  # header.time_slice,
                'frame_index': header.time_slice,
                'livetime_sec': 0,
                'mc_id': 0,
                'mc_t': 0,
                'n_events_gen': 0,
                'n_files_gen': 0,
                'overlays': event.overlays,
                'trigger_counter': event.trigger_counter,
                'trigger_mask': event.trigger_mask,
                'utc_nanoseconds': header.ticks * 16,    # ticks are 16 ns cycles
                'utc_seconds': header.time_stamp,
                'weight_w1': 0,
                'weight_w2': 0,
                'weight_w3': 0,    # MC weights
                'run_id': header.run,    # run id
                'group_id': self.event_id,
            },
            'EventInfo'
        )
        blob['EventInfo'] = event_info

        self.event_id += 1
        self.index += 1
Beispiel #7
0
    def extract_event(self):
        """Retrieve the next event from the reader and fill the current blob.

        Returns the blob with 'EventInfo' and 'Hits' attached and advances
        ``self.event_index``.
        """
        blob = self._current_blob
        r = self.event_reader
        r.retrieve_next_event()    # do it at the beginning!

        n = r.number_of_snapshot_hits

        # Grow the reusable hit buffers with 1.5x headroom when too small.
        if n > self.buf_size:
            self._resize_buffers(int(n * 3 / 2))

        # The reader fills the pre-allocated buffers in place; only the
        # first n entries are valid afterwards.
        r.get_hits(
            self._channel_ids, self._dom_ids, self._times, self._tots,
            self._triggereds
        )

        hit_series = Table.from_template({
            'channel_id': self._channel_ids[:n],
            'dom_id': self._dom_ids[:n],
            'time': self._times[:n],
            'tot': self._tots[:n],
            'triggered': self._triggereds[:n],
            'group_id': self.event_index,
        }, 'Hits')

        # MC-related fields are unavailable here: weights are NaN, counters 0.
        event_info = Table.from_template({
            'det_id': r.det_id,
            'frame_index': r.frame_index,
            'livetime_sec': 0,
            'mc_id': 0,
            'mc_t': 0,
            'n_events_gen': 0,
            'n_files_gen': 0,
            'overlays': r.overlays,
            'trigger_counter': r.trigger_counter,
            'trigger_mask': r.trigger_mask,
            'utc_nanoseconds': r.utc_nanoseconds,
            'utc_seconds': r.utc_seconds,
            'weight_w1': np.nan,
            'weight_w2': np.nan,
            'weight_w3': np.nan,
            'run_id': 0,
            'group_id': self.event_index,
        }, 'EventInfo')

        self.event_index += 1
        blob['EventInfo'] = event_info
        blob['Hits'] = hit_series
        return blob
Beispiel #8
0
    def _extract_hits(self):
        """Fetch all hits from the reader into a 'TimesliceHits' table."""
        total_hits = self.r.number_of_hits

        # Grow the reusable buffers with 1.5x headroom when they are too small.
        if total_hits > self.buf_size:
            buf_size = int(total_hits * 3 / 2)
            self._resize_buffers(buf_size)

        # The reader fills the pre-allocated buffers in place.
        self.r.get_hits(
            self._channel_ids, self._dom_ids, self._times, self._tots
        )

        # An empty list keeps the resulting table empty when there are no
        # hits; otherwise a scalar 0 placeholder is used.
        # NOTE(review): presumably from_template broadcasts the scalar to
        # one entry per row -- confirm against the Table implementation.
        group_id = 0 if total_hits > 0 else []

        hits = Table.from_template(
            {
                'channel_id': self._channel_ids[:total_hits],
                'dom_id': self._dom_ids[:total_hits],
                'time': self._times[:total_hits].astype('f8'),
                'tot': self._tots[:total_hits],
        # 'triggered': self._triggereds[:total_hits],  # dummy
                'group_id': group_id,    # slice_id will be set afterwards
            },
            'TimesliceHits'
        )
        return hits
Beispiel #9
0
 def summaryslice_generator(self):
     """Yield one blob per summaryslice.

     Each blob carries a 'SummarysliceInfo' table and a 'Summaryslice'
     dict mapping DOM ID to that DOM's frame summary data.
     """
     slice_id = 0
     while self.r.has_next:
         summary_slice = {}
         self.r.retrieve_next_summaryslice()
         blob = Blob()
         summaryslice_info = Table.from_template({
             'frame_index': self.r.frame_index,
             'slice_id': slice_id,
             'timestamp': self.r.utc_seconds,
             'nanoseconds': self.r.utc_nanoseconds,
             'n_frames': self.r.n_frames,
         }, 'SummarysliceInfo')
         blob['SummarysliceInfo'] = summaryslice_info
         # Collect the 31-entry rate/HRV/FIFO arrays for every frame (DOM);
         # the reader fills the pre-allocated arrays in place.
         while self.r.has_next_frame:
             rates = np.zeros(31, dtype='f8')
             hrvs = np.zeros(31, dtype='i4')
             fifos = np.zeros(31, dtype='i4')
             self.r.get_rates(rates)
             self.r.get_hrvs(hrvs)
             self.r.get_fifos(fifos)
             summary_slice[self.r.dom_id] = {
                 'rates': rates,
                 'hrvs': hrvs.astype(bool),
                 'fifos': fifos.astype(bool),
                 'n_udp_packets': self.r.number_of_received_packets,
                 'max_sequence_number': self.r.max_sequence_number,
                 'has_udp_trailer': self.r.has_udp_trailer,
                 'high_rate_veto': self.r.high_rate_veto,
                 'fifo_status': self.r.fifo_status,
             }
             self.r.retrieve_next_frame()
         blob['Summaryslice'] = summary_slice
         slice_id += 1
         yield blob
Beispiel #10
0
 def test_init_from_template_with_differently_ordered_dicts(self):
     """Field order in the input dict must not change the resulting table."""
     fields = {
         'frame_index': 1,
         'slice_id': 2,
         'timestamp': 3,
         'nanoseconds': 4,
         'n_frames': 5,
     }
     shuffled = {key: fields[key] for key in
                 ('n_frames', 'timestamp', 'nanoseconds', 'slice_id',
                  'frame_index')}
     t1 = Table.from_template(fields, 'TimesliceInfo')
     t2 = Table.from_template(shuffled, 'TimesliceInfo')
     assert t1.dtype == t2.dtype
     for name in fields:
         assert getattr(t1, name)[0] == getattr(t2, name)[0]
Beispiel #11
0
 def test_adhoc_noname_template(self):
     """An ad-hoc template without a name falls back to DEFAULT_NAME."""
     dtype = np.dtype([('a', '<u4'), ('b', 'f4')])
     a_template = {
         'dtype': dtype,
         'h5loc': '/yat',
         'split_h5': True,
         'h5singleton': True,
     }
     arr = np.array([(1, 3), (2, 4)], dtype=dtype)
     tab = Table.from_template(arr, a_template)
     self.assertListEqual([1, 2], list(tab.a))
     self.assertListEqual([3.0, 4.0], list(tab.b))
     assert tab.name == DEFAULT_NAME
     assert tab.h5singleton
Beispiel #12
0
 def test_adhoc_noname_template(self):
     """A nameless ad-hoc template keeps data and uses DEFAULT_NAME."""
     record_dtype = np.dtype([("a", "<u4"), ("b", "f4")])
     template = dict(
         dtype=record_dtype,
         h5loc="/yat",
         split_h5=True,
         h5singleton=True,
     )
     arr = np.array([(1, 3), (2, 4)], dtype=record_dtype)
     tab = Table.from_template(arr, template)
     self.assertListEqual([1, 2], list(tab.a))
     self.assertListEqual([3.0, 4.0], list(tab.b))
     assert tab.name == DEFAULT_NAME
     assert tab.h5singleton
Beispiel #13
0
 def test_apply_to_timeslice_hits(self):
     """Calibration adds t0 and shifts hit times for TimesliceHits."""
     raw = {
         'channel_id': [0, 1, 2],
         'dom_id': [2, 3, 3],
         'time': [10.1, 11.2, 12.3],
         'tot': np.ones(3, dtype=float),
         'group_id': 0,
     }
     tshits = Table.from_template(raw, 'TimesliceHits')
     calib = Calibration(filename=DETX_FILENAME)
     calibrated = calib.apply(tshits)
     assert len(calibrated) == len(tshits)
     assert np.allclose([40, 80, 90], calibrated.t0)
     # TimesliceHits is using int4 for times, so it's truncated when we pass in float64
     assert np.allclose([50.1, 91.2, 102.3], calibrated.time, atol=0.1)
Beispiel #14
0
 def get_blob(self, index):
     """Return the blob for timeslice *index* (index is the slice ID).

     Attaches a 'TimesliceInfo' table and the extracted hits (under
     ``self._hits_blob_key``) to the current blob.
     """
     blob = self._current_blob
     self.r.retrieve_timeslice(index)
     timeslice_info = Table.from_template({
         'frame_index': self.r.frame_index,
         'slice_id': index,
         'timestamp': self.r.utc_seconds,
         'nanoseconds': self.r.utc_nanoseconds,
         'n_frames': self.r.n_frames,
     }, 'TimesliceInfo')
     hits = self._extract_hits()
     # Group all hits of this timeslice under the slice ID.
     hits.group_id = index
     blob['TimesliceInfo'] = timeslice_info
     blob[self._hits_blob_key] = hits
     return blob
Beispiel #15
0
 def test_apply_to_timeslice_hits(self):
     """Calibration (without slewing) adds t0 and shifts TimesliceHits times."""
     raw = {
         "channel_id": [0, 1, 2],
         "dom_id": [2, 3, 3],
         "time": [10.1, 11.2, 12.3],
         "tot": np.ones(3, dtype=float),
         "group_id": 0,
     }
     tshits = Table.from_template(raw, "TimesliceHits")
     calib = Calibration(filename=data_path("detx/detx_v1.detx"))
     calibrated = calib.apply(tshits, correct_slewing=False)
     assert len(calibrated) == len(tshits)
     assert np.allclose([40, 80, 90], calibrated.t0)
     # TimesliceHits is using int4 for times, so it's truncated when we pass in float64
     assert np.allclose([50.1, 91.2, 102.3], calibrated.time, atol=0.1)
Beispiel #16
0
 def create_timeslice_info(self):
     """Build a 'TimesliceInfo' table from the stream's timeslice headers."""
     header = self.r.timeslices.stream(self.stream, 0).header
     frame_indices = header["frame_index"].array()
     utc_seconds = header["timeslice_start.UTC_seconds"].array()
     # Timestamps are stored as 16 ns cycles; convert to nanoseconds.
     cycles = header["timeslice_start.UTC_16nanosecondcycles"].array()
     n_slices = len(frame_indices)
     return Table.from_template(
         {
             "frame_index": frame_indices,
             "slice_id": range(n_slices),
             "timestamp": utc_seconds,
             "nanoseconds": cycles * 16,
             "n_frames": np.zeros(n_slices),
         },
         "TimesliceInfo",
     )
Beispiel #17
0
    def _extract_hits(self, index):
        """Collect all frame hits of timeslice *index* into a TimesliceHits table."""
        timeslice = self.r.timeslices.stream(self.stream, index)
        dom_ids = []
        channel_ids = []
        times = []
        tots = []
        group_ids = []

        for dom_id, frame in timeslice.frames.items():
            n = len(frame.pmt)
            channel_ids.extend(frame.pmt)
            times.extend(frame.tdc)
            tots.extend(frame.tot)
            dom_ids.extend([dom_id] * n)
            group_ids.extend([0] * n)

        return Table.from_template(
            {
                "dom_id": dom_ids,
                "channel_id": channel_ids,
                "time": times,
                "tot": tots,
                "group_id": group_ids,
            },
            "TimesliceHits",
        )
Beispiel #18
0
    def _parse_timeslice(self, data):
        """Parse one binary DAQTimeslice from *data* (a file-like object).

        Returns a tuple ``(ts_info, ts_frameinfos, ts_hits)``: the
        timeslice-level info table, a dict of per-DOM frame-info tables
        keyed by DOM ID, and one table with all hits of the timeslice.

        Raises ValueError for unsupported timeslice versions.
        """
        tsl_size, datatype = unpack("<ii", data.read(8))
        # Jpp v13+ streams carry a 2-byte version field; only version 1 is
        # supported here.
        if not self.legacy:
            version = unpack("<h", data.read(2))[0]
            if version != 1:
                raise ValueError(
                    "Unsupported DAQTimeslice version ({}) or legacy DAQ. "
                    "Make sure Jpp v13+ is used or pass 'legacy=True' "
                    "to {}.".format(version, self.__class__.__name__))
        det_id, run, sqnr = unpack("<iii", data.read(12))
        timestamp, ns_ticks, n_frames = unpack("<iii", data.read(12))

        ts_info = Table.from_template(
            {
                "frame_index": sqnr,
                "slice_id": 0,
                "timestamp": timestamp,
                "nanoseconds": ns_ticks * 16,  # ticks are 16 ns cycles
                "n_frames": n_frames,
            },
            "TimesliceInfo",
        )
        ts_frameinfos = {}

        _dom_ids = []
        _channel_ids = []
        _times = []
        _tots = []
        for _ in range(n_frames):
            # Per-frame header: sizes, identifiers and status words.
            frame_size, datatype = unpack("<ii", data.read(8))
            det_id, run, sqnr = unpack("<iii", data.read(12))
            timestamp, ns_ticks, dom_id = unpack("<iii", data.read(12))
            dataqueue_status = unpack("<i", data.read(4))[0]
            dom_status = unpack("<iiii", data.read(4 * 4))
            n_hits = unpack("<i", data.read(4))[0]
            ts_frameinfos[dom_id] = Table.from_template(
                {
                    "det_id": det_id,
                    "run_id": run,
                    "sqnr": sqnr,
                    "timestamp": timestamp,
                    "nanoseconds": ns_ticks * 16,
                    "dom_id": dom_id,
                    "dataqueue_status": dataqueue_status,
                    "dom_status": dom_status,
                    "n_hits": n_hits,
                },
                "TimesliceFrameInfo",
            )
            # Each hit is 6 bytes, big-endian: channel (u8), time (i32), ToT (u8).
            for j in range(n_hits):
                hit = unpack("!BlB", data.read(6))
                _dom_ids.append(dom_id)
                _channel_ids.append(hit[0])
                _times.append(hit[1])
                _tots.append(hit[2])

        ts_hits = Table(
            {
                "channel_id": np.array(_channel_ids),
                "dom_id": np.array(_dom_ids),
                "time": np.array(_times),
                "tot": np.array(_tots),
            },
            name="TimesliceHits",
            h5loc="/timeslice_hits",
            split_h5=True,
        )
        return ts_info, ts_frameinfos, ts_hits
Beispiel #19
0
    def process_event(self, data, blob):
        """Parse a raw DAQ event and attach 'EventInfo' and 'Hits' to the blob.

        'Hits' is skipped (with a warning) when the event carries no
        snapshot hits; the event/index counters are advanced either way.
        """
        data_io = BytesIO(data)
        # The preamble must be consumed from the stream even though it is unused.
        preamble = DAQPreamble(file_obj=data_io)  # noqa
        event = DAQEvent(file_obj=data_io, legacy=self.legacy)
        header = event.header

        # Capture this event's group ID once, BEFORE the counter is bumped.
        # Previously the Hits table used the already-incremented counter and
        # thus a different group_id than its own EventInfo.
        group_id = self.event_id

        event_info = Table.from_template(
            {
                "det_id": header.det_id,
                # 'frame_index': self.index,  # header.time_slice,
                "frame_index": header.time_slice,
                "livetime_sec": 0,
                "mc_id": 0,
                "mc_t": 0,
                "n_events_gen": 0,
                "n_files_gen": 0,
                "overlays": event.overlays,
                "trigger_counter": event.trigger_counter,
                "trigger_mask": event.trigger_mask,
                "utc_nanoseconds": header.ticks * 16,  # ticks are 16 ns cycles
                "utc_seconds": header.time_stamp,
                "weight_w1": 0,
                "weight_w2": 0,
                "weight_w3": 0,  # MC weights
                "run_id": header.run,  # run id
                "group_id": group_id,
            },
            "EventInfo",
        )
        blob["EventInfo"] = event_info

        self.event_id += 1
        self.index += 1

        hits = event.snapshot_hits
        n_hits = event.n_snapshot_hits
        if n_hits == 0:
            self.log.warning("No hits found in event.")
            return

        # Flag snapshot hits that also appear among the triggered hits.
        # This might not be needed
        triggereds = np.zeros(n_hits)
        triggered_map = {}
        for thit in event.triggered_hits:
            # TODO: switch to thit.trigger_mask instead of True!
            triggered_map[(thit.dom_id, thit.channel_id, thit.time,
                           thit.tot)] = True
        for idx, hit in enumerate(hits):
            triggereds[idx] = tuple(hit) in triggered_map

        hit_series = Table.from_template(
            {
                "channel_id": hits.channel_id,
                "dom_id": hits.dom_id,
                "time": hits.time,
                "tot": hits.tot,
                "triggered": triggereds,  # TODO: switch to trigger_mask!
                "group_id": group_id,
            },
            "Hits",
        )

        blob["Hits"] = hit_series
Beispiel #20
0
    def apply(self,
              hits,
              no_copy=False,
              correct_slewing=True,
              slewing_variant=3):
        """Add x, y, z, t0 (and du, floor if DataFrame) columns to the hits.

        Parameters
        ----------
        hits: Table, DataFrame, or km3io hits/mc_hits branch
        no_copy: bool, operate on ``hits`` in place instead of copying first
        correct_slewing: bool, subtract ToT-dependent slewing (data hits only)
        slewing_variant: int, passed through to ``slew``

        Returns
        -------
        Table with the original columns plus the calibration columns.
        """
        if not no_copy:
            try:
                hits = hits.copy()
            except AttributeError:  # probably a km3io object
                pass

        # Normalise km3io (awkward) hit branches into a plain Table.
        if isinstance(hits, (ak.Array, ak.Record, km3io.rootio.Branch)):
            if hasattr(hits, "dom_id"):
                hits = Table(
                    dict(
                        dom_id=hits.dom_id,
                        channel_id=hits.channel_id,
                        time=hits.t,
                        tot=hits.tot,
                        triggered=hits.trig,
                    ))
            else:  # mc_hits in km3io
                hits = Table(
                    dict(
                        pmt_id=hits.pmt_id,
                        time=hits.t,
                        a=hits.a,
                        # TODO: Not all MC files have these two fields
                        # pure_a=hits.pure_a,
                        # pure_t=hits.pure_t,
                        origin=hits.origin,
                    ))

        if istype(hits, "DataFrame"):
            # do we ever see McHits here?
            hits = Table.from_template(hits, "Hits")

        # Data hits are identified by (dom_id, channel_id); MC hits by pmt_id.
        is_mc = None
        if hasattr(hits, "dom_id") and hasattr(hits, "channel_id"):
            try:
                (
                    dir_x,
                    dir_y,
                    dir_z,
                    du,
                    floor,
                    pos_x,
                    pos_y,
                    pos_z,
                    t0,
                    pmt_id,
                ) = _get_calibration_for_hits(hits,
                                              self._calib_by_dom_and_channel)
            except KeyError as e:
                self.log.critical("Wrong calibration (DETX) data provided.")
                raise
            is_mc = False
        elif hasattr(hits, "pmt_id"):
            try:
                (
                    dir_x,
                    dir_y,
                    dir_z,
                    du,
                    floor,
                    pos_x,
                    pos_y,
                    pos_z,
                    t0,
                    dom_id,
                    channel_id,
                ) = _get_calibration_for_mchits(hits, self._calib_by_pmt_id)
            except KeyError as e:
                self.log.critical("Wrong calibration (DETX) data provided.")
                raise
            is_mc = True
        else:
            raise TypeError("Don't know how to apply calibration to '{0}'. "
                            "We need at least 'dom_id' and 'channel_id', or "
                            "'pmt_id'.".format(hits.name))

        # Apply the t0 offset to data hits; when the dtypes differ the time
        # column is replaced with an f4 sum instead of mutated in place.
        if hasattr(hits, "time") and not is_mc:
            if hits.time.dtype != t0.dtype:
                time = hits.time.astype("f4") + t0.astype("f4")
                hits = hits.drop_columns(["time"])
                hits = hits.append_columns(["time"], [time])
            else:
                hits.time += t0

        # Assemble the output columns: original hit data plus calibration.
        hits_data = {}
        for colname in hits.dtype.names:
            hits_data[colname] = hits[colname]
        calib = {
            "dir_x": dir_x,
            "dir_y": dir_y,
            "dir_z": dir_z,
            "du": du.astype(np.uint8),
            "floor": floor.astype(np.uint8),
            "pos_x": pos_x,
            "pos_y": pos_y,
            "pos_z": pos_z,
            "t0": t0,
        }

        # The lookup complements each hit type: MC hits gain dom_id and
        # channel_id, data hits gain pmt_id (matching the unpacking above).
        if is_mc:
            calib["dom_id"] = dom_id.astype(np.int32)
            calib["channel_id"] = channel_id.astype(np.int32)
        else:
            calib["pmt_id"] = pmt_id.astype(np.int32)

        hits_data.update(calib)

        # Slewing correction only applies to real (non-MC) hits.
        if correct_slewing and not is_mc:
            hits_data["time"] -= slew(hits_data["tot"],
                                      variant=slewing_variant)
        return Table(hits_data,
                     h5loc=hits.h5loc,
                     split_h5=hits.split_h5,
                     name=hits.name)
Beispiel #21
0
    def process(self, blob):
        """Parse a raw timeslice from the blob's data and attach the results.

        On success adds 'TimesliceInfo', 'TimesliceFrameInfos' and 'TSHits'
        to the blob and returns it; on a parse failure (struct.error) the
        error is logged and the method implicitly returns None.
        """
        data = self._get_raw_data(blob)
        if data is None:
            return blob
        try:
            tsl_size, datatype = unpack('<ii', data.read(8))
            det_id, run, sqnr = unpack('<iii', data.read(12))
            timestamp, ns_ticks, n_frames = unpack('<iii', data.read(12))

            ts_info = Table.from_template({
                'frame_index': sqnr,
                'slice_id': 0,
                'timestamp': timestamp,
                'nanoseconds': ns_ticks * 16,    # ticks are 16 ns cycles
                'n_frames': n_frames
            }, 'TimesliceInfo')
            ts_frameinfos = {}

            _dom_ids = []
            _channel_ids = []
            _times = []
            _tots = []
            for _ in range(n_frames):
                # Per-frame header followed by n_hits 6-byte hit records.
                frame_size, datatype = unpack('<ii', data.read(8))
                det_id, run, sqnr = unpack('<iii', data.read(12))
                timestamp, ns_ticks, dom_id = unpack('<iii', data.read(12))
                dom_status = unpack('<iiiii', data.read(5 * 4))
                n_hits = unpack('<i', data.read(4))[0]
                ts_frameinfos[dom_id] = Table.from_template({
                    'det_id': det_id,
                    'run_id': run,
                    'sqnr': sqnr,
                    'timestamp': timestamp,
                    'nanoseconds': ns_ticks * 16,
                    'dom_id': dom_id,
                    'dom_status': dom_status,
                    'n_hits': n_hits,
                }, 'TimesliceFrameInfo')
                # Hit record, big-endian: channel (u8), time (i32), ToT (u8).
                for j in range(n_hits):
                    hit = unpack('!BlB', data.read(6))
                    _dom_ids.append(dom_id)
                    _channel_ids.append(hit[0])
                    _times.append(hit[1])
                    _tots.append(hit[2])

            tshits = Table.from_template(
                {
                    'channel_id': np.array(_channel_ids),
                    'dom_id': np.array(_dom_ids),
                    'time': np.array(_times),
                    'tot': np.array(_tots),
                    'triggered': np.zeros(len(_tots)),    # triggered
                    'group_id': 0    # event_id
                },
                'Hits'
            )
            blob['TimesliceInfo'] = ts_info
            blob['TimesliceFrameInfos'] = ts_frameinfos
            blob['TSHits'] = tshits
        except struct.error:
            log.error("Could not parse Timeslice")
            log.error(blob.keys())
        else:
            return blob
Beispiel #22
0
    def extract_event(self):
        """Read the next fit event from the reader and build a blob.

        Returns a Blob containing 'EventInfo' and 'JFit' (a table of the
        reconstructed fits) and advances ``self.event_index``.
        """
        blob = Blob()
        r = self.event_reader
        r.retrieve_next_event()    # do it at the beginning!

        n = r.n_fits

        # Grow the reusable buffers with 1.5x headroom when they are too small.
        if n > self.buf_size:
            self._resize_buffers(int(n * 3 / 2))

        # The reader fills the pre-allocated buffers in place.
        r.get_fits(
            self._pos_xs,
            self._pos_ys,
            self._pos_zs,
            self._dir_xs,
            self._dir_ys,
            self._dir_zs,
            self._ndfs,
            self._times,
            self._qualities,
            self._energies,
        )
        fit_collection = Table({
            'pos_x': self._pos_xs[:n],
            'pos_y': self._pos_ys[:n],
            'pos_z': self._pos_zs[:n],
            'dir_x': self._dir_xs[:n],
            'dir_y': self._dir_ys[:n],
            'dir_z': self._dir_zs[:n],
            'ndf': self._ndfs[:n],
            'time': self._times[:n],
            'quality': self._qualities[:n],
            'energy': self._energies[:n],
        },
                               h5loc='/jfit')
        fit_collection = fit_collection.append_columns(['event_id'],
                                                       [self.event_index])

        # TODO make this into a datastructure

        # No event metadata is available from the fit reader, so everything
        # except the group_id is zero / NaN.
        event_info = Table.from_template({
            'det_id': 0,
            'frame_index': 0,
            'livetime_sec': 0,
            'mc_id': 0,    # fixed: was 'MC ID', which does not match the
            'mc_t': 0,     # fixed: was 'MC time' -- the EventInfo template
                           # uses 'mc_id'/'mc_t' everywhere else
            'n_events_gen': 0,
            'n_files_gen': 0,
            'overlays': 0,
            'trigger_counter': 0,
            'trigger_mask': 0,
            'utc_nanoseconds': 0,
            'utc_seconds': 0,
            'weight_w1': np.nan,
            'weight_w2': np.nan,
            'weight_w3': np.nan,
            'run_id': 0,
            'group_id': self.event_index,
        }, 'EventInfo')

        self.event_index += 1
        blob['EventInfo'] = event_info
        blob['JFit'] = fit_collection
        return blob