Example #1
    def split_to_files(self):
        """Splits the entire data into data products separated be the unique request ID.

        Yields
        -------
        `ScienceProduct`
            the next ScienceProduct defined by the unique request ID
        """

        key_cols = ['request_id']
        if 'tc_packet_seq_control' in self.control.colnames:
            key_cols.insert(0, 'tc_packet_seq_control')

        for ci in unique(self.control, keys=key_cols)['index']:
            control = self.control[self.control['index'] == ci]
            data = self.data[self.data['control_index'] == ci]

            yield type(self)(service_type=self.service_type,
                             service_subtype=self.service_subtype,
                             ssid=self.ssid,
                             control=control,
                             data=data)
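A minimal usage sketch for the generator above, assuming `prod` is an instance of a product class exposing `split_to_files` (the name `prod` is illustrative, not part of the snippet):

# Hypothetical usage: iterate over the per-request products.
for request_product in prod.split_to_files():
    print(request_product.ssid, len(request_product.data))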
Example #2
    def from_tm(cls, tmfile):
        """Process the given SOCFile and creates LevelB FITS files.

        Parameters
        ----------
        tmfile : `SOCPacketFile`
            The input data file.
        """
        packet_data = defaultdict(list)

        for packet_no, binary in tmfile.get_packet_binaries():
            try:
                packet = TMPacket(binary)
            except Exception:
                logger.error('Error parsing %s, %d', tmfile.name, packet_no, exc_info=True)
                return
            packet.source = (tmfile.file.name, packet_no)
            packet_data[packet.key].append(packet)

        for prod_key, packets in packet_data.items():
            headers = []
            hex_data = []
            for packet in packets:
                sh = vars(packet.source_packet_header)
                bs = sh.pop('bitstream')
                hex_data.append(bs.hex)
                dh = vars(packet.data_header)
                dh.pop('datetime')
                headers.append({**sh, **dh, 'raw_file': packet.source[0],
                                'packet': packet.source[1]})
            if len(headers) == 0 or len(hex_data) == 0:
                # Skip empty products; returning here would end the generator
                # and silently drop all remaining products
                continue

            control = Table(headers)
            control['index'] = np.arange(len(control), dtype=np.int64)

            data = Table()
            data['control_index'] = np.array(control['index'], dtype=np.int64)
            data['data'] = hex_data

            control = unique(control, keys=['scet_coarse', 'scet_fine', 'sequence_count'])

            # Only keep data that is in the control table via index
            data = data[np.nonzero(control['index'][:, None] == data['control_index'])[1]]

            # now reindex both data and control
            control['index'] = range(len(control))
            data['control_index'] = control['index']

            service_type, service_subtype, ssid = prod_key
            if ssid is not None:
                control['ssid'] = ssid
            product = LevelB(service_type=service_type, service_subtype=service_subtype,
                             ssid=ssid, control=control, data=data)
            yield product
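A sketch of how this generator might be driven, assuming `from_tm` is a classmethod on `LevelB` and that `SOCPacketFile` takes a path (the path is a placeholder):

# Hypothetical driver loop; collects all LevelB products from one file.
tmfile = SOCPacketFile('path/to/telemetry.bin')
levelb_products = list(LevelB.from_tm(tmfile))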
Example #3
    def __add__(self, other):
        """Combine two packets of same type. Same time points gets overridden.

        Parameters
        ----------
        other : `LevelB`
            The other packet to combine with.

        Returns
        -------
        `LevelB`
            a new LevelB instance with combined data.

        Raises
        ------
        TypeError
            If the products are not of the same type (service, subservice[, ssid]).
        """
        if not isinstance(other, type(self)):
            raise TypeError(
                f'Products must be of the same type, not {type(self)} and {type(other)}'
            )

        # Work on a copy so the input product is not modified
        other = other[:]
        other.control['index'] = other.control['index'].data + self.control[
            'index'].data.max() + 1
        other.data['control_index'] = other.data[
            'control_index'] + self.control['index'].max() + 1

        # For level b the control info is used to sort and select
        control = vstack((self.control, other.control))
        control = unique(control,
                         ['scet_coarse', 'scet_fine', 'sequence_count'])

        orig_indices = control['index']
        # Explicit int64 keeps the index dtype consistent on Windows,
        # where the default numpy int is 32-bit
        new_index = np.arange(len(control), dtype=np.int64)
        control['index'] = new_index

        data = vstack((self.data, other.data))
        data = data[np.nonzero(
            orig_indices[:, None] == data['control_index'].data)[1]]
        data['control_index'] = np.arange(len(data), dtype=np.int64)

        # Total packet size in bytes should be data_length + 7 (6-byte CCSDS
        # primary header plus the length field's off-by-one)
        if np.abs([((len(data['data'][i]) / 2) -
                    (control['data_length'][i] + 7))
                   for i in range(len(data))]).sum() > 0:
            logger.error('Expected and actual data length do not match')

        return type(self)(service_type=self.service_type,
                          service_subtype=self.service_subtype,
                          ssid=self.ssid,
                          control=control,
                          data=data)
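The index-offset pattern used above can be shown in isolation. A self-contained sketch with astropy tables (table contents are made up):

import numpy as np
from astropy.table import Table, vstack

# Shift the second table's indices past the first's maximum before
# stacking, so every data row still points at exactly one control row.
c1 = Table({'index': np.array([0, 1], dtype=np.int64)})
c2 = Table({'index': np.array([0, 1], dtype=np.int64)})
c2['index'] = c2['index'] + c1['index'].max() + 1  # -> [2, 3]
combined = vstack((c1, c2))                        # indices [0, 1, 2, 3]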
Example #4
    def to_requests(self):
        """Yield sub-products grouped by TC packet sequence control and request ID."""
        for ci in unique(self.control,
                         keys=['tc_packet_seq_control',
                               'request_id'])['index']:
            control = self.control[self.control['index'] == ci]
            data = self.data[self.data['control_index'] == ci]

            yield type(self)(control=control, data=data)
Example #5
    def __add__(self, other):
        """Combine two products, dropping duplicated control entries."""
        # Shift the other product's indices past this product's maximum
        other.control['index'] = (other.control['index']
                                  + self.control['index'].max() + 1)
        control = vstack((self.control, other.control))
        # Deduplicate on every column except the synthetic index
        cnames = control.colnames
        cnames.remove('index')
        control = unique(control, cnames)

        other.data['control_index'] = other.data[
            'control_index'] + self.control['index'].max() + 1
        data = vstack((self.data, other.data))

        # Keep only data rows whose control entry survived deduplication
        data_ind = np.isin(data['control_index'], control['index'])
        data = data[data_ind]

        return type(self)(control, data)
Example #6
    def from_packets(cls, packets):

        control = cls()

        control.add_basic(name='tc_packet_id_ref',
                          nix='NIX00001',
                          packets=packets,
                          dtype=np.uint16)
        control.add_basic(name='tc_packet_seq_control',
                          nix='NIX00002',
                          packets=packets,
                          dtype=np.uint16)
        control.add_basic(name='request_id',
                          nix='NIX00037',
                          packets=packets,
                          dtype=np.uint32)
        control.add_basic(name='time_stamp', nix='NIX00402', packets=packets)
        if np.any(control['time_stamp'] > 2**32 - 1):
            # Wide values pack the coarse time in the high bits and a
            # 16-bit fine time in the low 16 bits
            coarse = control['time_stamp'] >> 16
            fine = control['time_stamp'] & ((1 << 16) - 1)
        else:
            coarse = control['time_stamp']
            fine = 0
        control['time_stamp'] = SCETime(coarse, fine)
        try:
            control['num_substructures'] = np.array(
                packets.get_value('NIX00403'), np.uint16).reshape(1, -1)
            control.add_meta(name='num_substructures',
                             nix='NIX00403',
                             packets=packets)
        except AttributeError:
            logger.debug('NIX00403 not found')

        control = unique(control)

        return control
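A plain-integer sketch of the coarse/fine split performed on `time_stamp` above (the values are made up):

# A time stamp wider than 32 bits packs the coarse part in the high bits
# and a 16-bit fine (sub-second) part in the low 16 bits.
ts = (123456 << 16) | 789
coarse = ts >> 16            # 123456
fine = ts & ((1 << 16) - 1)  # 789
assert (coarse << 16) | fine == ts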
Example #7
    def __add__(self, other):
        """
        Combine two products stacking data along columns and removing duplicated data using time as
        the primary key.

        Parameters
        ----------
        other : A subclass of stix_parser.products.quicklook.QLProduct

        Returns
        -------
        A subclass of stix_parser.products.quicklook.QLProduct
            The combined data product
        """
        if not isinstance(other, type(self)):
            raise TypeError(
                f'Products must be of the same type, not {type(self)} and {type(other)}'
            )

        # TODO reindex and update data control_index
        other_control = other.control[:]
        other_data = other.data[:]
        other_control[
            'index'] = other.control['index'] + self.control['index'].max() + 1
        control = vstack((self.control, other_control))
        # control = unique(control, keys=['scet_coarse', 'scet_fine'])
        # control = control.group_by(['scet_coarse', 'scet_fine'])

        other_data['control_index'] = other.data[
            'control_index'] + self.control['index'].max() + 1

        logger.debug('len self: %d, len other %d', len(self.data),
                     len(other_data))

        data = vstack((self.data, other_data))

        logger.debug('len stacked %d', len(data))

        # Not sure where the rounding issue arises; needs investigation
        data['time_float'] = np.around(data['time'].as_float(), 2)

        data = unique(data, keys=['time_float'])

        logger.debug('len unique %d', len(data))

        data.remove_column('time_float')

        unique_control_inds = np.unique(data['control_index'])
        control = control[np.nonzero(
            control['index'][:, None] == unique_control_inds)[1]]

        for idb_key, date_range in other.idb_versions.items():
            self.idb_versions[idb_key].expand(date_range)

        return type(self)(service_type=self.service_type,
                          service_subtype=self.service_subtype,
                          ssid=self.ssid,
                          control=control,
                          data=data,
                          idb_versions=self.idb_versions,
                          level=self.level)
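The rounded-time deduplication above, shown standalone with a toy table (values are made up):

import numpy as np
from astropy.table import Table, unique

# Rows whose times agree to two decimal places count as duplicates;
# `unique` keeps the first occurrence of each rounded key.
t = Table({'time': [1.001, 1.004, 2.500], 'val': [10, 11, 12]})
t['time_float'] = np.around(t['time'], 2)
t = unique(t, keys=['time_float'])  # drops the 1.004 row
t.remove_column('time_float')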
Example #8
    def from_packets(cls, packets, eng_packets):
        # Control
        control = Control.from_packets(packets)
        control.remove_column('num_structures')
        control = unique(control)
        if len(control) != 1:
            raise ValueError('Expected a single unique control entry, '
                             f'found {len(control)}')
        control['index'] = range(len(control))

        data = Data()
        data['control_index'] = np.full(len(packets['NIX00441']), 0)
        data['delta_time'] = (np.array(packets['NIX00441'],
                                       np.uint16)) * 0.1 * u.s
        unique_times = np.unique(data['delta_time'])


        data['rcr'] = packets['NIX00401']
        data['pixel_mask1'] = _get_pixel_mask(packets, 'NIXD0407')
        data['pixel_mask2'] = _get_pixel_mask(packets, 'NIXD0444')
        data['pixel_mask3'] = _get_pixel_mask(packets, 'NIXD0445')
        data['pixel_mask4'] = _get_pixel_mask(packets, 'NIXD0446')
        data['pixel_mask5'] = _get_pixel_mask(packets, 'NIXD0447')
        data['detector_masks'] = _get_detector_mask(packets)
        data['integration_time'] = (np.array(packets['NIX00405'])) * 0.1

        ts, tk, tm = control['compression_scheme_triggers_skm'][0]
        triggers, triggers_var = decompress(
            [packets[f'NIX00{i}'] for i in range(242, 258)],
            s=ts,
            k=tk,
            m=tm,
            return_variance=True)

        data['triggers'] = triggers.T
        data['triggers_err'] = np.sqrt(triggers_var).T

        tids = np.searchsorted(data['delta_time'], unique_times)
        data = data[tids]

        num_energy_groups = sum(packets['NIX00258'])

        # Data
        vis = np.zeros((unique_times.size, 32, 32), dtype=complex)
        vis_err = np.zeros((unique_times.size, 32, 32), dtype=complex)
        e_low = np.array(packets['NIXD0016'])
        e_high = np.array(packets['NIXD0017'])

        # TODO create energy bin mask
        control['energy_bin_mask'] = np.full((1, 32), False, np.ubyte)
        all_energies = set(np.hstack([e_low, e_high]))
        control['energy_bin_mask'][:, list(all_energies)] = True

        data['flux'] = np.array(packets['NIX00261']).reshape(
            unique_times.size, -1)
        num_detectors = packets['NIX00262'][0]
        detector_id = np.array(packets['NIX00100']).reshape(
            unique_times.size, -1, num_detectors)


        ds, dk, dm = control['compression_scheme_counts_skm'][0]
        real, real_var = decompress(packets['NIX00263'],
                                    s=ds,
                                    k=dk,
                                    m=dm,
                                    return_variance=True)
        imaginary, imaginary_var = decompress(packets['NIX00264'],
                                              s=ds,
                                              k=dk,
                                              m=dm,
                                              return_variance=True)

        mesh = np.ix_(np.arange(unique_times.size), detector_id[0][0],
                      e_low.reshape(unique_times.size, -1)[0])
        vis[mesh] = (real + imaginary * 1j).reshape(unique_times.size,
                                                    num_detectors, -1)

        # TODO this doesn't seem correct, probably need to combine the
        # variances in a better way
        vis_err[mesh] = (np.sqrt(real_var) +
                         np.sqrt(imaginary_var) * 1j).reshape(
                             unique_times.size, num_detectors, -1)

        data['visibility'] = vis
        data['visibility_err'] = vis_err

        data['time'] = Time(scet_to_datetime(f'{int(control["time_stamp"][0])}:0')) \
            + data['delta_time'] + data['integration_time'] / 2
        data['timedel'] = data['integration_time']

        return cls(control=control, data=data)
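The `np.ix_` scatter used for the visibilities can be shown in miniature (shapes and indices are illustrative):

import numpy as np

# Build an open mesh from per-axis index lists and assign a reshaped
# block into the larger array in a single step.
vis = np.zeros((2, 4, 4), dtype=complex)  # time x detector x energy
mesh = np.ix_([0, 1], [1, 3], [0, 2])     # times, detector ids, energy bins
vis[mesh] = (np.arange(8) + 0j).reshape(2, 2, 2)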
Example #9
    def from_packets(cls, packets, eng_packets):
        # Control
        ssid = packets['SSID'][0]

        control = Control.from_packets(packets)

        control.remove_column('num_structures')
        control = unique(control)

        if len(control) != 1:
            raise ValueError(
                'Creating a science product from packets from multiple products'
            )

        control['index'] = 0

        data = Data()
        data['delta_time'] = (np.array(packets['NIX00441'],
                                       np.int32)) * 0.1 * u.s
        unique_times = np.unique(data['delta_time'])

        data['rcr'] = np.array(packets['NIX00401'], np.ubyte)
        data['num_pixel_sets'] = np.array(packets['NIX00442'], np.ubyte)
        pixel_masks = _get_pixel_mask(packets, 'NIXD0407')
        pixel_masks = pixel_masks.reshape(-1, data['num_pixel_sets'][0], 12)
        if ssid == 21 and data['num_pixel_sets'][0] != 12:
            pixel_masks = np.pad(pixel_masks,
                                 ((0, 0), (0, 12 - data['num_pixel_sets'][0]),
                                  (0, 0)))
        data['pixel_masks'] = pixel_masks
        data['detector_masks'] = _get_detector_mask(packets)
        data['integration_time'] = (np.array(packets.get('NIX00405'),
                                             np.uint16)) * 0.1 * u.s

        # TODO change once FSW fixed
        ts, tk, tm = control['compression_scheme_counts_skm'][0]
        triggers, triggers_var = decompress(
            [packets.get(f'NIX00{i}') for i in range(242, 258)],
            s=ts,
            k=tk,
            m=tm,
            return_variance=True)

        data['triggers'] = triggers.T
        data['triggers_err'] = np.sqrt(triggers_var).T
        data['num_energy_groups'] = np.array(packets['NIX00258'], np.ubyte)

        tmp = dict()
        tmp['e_low'] = np.array(packets['NIXD0016'], np.ubyte)
        tmp['e_high'] = np.array(packets['NIXD0017'], np.ubyte)
        tmp['num_data_elements'] = np.array(packets['NIX00259'])
        unique_energies_low = np.unique(tmp['e_low'])
        unique_energies_high = np.unique(tmp['e_high'])

        # counts = np.array(eng_packets['NIX00260'], np.uint32)

        cs, ck, cm = control['compression_scheme_counts_skm'][0]
        counts, counts_var = decompress(packets.get('NIX00260'),
                                        s=cs,
                                        k=ck,
                                        m=cm,
                                        return_variance=True)

        counts = counts.reshape(unique_times.size, unique_energies_low.size,
                                data['detector_masks'][0].sum(),
                                data['num_pixel_sets'][0].sum())

        counts_var = counts_var.reshape(unique_times.size,
                                        unique_energies_low.size,
                                        data['detector_masks'][0].sum(),
                                        data['num_pixel_sets'][0].sum())
        # t x e x d x p -> t x d x p x e
        counts = counts.transpose((0, 2, 3, 1))
        counts_var = np.sqrt(counts_var.transpose((0, 2, 3, 1)))
        if ssid == 21:
            out_counts = np.zeros((unique_times.size, 32, 12, 32))
            out_var = np.zeros((unique_times.size, 32, 12, 32))
        elif ssid == 22:
            out_counts = np.zeros((unique_times.size, 32, 4, 32))
            out_var = np.zeros((unique_times.size, 32, 4, 32))

        dl_energies = np.array([
            [ENERGY_CHANNELS[lch]['e_lower'], ENERGY_CHANNELS[hch]['e_upper']]
            for lch, hch in zip(unique_energies_low, unique_energies_high)
        ]).reshape(-1)
        dl_energies = np.unique(dl_energies)
        sci_energies = np.hstack(
            [[ENERGY_CHANNELS[ch]['e_lower'] for ch in range(32)],
             ENERGY_CHANNELS[31]['e_upper']])

        # If there is any onboard summing of energy channels rebin back to standard sci channels
        if (unique_energies_high - unique_energies_low).sum() > 0:
            rebinned_counts = np.zeros((*counts.shape[:-1], 32))
            rebinned_counts_var = np.zeros((*counts_var.shape[:-1], 32))
            e_ch_start = 0
            e_ch_end = counts.shape[-1]
            if dl_energies[0] == 0.0:
                rebinned_counts[..., 0] = counts[..., 0]
                rebinned_counts_var[..., 0] = counts_var[..., 0]
                e_ch_start += 1
            elif dl_energies[-1] == np.inf:
                rebinned_counts[..., -1] = counts[..., -1]
                rebinned_counts_var[..., -1] = counts_var[..., -1]
                e_ch_end -= 1

            torebin = np.where((dl_energies >= 4.0) & (dl_energies <= 150.0))
            rebinned_counts[..., 1:-1] = np.apply_along_axis(
                rebin_proportional, -1,
                counts[...,
                       e_ch_start:e_ch_end].reshape(-1, e_ch_end - e_ch_start),
                dl_energies[torebin], sci_energies[1:-1]).reshape(
                    (*counts.shape[:-1], 30))

            rebinned_counts_var[..., 1:-1] = np.apply_along_axis(
                rebin_proportional, -1,
                counts_var[..., e_ch_start:e_ch_end].reshape(
                    -1, e_ch_end - e_ch_start), dl_energies[torebin],
                sci_energies[1:-1]).reshape((*counts_var.shape[:-1], 30))

            energy_indices = np.full(32, True)
            energy_indices[[0, -1]] = False

            ix = np.ix_(np.full(unique_times.size, True),
                        data['detector_masks'][0].astype(bool),
                        np.ones(data['num_pixel_sets'][0], dtype=bool),
                        np.full(32, True))

            out_counts[ix] = rebinned_counts
            out_var[ix] = rebinned_counts_var
        else:
            energy_indices = np.full(32, False)
            energy_indices[unique_energies_low.min(
            ):unique_energies_high.max() + 1] = True

            ix = np.ix_(np.full(unique_times.size,
                                True), data['detector_masks'][0].astype(bool),
                        np.ones(data['num_pixel_sets'][0], dtype=bool),
                        energy_indices)

            out_counts[ix] = counts
            out_var[ix] = counts_var


        if counts.sum() != out_counts.sum():
            raise ValueError(
                'Original and reformatted count totals do not match')

        control['energy_bin_mask'] = np.full((1, 32), False, np.ubyte)
        all_energies = set(np.hstack([tmp['e_low'], tmp['e_high']]))
        control['energy_bin_mask'][:, list(all_energies)] = True

        sub_index = np.searchsorted(data['delta_time'], unique_times)
        data = data[sub_index]

        data['time'] = Time(scet_to_datetime(f'{int(control["time_stamp"][0])}:0')) \
            + data['delta_time'] + data['integration_time'] / 2
        data['timedel'] = data['integration_time']
        data['counts'] = out_counts * u.ct
        data['counts_err'] = out_var * u.ct
        data['control_index'] = control['index'][0]
        data.remove_columns(['delta_time', 'integration_time'])

        data = data['time', 'timedel', 'rcr', 'pixel_masks', 'detector_masks',
                    'num_pixel_sets', 'num_energy_groups', 'triggers',
                    'triggers_err', 'counts', 'counts_err']
        data['control_index'] = 0

        return cls(control=control, data=data)
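The `searchsorted` row selection used above relies on the time column being sorted; a minimal sketch with made-up times:

import numpy as np

# For a sorted column with repeats, searchsorted returns the index of the
# first row for each unique value, giving one representative row per time.
delta_time = np.array([0.0, 0.0, 0.1, 0.1, 0.1, 0.2])
unique_times = np.unique(delta_time)
first_rows = np.searchsorted(delta_time, unique_times)  # [0, 2, 5]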
Example #10
    def from_packets(cls, packets, eng_packets):
        control = Control.from_packets(packets)

        control.remove_column('num_structures')
        control = unique(control)

        if len(control) != 1:
            raise ValueError(
                'Creating a science product from packets from multiple products'
            )

        control['index'] = 0

        data = Data()
        data['start_time'] = (np.array(packets.get('NIX00404'),
                                       np.uint16)) * 0.1 * u.s
        data['rcr'] = np.array(packets.get('NIX00401')[0], np.ubyte)
        data['integration_time'] = (np.array(
            packets.get('NIX00405')[0], np.int16)) * 0.1 * u.s
        data['pixel_masks'] = _get_pixel_mask(packets, 'NIXD0407')
        data['detector_masks'] = _get_detector_mask(packets)
        data['triggers'] = np.array(
            [packets.get(f'NIX00{i}') for i in range(408, 424)], np.int64).T
        data['num_samples'] = np.array(packets.get('NIX00406'), np.int16)

        num_detectors = 32
        num_energies = 32
        num_pixels = 12

        # Data
        tmp = dict()
        tmp['pixel_id'] = np.array(packets.get('NIXD0158'), np.ubyte)
        tmp['detector_id'] = np.array(packets.get('NIXD0153'), np.ubyte)
        tmp['channel'] = np.array(packets.get('NIXD0154'), np.ubyte)
        tmp['continuation_bits'] = np.array(packets.get('NIXD0159'), np.ubyte)

        control['energy_bin_mask'] = np.full((1, 32), False, np.ubyte)
        all_energies = set(tmp['channel'])
        control['energy_bin_mask'][:, list(all_energies)] = True

        # Find contiguous time indices
        unique_times = np.unique(data['start_time'])
        time_indices = np.searchsorted(unique_times, data['start_time'])

        # Create the full count array. Zero counts are not sent down: for
        # cb == 0 the count is implicitly 1, for cb == 1 extract one raw
        # value, and for cb == 2 extract two raw values and combine them
        raw_counts = packets.get('NIX00065')
        counts_1d = []
        raw_count_index = 0
        for cb in tmp['continuation_bits']:
            if cb == 0:
                counts_1d.append(1)
            elif cb == 1:
                cur_count = raw_counts[raw_count_index]
                counts_1d.append(cur_count)
                raw_count_index += cb
            elif cb == 2:
                cur_count = raw_counts[raw_count_index:(raw_count_index + cb)]
                combined_count = int.from_bytes(
                    (cur_count[0] + 1).to_bytes(2, 'big') +
                    cur_count[1].to_bytes(1, 'big'), 'big')
                counts_1d.append(combined_count)
                raw_count_index += cb
            else:
                raise ValueError(
                    f'Continuation bits value of {cb} not allowed (0, 1, 2)')
        counts_1d = np.array(counts_1d, np.uint16)
        # raw_counts = counts_1d

        end_inds = np.cumsum(data['num_samples'])
        start_inds = np.hstack([0, end_inds[:-1]])
        dd = [(tmp['pixel_id'][s:e], tmp['detector_id'][s:e],
               tmp['channel'][s:e], counts_1d[s:e])
              for s, e in zip(start_inds.astype(int), end_inds)]

        counts = np.zeros(
            (len(unique_times), num_detectors, num_pixels, num_energies),
            np.uint32)
        for i, (pid, did, cid, cc) in enumerate(dd):
            counts[time_indices[i], did, pid, cid] = cc


        sub_index = np.searchsorted(data['start_time'], unique_times)
        data = data[sub_index]
        data['time'] = Time(scet_to_datetime(f'{int(control["time_stamp"][0])}:0'))\
            + data['start_time'] + data['integration_time']/2
        data['timedel'] = data['integration_time']
        data['counts'] = counts * u.ct
        data['control_index'] = control['index'][0]

        data.remove_columns(['start_time', 'integration_time', 'num_samples'])

        return cls(control=control, data=data)
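The continuation-bit decoding loop above can be exercised standalone with made-up byte values:

# cb == 0: implicit count of 1; cb == 1: one raw byte; cb == 2: two raw
# bytes combined into one integer (the first byte is incremented by one
# before combining, as in the loop above).
raw_counts = [200, 1, 44]
continuation_bits = [0, 1, 2]
counts, i = [], 0
for cb in continuation_bits:
    if cb == 0:
        counts.append(1)
    elif cb == 1:
        counts.append(raw_counts[i])
        i += 1
    elif cb == 2:
        hi, lo = raw_counts[i:i + 2]
        counts.append(int.from_bytes(
            (hi + 1).to_bytes(2, 'big') + lo.to_bytes(1, 'big'), 'big'))
        i += 2
# counts == [1, 200, 556]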