Example no. 1
    def from_levelb(cls, levelb, parent=''):
        packets, idb_versions, control = QLProduct.from_levelb(
            levelb, parent=parent, NIX00405_offset=QLNIX00405_off)

        control.add_data('energy_bin_edge_mask',
                         _get_energy_bins(packets, 'NIX00266', 'NIXD0111'))
        control.add_basic(name='num_energies', nix='NIX00270', packets=packets)

        control['num_samples'] = np.array(packets.get_value(
            'NIX00277')).flatten()[np.cumsum(control['num_energies']) - 1]
        control['num_samples'].meta = {'NIXS': 'NIX00277'}

        time, duration, scet_timerange = control._get_time()
        # Map a given entry back to the control info through index
        control_indices = np.hstack([
            np.full(ns, cind) for ns, cind in control[['num_samples', 'index']]
        ])

        control.add_data('compression_scheme_counts_skm',
                         _get_compression_scheme(packets, 'NIX00278'))

        counts = np.array(packets.get_value('NIX00278')).reshape(
            control['num_energies'][0], control['num_samples'].sum())
        counts_var = np.array(packets.get_value('NIX00278', attr="error")).\
            reshape(control['num_energies'][0], control['num_samples'].sum())

        control.add_data('compression_scheme_triggers_skm',
                         _get_compression_scheme(packets, 'NIX00274'))

        triggers = packets.get_value('NIX00274').T
        triggers_var = packets.get_value('NIX00274', attr="error").T

        data = Data()
        data['control_index'] = control_indices
        data['time'] = time
        data['timedel'] = duration
        data.add_meta(name='timedel', nix='NIX00405', packets=packets)
        data['triggers'] = triggers.astype(get_min_uint(triggers))
        data.add_meta(name='triggers', nix='NIX00274', packets=packets)
        data['triggers_err'] = np.float32(np.sqrt(triggers_var))
        data['counts'] = (counts.T * u.ct).astype(get_min_uint(counts))
        data.add_meta(name='counts', nix='NIX00278', packets=packets)
        data['counts_err'] = np.float32(np.sqrt(counts_var).T * u.ct)

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions)
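
# A minimal standalone sketch (plain numpy, made-up values) of the
# control_indices pattern used above: each control row contributes
# `num_samples` copies of its own index, so every data row can be traced back
# to the control row it came from.
import numpy as np

num_samples = np.array([3, 1, 2])     # hypothetical per-row sample counts
index = np.arange(num_samples.size)   # stands in for control['index']
control_indices = np.hstack([np.full(ns, ci) for ns, ci in zip(num_samples, index)])
print(control_indices)                # -> [0 0 0 1 2 2]
# equivalent, without the list comprehension: np.repeat(index, num_samples)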
Example no. 2
    def from_levelb(cls, levelb, parent):
        packets, idb_versions = GenericProduct.getLeveL0Packets(levelb)

        control = Control()
        control['scet_coarse'] = packets.get('scet_coarse')
        control['scet_fine'] = packets.get('scet_fine')
        control['integration_time'] = 0
        control['index'] = np.arange(len(control)).astype(
            get_min_uint(len(control)))

        control['raw_file'] = levelb.control['raw_file']
        control['packet'] = levelb.control['packet']
        control['parent'] = parent

        # Create array of times as dt from date_obs
        times = SCETime(control['scet_coarse'], control['scet_fine'])

        # Data
        data = Data()
        data['time'] = times
        data['timedel'] = SCETimeDelta(0, 0)

        reshape_nixs = {'NIX00103', 'NIX00104'}
        reshape = False
        if reshape_nixs.intersection(packets.data[0].__dict__.keys()):
            reshape = True
        for nix, param in packets.data[0].__dict__.items():

            name = param.idb_info.get_product_attribute_name()
            data.add_basic(name=name,
                           nix=nix,
                           attr='value',
                           packets=packets,
                           reshape=reshape)

        data['control_index'] = np.arange(len(control)).astype(
            get_min_uint(len(control)))

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions,
                   packets=packets)
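
# `get_min_uint` is used throughout to pick a compact dtype for index and count
# columns. A plausible sketch of such a helper, assuming it simply returns the
# smallest unsigned integer dtype that can hold the maximum value (this is an
# assumption, not necessarily the stixcore implementation):
import numpy as np

def get_min_uint_sketch(values):
    """Return the smallest unsigned integer dtype able to hold max(values)."""
    max_value = int(np.max(values)) if np.size(values) else 0
    for dtype in (np.uint8, np.uint16, np.uint32, np.uint64):
        if max_value <= np.iinfo(dtype).max:
            return dtype
    raise ValueError('value does not fit into a 64-bit unsigned integer')

print(get_min_uint_sketch([3, 250]))     # -> <class 'numpy.uint8'>
print(get_min_uint_sketch(70000))        # -> <class 'numpy.uint32'>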
Example no. 3
    def from_levelb(cls, levelb, parent=''):
        packets, idb_versions, control = QLProduct.from_levelb(
            levelb, parent=parent, NIX00405_offset=QLNIX00405_off)

        # Control
        control['samples_per_variance'] = np.array(
            packets.get_value('NIX00279'), np.ubyte)
        control.add_meta(name='samples_per_variance',
                         nix='NIX00279',
                         packets=packets)
        control.add_data('pixel_mask', _get_pixel_mask(packets))
        control.add_data('detector_mask', _get_detector_mask(packets))

        control.add_data('compression_scheme_variance_skm',
                         _get_compression_scheme(packets, 'NIX00281'))

        energy_masks = np.array([[
            bool(int(x))
            for x in format(packets.get_value('NIX00282')[i], '032b')
        ] for i in range(len(packets.get_value('NIX00282')))])

        control['energy_bin_mask'] = energy_masks
        control.add_meta(name='energy_bin_mask',
                         nix='NIX00282',
                         packets=packets)
        control['num_energies'] = 1
        control.add_basic(name='num_samples', nix='NIX00280', packets=packets)

        time, duration, scet_timerange = control._get_time()
        # Map a given entry back to the control info through index
        control_indices = np.hstack([
            np.full(ns, cind) for ns, cind in control[['num_samples', 'index']]
        ])

        variance = packets.get_value('NIX00281').T
        variance_var = packets.get_value('NIX00281', attr='error').T

        # Data
        data = Data()
        data['time'] = time
        data['timedel'] = duration
        data.add_meta(name='timedel', nix='NIX00405', packets=packets)
        data['control_index'] = control_indices
        data['variance'] = variance.astype(get_min_uint(variance))
        data.add_meta(name='variance', nix='NIX00281', packets=packets)
        data['variance_err'] = np.float32(np.sqrt(variance_var))

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions)
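
# The NIX00282 energy mask above is unpacked bit by bit with format(x, '032b').
# An equivalent vectorized sketch with plain numpy and a made-up raw value:
import numpy as np

raw = np.array([7], dtype=np.uint32)                 # hypothetical mask, 0b...0111
bits = (raw[:, None] >> np.arange(31, -1, -1)) & 1   # most significant bit first,
energy_masks = bits.astype(bool)                     # matching format(x, '032b')
print(energy_masks[0, -3:])                          # -> [ True  True  True]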
Example no. 4
    def from_packets(cls, packets, NIX00405_offset=0):
        """
        Common generator method to create and prepare the control table.

        Parameters
        ----------
        packets : `PacketSequence`
            The set of packets of the same data type from which the data product will be created.
        NIX00405_offset : int, optional
            NIX00405 (integration time) is decoded in one of two ways, (X * 0.1 s) or
            ((X + 1) * 0.1 s); this offset selects between them, by default 0.

        Returns
        -------
        `Control`
            The Control object for the data product.
        """
        # Header
        control = cls()
        # self.energy_bin_mask = None
        # self.samples = None
        control.add_basic(name='scet_coarse',
                          nix='NIX00445',
                          packets=packets,
                          dtype=np.uint32)
        # Not all QL data have fine time in the TM; default to 0 if not present
        try:
            control.add_basic(name='scet_fine',
                              nix='NIX00446',
                              packets=packets)
        except AttributeError:
            control['scet_fine'] = np.zeros_like(control['scet_coarse'],
                                                 np.uint32)

        try:
            control['integration_time'] = (packets.get_value('NIX00405') +
                                           (NIX00405_offset * u.s))
            control.add_meta(name='integration_time',
                             nix='NIX00405',
                             packets=packets)
        except AttributeError:
            control['integration_time'] = np.zeros_like(
                control['scet_coarse'], np.float64) * u.s

        # control = unique(control)
        control['index'] = np.arange(len(control)).astype(
            get_min_uint(len(control)))

        return control
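
# A worked example of the NIX00405_offset convention described in the docstring
# above: the decoded integration time is either X * 0.1 s or (X + 1) * 0.1 s,
# i.e. a fixed offset in seconds is added to the parsed value. Numbers here are
# made up for illustration only.
import astropy.units as u

decoded = 0.9 * u.s              # X = 9 decoded as X * 0.1 s
print(decoded + 0.0 * u.s)       # X * 0.1 s convention       -> 0.9 s
print(decoded + 0.1 * u.s)       # (X + 1) * 0.1 s convention -> 1.0 s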
Example no. 5
    def from_levelb(cls, levelb, *, parent=''):
        """Converts level binary science packets to a L1 product.

        Parameters
        ----------
        levelb : `stixcore.products.levelb.binary.LevelB`
            The binary level product.
        parent : `str`, optional
            The parent data file name the binary packet comes from, by default ''

        Returns
        -------
        tuple (packets, idb_versions, control)
            The converted packets, all used IDB versions with their time periods,
            and the initialized control table.
        """
        packets, idb_versions = GenericProduct.getLeveL0Packets(levelb)

        control = ControlSci.from_packets(packets)
        # control.remove_column('num_structures')

        control['index'] = np.ubyte(0)
        control['packet'] = levelb.control['packet'].reshape(1, -1)
        control['packet'].dtype = get_min_uint(control['packet'])
        control['raw_file'] = np.unique(levelb.control['raw_file']).reshape(
            1, -1)
        control['parent'] = parent

        if len(control) != 1:
            raise ValueError(
                'Creating a science product from packets of multiple products'
            )

        return packets, idb_versions, control
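
# A tiny sketch (made-up file names) of the raw_file handling above: the unique
# raw file names of all merged packets are stored in a single control row.
import numpy as np

raw_files = np.array(['f1.xml', 'f1.xml', 'f2.xml'])
print(np.unique(raw_files).reshape(1, -1))   # -> [['f1.xml' 'f2.xml']]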
Example no. 6
    def from_levelb(cls, levelb, parent=''):
        packets, idb_versions = GenericProduct.getLeveL0Packets(levelb)

        control = Control()
        control['scet_coarse'] = packets.get('scet_coarse')
        control['scet_fine'] = packets.get('scet_fine')
        control['index'] = np.arange(len(control)).astype(get_min_uint(len(control)))

        # When the packets are parsed, empty packets are dropped, but at LB we don't parse so this
        # is not known; compare control and levelb.control and only use the matching rows
        if len(levelb.control) > len(control):
            matching_index = np.argwhere(
                np.in1d(levelb.control['scet_coarse'], np.array(packets.get('scet_coarse'))))
            control['raw_file'] = levelb.control['raw_file'][matching_index].reshape(-1)
            control['packet'] = levelb.control['packet'][matching_index].reshape(-1)
        else:
            control['raw_file'] = levelb.control['raw_file'].reshape(-1)
            control['packet'] = levelb.control['packet'].reshape(-1)

        control['parent'] = parent

        tmp = Data()
        tmp.add_basic(name='ubsd_counter', nix='NIX00285', packets=packets, dtype=np.uint32)
        tmp.add_basic(name='pald_counter', nix='NIX00286', packets=packets, dtype=np.uint32)
        tmp.add_basic(name='num_flares', nix='NIX00294', packets=packets, dtype=np.uint16)

        colnames = ['start_scet_coarse', 'end_scet_coarse', 'highest_flareflag', 'tm_byte_volume',
                    'average_z_loc', 'average_y_loc', 'processing_mask']

        flares = Data()
        if tmp['num_flares'].sum() > 0:
            flares.add_basic(name='start_scet_coarse', nix='NIX00287', packets=packets)
            flares.add_basic(name='end_scet_coarse', nix='NIX00288', packets=packets)
            flares.add_basic(name='highest_flareflag', nix='NIX00289', packets=packets,
                             dtype=np.byte)
            flares.add_basic(name='tm_byte_volume', nix='NIX00290', packets=packets, dtype=np.byte)
            flares.add_basic(name='average_z_loc', nix='NIX00291', packets=packets, dtype=np.byte)
            flares.add_basic(name='average_y_loc', nix='NIX00292', packets=packets, dtype=np.byte)
            flares.add_basic(name='processing_mask', nix='NIX00293', packets=packets, dtype=np.byte)

        tmp_data = defaultdict(list)
        start = 0
        for i, (ubsd, pald, n_flares) in enumerate(tmp):
            end = start + n_flares
            if n_flares == 0:
                tmp_data['control_index'].append(i)
                tmp_data['coarse'].append(control['scet_coarse'][i])
                tmp_data['fine'].append(control['scet_fine'][i])
                tmp_data['ubsd'].append(ubsd)
                tmp_data['pald'].append(pald)
                for name in colnames:
                    tmp_data[name].append(0)
            else:
                tmp_data['control_index'].extend([i] * n_flares)
                tmp_data['coarse'].extend([control['scet_coarse'][i]] * n_flares)
                tmp_data['fine'].extend([control['scet_fine'][i]] * n_flares)
                tmp_data['ubsd'].extend([ubsd] * n_flares)
                tmp_data['pald'].extend([pald] * n_flares)
                for name in colnames:
                    tmp_data[name].extend(flares[name][start:end])

            start = end

        data = Data(tmp_data)
        data['time'] = SCETime(tmp_data['coarse'], tmp_data['fine'])
        data['timedel'] = SCETimeDelta(np.full(len(data), 0), np.full(len(data), 0))
        data.remove_columns(['coarse', 'fine'])

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions,
                   packets=packets)
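
# A minimal sketch (made-up values) of the row-matching logic above: when empty
# packets were dropped during parsing, keep only those LB control rows whose
# scet_coarse also appears in the parsed packets.
import numpy as np

lb_scet_coarse = np.array([100, 101, 102, 103])      # hypothetical LB rows
parsed_scet_coarse = np.array([100, 102, 103])       # one empty packet dropped
matching_index = np.argwhere(np.in1d(lb_scet_coarse, parsed_scet_coarse))
print(lb_scet_coarse[matching_index].reshape(-1))    # -> [100 102 103]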
Example no. 7
    def from_levelb(cls, levelb, parent=''):
        packets, idb_versions, control = QLProduct.from_levelb(levelb, parent=parent)

        # Control
        control.add_basic(name='integration_time', nix='NIX00122', packets=packets,
                          dtype=np.uint32, attr='value')
        control.add_basic(name='quiet_time', nix='NIX00123', packets=packets,
                          dtype=np.uint16, attr='value')
        control.add_basic(name='live_time', nix='NIX00124', packets=packets,
                          dtype=np.uint32, attr='value')
        control.add_basic(name='average_temperature', nix='NIX00125', packets=packets,
                          dtype=np.uint16, attr='value')
        control.add_data('detector_mask', _get_detector_mask(packets))
        control.add_data('pixel_mask', _get_pixel_mask(packets))

        control.add_data('subspectrum_mask', _get_sub_spectrum_mask(packets))
        control.add_data('compression_scheme_counts_skm',
                         _get_compression_scheme(packets, 'NIX00158'))

        subspec_data = {}
        j = 129
        for subspec, i in enumerate(range(300, 308)):
            subspec_data[subspec+1] = {'num_points': packets.get_value(f'NIXD0{j}'),
                                       'num_summed_channel': packets.get_value(f'NIXD0{j + 1}'),
                                       'lowest_channel': packets.get_value(f'NIXD0{j + 2}')}
            j += 3

        control.add_basic(name='num_samples', nix='NIX00159', packets=packets, dtype=np.uint16)

        control['subspec_num_points'] = (
                np.vstack([v['num_points'] for v in subspec_data.values()]).T + 1).astype(np.uint16)
        control['subspec_num_summed_channel'] = (np.vstack(
            [v['num_summed_channel'] for v in subspec_data.values()]).T + 1).astype(np.uint16)
        control['subspec_lowest_channel'] = (
            np.vstack([v['lowest_channel'] for v in subspec_data.values()]).T).astype(np.uint16)

        channels = []
        for i, subspectrum_mask in enumerate(control['subspectrum_mask']):
            subspec_index = np.argwhere(subspectrum_mask == 1)
            sub_channels = [np.arange(control['subspec_num_points'][i, index])
                            * (control['subspec_num_summed_channel'][i, index])
                            + control['subspec_lowest_channel'][i, index] for index in
                            subspec_index]
            channels.append(list(chain(*[ch.tolist() for ch in sub_channels])))
        control['num_channels'] = [len(c) for c in channels]

        duration = SCETimeDelta(packets.get_value('NIX00122').astype(np.uint32))
        time = SCETime(control['scet_coarse'], control['scet_fine']) + duration / 2

        dids = packets.get_value('NIXD0155')
        pids = packets.get_value('NIXD0156')
        ssids = packets.get_value('NIXD0157')
        num_spec_points = packets.get_value('NIX00146')

        unique_times, unique_time_indices = np.unique(time.as_float(), return_index=True)
        unique_times_lookup = {k: v for k, v in zip(unique_times, np.arange(unique_times.size))}

        # should really do it the other way around: make a smaller lookup rather than repeating many times
        tids = np.hstack([[unique_times_lookup[t.as_float()]] * n
                          for t, n in zip(time, control['num_samples'])])
        c_in = list(chain.from_iterable([repeat(c, n)
                                         for c, n in zip(channels, control['num_samples'])]))

        counts = packets.get_value('NIX00158')
        counts_var = packets.get_value('NIX00158', attr='error')

        c_out = np.arange(1025)
        start = 0
        count_map = defaultdict(list)
        counts_var_map = defaultdict(list)
        for tid, did, pid, ssid, nps, cin in zip(tids, dids, pids, ssids, num_spec_points, c_in):
            end = start + nps

            logger.debug('%d, %d, %d, %d, %d, %d', tid, did, pid, ssid, nps, end)
            count_map[tid, did, pid].append(counts[start:end])
            counts_var_map[tid, did, pid].append(counts_var[start:end])
            start = end

        full_counts = np.zeros((unique_times.size, 32, 12, 1024))
        full_counts_var = np.zeros((unique_times.size, 32, 12, 1024))

        for tid, did, pid in count_map.keys():
            cur_counts = count_map[tid, did, pid]
            cur_counts_var = counts_var_map[tid, did, pid]

            counts_rebinned = rebin_proportional(np.hstack(cur_counts), cin, c_out)
            counts_var_rebinned = rebin_proportional(np.hstack(cur_counts_var), cin, c_out)

            full_counts[tid, did, pid] = counts_rebinned
            full_counts_var[tid, did, pid] = counts_var_rebinned

        control = control[unique_time_indices]
        control['index'] = np.arange(len(control))

        # Data
        data = Data()
        data['time'] = time[unique_time_indices]
        data['timedel'] = duration[unique_time_indices]
        data.add_meta(name='timedel', nix='NIX00122', packets=packets)

        data['counts'] = (full_counts*u.ct).astype(get_min_uint(full_counts))
        data.add_meta(name='counts', nix='NIX00158', packets=packets)
        data['counts_err'] = (np.sqrt(full_counts_var)*u.ct).astype(np.float32)
        data['control_index'] = np.arange(len(control)).astype(np.uint16)

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions,
                   packets=packets)
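
# `rebin_proportional` redistributes counts from one set of channel edges onto
# another. A plausible standalone sketch of such a helper, assuming counts are
# spread uniformly within each input bin (an assumption, not the stixcore
# implementation):
import numpy as np

def rebin_proportional_sketch(y, edges_in, edges_out):
    """Rebin counts `y` given on `edges_in` onto `edges_out`, conserving the total."""
    cum = np.concatenate(([0.0], np.cumsum(np.asarray(y, dtype=float))))
    cum_out = np.interp(edges_out, edges_in, cum)
    return np.diff(cum_out)

counts = np.array([4.0, 2.0])                                         # two input bins
print(rebin_proportional_sketch(counts, [0, 1, 2], [0, 0.5, 1, 2]))   # -> [2. 2. 2.]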
Example no. 8
    def from_levelb(cls, levelb, parent=''):
        packets, idb_versions, control = QLProduct.from_levelb(levelb, parent=parent,
                                                               NIX00405_offset=QLNIX00405_off)

        control.add_data('pixel_mask', _get_pixel_mask(packets))
        control.add_data('compression_scheme_spectra_skm',
                         _get_compression_scheme(packets, 'NIX00452'))
        control.add_data('compression_scheme_triggers_skm',
                         _get_compression_scheme(packets, 'NIX00484'))

        # Fixed for spectra
        num_energies = np.unique(packets.get_value('NIX00100')).size
        control['num_energies'] = num_energies
        control.add_meta(name='num_energies', nix='NIX00100', packets=packets)
        control.add_basic(name='num_samples', nix='NIX00089', packets=packets)

        # TODO Handle NIX00089 value of zero, i.e. a valid packet with no data

        # Due to the way packets are split up, full contiguous blocks of detectors 1-32 are not
        # always down-linked to the ground, so the array needs to be padded before writing to the
        # table and later to FITS
        total_samples = control['num_samples'].sum()
        full, partial = divmod(total_samples, 32)
        pad_after = 0
        if partial != 0:
            pad_after = 32 - partial

        control_indices = np.pad(np.hstack([np.full(ns, cind) for ns, cind in
                                            control[['num_samples', 'index']]]), (0, pad_after),
                                 constant_values=-1)
        control_indices = control_indices.reshape(-1, 32)

        duration, time, scet_timerange = cls._get_time(control, num_energies, packets, pad_after)

        # sample x detector x energy
        # counts = np.array([eng_packets.get('NIX00{}'.format(i)) for i in range(452, 484)],
        #                   np.uint32).T * u.ct

        counts = []
        counts_var = []
        for i in range(452, 484):
            counts.append(packets.get_value('NIX00{}'.format(i)))
            counts_var.append(packets.get_value('NIX00{}'.format(i), attr='error'))
        counts = np.vstack(counts).T
        counts_var = np.vstack(counts_var).T
        counts = np.pad(counts, ((0, pad_after), (0, 0)), constant_values=0)
        counts_var = np.pad(counts_var, ((0, pad_after), (0, 0)), constant_values=0)
        triggers = packets.get_value('NIX00484').T.reshape(-1)
        triggers_var = packets.get_value('NIX00484', attr='error').T.reshape(-1)
        triggers = np.pad(triggers, (0, pad_after), constant_values=0)
        triggers_var = np.pad(triggers_var, (0, pad_after), constant_values=0)

        detector_index = np.pad(np.array(packets.get_value('NIX00100'), np.int16), (0, pad_after),
                                constant_values=-1)
        num_integrations = np.pad(np.array(packets.get_value('NIX00485'), np.uint16),
                                  (0, pad_after), constant_values=0)

        # Data
        data = Data()
        data['control_index'] = control_indices[:, 0]
        data['time'] = time[:, 0]
        data['timedel'] = duration[:, 0]
        data.add_meta(name='timedel', nix='NIX00405', packets=packets)
        data['detector_index'] = detector_index.reshape(-1, 32).astype(np.ubyte)
        data.add_meta(name='detector_index', nix='NIX00100', packets=packets)
        data['spectra'] = (counts.reshape(-1, 32, num_energies) * u.ct).astype(get_min_uint(counts))

        # data['spectra'].meta = {'NIXS': [f'NIX00{i}' for i in range(452, 484)],
        #                        'PCF_CURTX': [packets.get(f'NIX00{i}')[0].idb_info.PCF_CURTX
        #                                      for i in range(452, 484)]}
        data['spectra'].meta = {'NIXS': 'NIX00452',
                                'PCF_CURTX': packets.get('NIX00452')[0].idb_info.PCF_CURTX}
        data['spectra_err'] = np.float32(np.sqrt(counts_var.reshape(-1, 32, num_energies)))
        data['triggers'] = triggers.reshape(-1, num_energies).astype(get_min_uint(triggers))
        data.add_meta(name='triggers', nix='NIX00484', packets=packets)
        data['triggers_err'] = np.float32(np.sqrt(triggers_var.reshape(-1, num_energies)))
        data['num_integrations'] = num_integrations.reshape(-1, num_energies).astype(np.ubyte)
        data.add_meta(name='num_integrations', nix='NIX00485', packets=packets)

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions,
                   packets=packets)
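
# A small sketch (made-up numbers) of the padding logic above: the flattened
# spectra samples are padded so that they reshape cleanly into rows of 32
# detectors, with padded entries marked by a control index of -1.
import numpy as np

total_samples = 70
full, partial = divmod(total_samples, 32)
pad_after = 0 if partial == 0 else 32 - partial
control_indices = np.pad(np.zeros(total_samples, int), (0, pad_after), constant_values=-1)
print(pad_after, control_indices.reshape(-1, 32).shape)   # -> 26 (3, 32)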
Example no. 9
    def from_levelb(cls, levelb, parent=''):
        packets, idb_versions, control = QLProduct.from_levelb(levelb, parent=parent,
                                                               NIX00405_offset=QLNIX00405_off)

        control.add_data('detector_mask', _get_detector_mask(packets))
        control.add_data('pixel_mask', _get_pixel_mask(packets))
        control.add_data('energy_bin_edge_mask', _get_energy_bins(packets, 'NIX00266', 'NIXD0107'))
        control.add_basic(name='num_energies', nix='NIX00270', packets=packets)

        control['num_samples'] = np.array(packets.get_value('NIX00271')).flatten()[
            np.cumsum(control['num_energies']) - 1]
        control.add_meta(name='num_samples', nix='NIX00271', packets=packets)

        time, duration, scet_timerange = control._get_time()
        # Map a given entry back to the control info through index
        control_indices = np.hstack([np.full(ns, cind) for ns, cind in
                                     control[['num_samples', 'index']]])

        control.add_data('compression_scheme_counts_skm',
                         _get_compression_scheme(packets, 'NIX00272'))

        counts_flat = packets.get_value('NIX00272')
        counts_var_flat = packets.get_value('NIX00272', attr='error')

        flat_indices = np.hstack((0, np.cumsum([*control['num_samples']]) *
                                  control['num_energies'])).astype(int)

        counts = np.hstack([
            counts_flat[flat_indices[i]:flat_indices[i + 1]].reshape(n_eng, n_sam)
            for i, (n_sam, n_eng) in enumerate(control[['num_samples', 'num_energies']])])

        counts_var = np.hstack([
            counts_var_flat[flat_indices[i]:flat_indices[i + 1]].reshape(n_eng, n_sam)
            for i, (n_sam, n_eng) in enumerate(control[['num_samples', 'num_energies']])])

        control.add_data('compression_scheme_triggers_skm',
                         _get_compression_scheme(packets, 'NIX00274'))

        triggers = packets.get_value('NIX00274').T
        triggers_var = packets.get_value('NIX00274', attr="error").T

        data = Data()
        data['control_index'] = control_indices
        data['time'] = time
        data['timedel'] = duration
        data.add_meta(name='timedel', nix='NIX00405', packets=packets)
        data['triggers'] = triggers.astype(get_min_uint(triggers))
        data.add_meta(name='triggers', nix='NIX00274', packets=packets)
        data['triggers_err'] = np.float32(np.sqrt(triggers_var))
        data['rcr'] = np.hstack(packets.get_value('NIX00276')).flatten().astype(np.ubyte)
        data.add_meta(name='rcr', nix='NIX00276', packets=packets)
        data['counts'] = (counts.T*u.ct).astype(get_min_uint(counts))
        data.add_meta(name='counts', nix='NIX00272', packets=packets)
        data['counts_err'] = np.float32(np.sqrt(counts_var).T * u.ct)

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions,
                   packets=packets)
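
# A sketch (made-up sizes) of the flat_indices slicing above: the flat counts
# stream is cut into one block per control row and each block is reshaped to
# (num_energies, num_samples).
import numpy as np

num_samples = np.array([2, 3])
num_energies = np.array([4, 4])
flat_indices = np.hstack((0, np.cumsum(num_samples) * num_energies)).astype(int)
counts_flat = np.arange(flat_indices[-1])
blocks = [counts_flat[flat_indices[i]:flat_indices[i + 1]].reshape(n_eng, n_sam)
          for i, (n_sam, n_eng) in enumerate(zip(num_samples, num_energies))]
print([b.shape for b in blocks])      # -> [(4, 2), (4, 3)]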
Example no. 10
    def from_levelb(cls, levelb, parent=''):
        packets, idb_versions = GenericProduct.getLeveL0Packets(levelb)
        if len(packets.data) == 0:
            logger.warning('No data, all packets empty %s', levelb)
            return None
        control = ControlSci()

        scet_coarse = packets.get_value('NIX00445')
        scet_fine = packets.get_value('NIX00446')

        control.add_basic(name='summing_value',
                          nix='NIX00088',
                          packets=packets,
                          dtype=np.uint8)
        control.add_basic(name='samples',
                          nix='NIX00089',
                          packets=packets,
                          dtype=np.uint16)
        try:
            control.add_basic(name='averaging_value',
                              nix='NIX00490',
                              packets=packets,
                              dtype=np.uint16)
        except AttributeError:
            control['averaging_value'] = np.uint16(1)

        try:
            control.add_basic(name='request_id',
                              nix='NIX00037',
                              packets=packets,
                              dtype=np.uint32)
        except AttributeError:
            control['request_id'] = np.uint32(0)

        control['raw_file'] = np.unique(levelb.control['raw_file']).reshape(
            1, -1)
        control['packet'] = levelb.control['packet'].reshape(1, -1)
        control['parent'] = parent

        control['index'] = np.arange(len(control)).astype(
            get_min_uint(len(control)))

        delta_time = ((control['summing_value'] * control['averaging_value']) /
                      1000.0) * u.s
        samples = packets.get_value('NIX00089')

        offsets = SCETimeDelta(
            np.concatenate([
                delta_time[i] * np.arange(0, ns)
                for i, ns in enumerate(samples)
            ]))
        timedel = SCETimeDelta(
            np.concatenate(
                [delta_time[i] * np.ones(ns) for i, ns in enumerate(samples)]))
        ctimes = np.concatenate(
            [np.full(ns, scet_coarse[i]) for i, ns in enumerate(samples)])
        ftimes = np.concatenate(
            [np.full(ns, scet_fine[i]) for i, ns in enumerate(samples)])
        starts = SCETime(ctimes, ftimes)
        time = starts + offsets

        # Data
        try:
            data = Data()
            data['time'] = time
            data['timedel'] = timedel
            data.add_basic(name='cha_diode0',
                           nix='NIX00090',
                           packets=packets,
                           dtype=np.uint16)
            data.add_basic(name='cha_diode1',
                           nix='NIX00091',
                           packets=packets,
                           dtype=np.uint16)
            data.add_basic(name='chb_diode0',
                           nix='NIX00092',
                           packets=packets,
                           dtype=np.uint16)
            data.add_basic(name='chb_diode1',
                           nix='NIX00093',
                           packets=packets,
                           dtype=np.uint16)
            data['control_index'] = np.hstack(
                [np.full(ns, i) for i, ns in enumerate(samples)])
        except ValueError as e:
            logger.warning(e)
            raise e

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions)
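
# A sketch (plain numpy/astropy, made-up values) of the per-sample timing above:
# each packet contributes `ns` samples spaced by its own delta_time, offset from
# that packet's start time.
import numpy as np
import astropy.units as u

delta_time = np.array([0.5, 1.0])     # hypothetical per-packet cadence in seconds
samples = np.array([3, 2])            # hypothetical samples per packet
offsets = np.concatenate([delta_time[i] * np.arange(ns)
                          for i, ns in enumerate(samples)]) * u.s
print(offsets)                        # -> [0.  0.5 1.  0.  1. ] s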
Example no. 11
    def from_levelb(cls, levelb, parent=''):
        packets, idb_versions, control = ScienceProduct.from_levelb(
            levelb, parent=parent)

        control.add_data('compression_scheme_counts_skm',
                         _get_compression_scheme(packets, 'NIX00268'))

        control.add_data('compression_scheme_triggers_skm',
                         _get_compression_scheme(packets, 'NIX00267'))

        control['pixel_masks'] = np.unique(_get_pixel_mask(packets)[0], axis=0)
        control.add_meta(name='pixel_masks', nix='NIXD0407', packets=packets)
        control['detector_masks'] = np.unique(_get_detector_mask(packets)[0],
                                              axis=0)
        control['detector_masks'] = fix_detector_mask(
            control, control['detector_masks'])
        control.add_meta(name='detector_masks',
                         nix='NIX00407',
                         packets=packets)
        raw_rcr = packets.get_value('NIX00401', attr='value')

        e_min = np.array(packets.get_value('NIXD0442'))
        e_max = np.array(packets.get_value('NIXD0443'))
        energy_unit = np.array(packets.get_value('NIXD0019')) + 1
        num_times = np.array(packets.get_value('NIX00089'))
        total_num_times = num_times.sum()

        rcr = np.hstack([
            np.full(nt, rcr) for rcr, nt in zip(raw_rcr, num_times)
        ]).astype(np.ubyte)

        counts = np.array(packets.get_value('NIX00268'))
        counts_var = np.array(packets.get_value('NIX00268', attr='error'))

        counts = counts.reshape(total_num_times, -1)
        counts_var = counts_var.reshape(total_num_times, -1)

        full_counts = np.zeros((total_num_times, 32))
        full_counts_var = np.zeros((total_num_times, 32))

        cids = [
            np.arange(emin, emax + 1, eunit)
            for (emin, emax, eunit) in zip(e_min, e_max, energy_unit)
        ]

        control['energy_bin_mask'] = np.full((1, 32), False, np.ubyte)
        control['energy_bin_mask'][:, cids] = True

        dl_energies = np.array([[ENERGY_CHANNELS[ch].e_lower for ch in chs] +
                                [ENERGY_CHANNELS[chs[-1]].e_upper]
                                for chs in cids][0])

        sci_energies = np.hstack(
            [[ENERGY_CHANNELS[ch].e_lower for ch in range(32)],
             ENERGY_CHANNELS[31].e_upper])
        ind = 0
        for nt in num_times:
            e_ch_start = 0
            e_ch_end = counts.shape[1]
            if dl_energies[0] == 0:
                full_counts[ind:ind + nt, 0] = counts[ind:ind + nt, 0]
                full_counts_var[ind:ind + nt, 0] = counts_var[ind:ind + nt, 0]
                e_ch_start = 1
            if dl_energies[-1] == np.inf:
                full_counts[ind:ind + nt, -1] = counts[ind:ind + nt, -1]
                full_counts_var[ind:ind + nt, -1] = counts_var[ind:ind + nt, -1]
                e_ch_end -= 1

            torebin = np.where((dl_energies >= 4.0) & (dl_energies <= 150.0))
            full_counts[ind:ind + nt, 1:-1] = np.apply_along_axis(
                rebin_proportional, 1, counts[ind:ind + nt,
                                              e_ch_start:e_ch_end],
                dl_energies[torebin], sci_energies[1:-1])

            full_counts_var[ind:ind + nt, 1:-1] = np.apply_along_axis(
                rebin_proportional, 1, counts_var[ind:ind + nt,
                                                  e_ch_start:e_ch_end],
                dl_energies[torebin], sci_energies[1:-1])

            ind += nt

        if counts.sum() != full_counts.sum():
            raise ValueError(
                'Original and reformatted count totals do not match')

        try:
            delta_time = packets.get_value('NIX00441')
        except AttributeError:
            delta_time = packets.get_value('NIX00404')

        closing_time_offset = packets.get_value('NIX00269')

        # TODO incorporate into main loop above
        centers = []
        deltas = []
        last = 0
        for i, nt in enumerate(num_times):
            edge = np.hstack([
                delta_time[last:last + nt],
                delta_time[last + nt - 1] + closing_time_offset[i]
            ])
            delta = np.diff(edge)
            center = edge[:-1] + delta / 2
            centers.append(center)
            deltas.append(delta)
            last = last + nt

        centers = np.hstack(centers)
        deltas = np.hstack(deltas)
        deltas = SCETimeDelta(deltas)

        # Data
        data = Data()
        data['time'] = control['time_stamp'][0] + centers
        data['timedel'] = deltas
        data['timedel'].meta = {'NIXS': ['NIX00441', 'NIX00269']}
        data.add_basic(name='triggers', nix='NIX00267', packets=packets)
        data['triggers'] = data['triggers'].astype(
            get_min_uint(data['triggers']))
        data['rcr'] = rcr
        data.add_meta(name='rcr', nix='NIX00401', packets=packets)
        data.add_basic(name='triggers_err',
                       nix='NIX00267',
                       attr='error',
                       packets=packets)
        data['triggers_err'] = np.float32(data['triggers_err'])
        data['counts'] = (full_counts * u.ct).astype(
            get_min_uint(full_counts))[..., :e_max.max() + 1]
        data.add_meta(name='counts', nix='NIX00268', packets=packets)
        data['counts_err'] = np.float32(np.sqrt(full_counts_var) *
                                        u.ct)[..., :e_max.max() + 1]
        data['control_index'] = np.ubyte(0)

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions)
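
# A small sketch (made-up values) of how the downlinked energy ranges above are
# expanded into science channel ids and then into the energy bin mask.
import numpy as np

e_min, e_max, energy_unit = np.array([1]), np.array([6]), np.array([2])  # hypothetical
cids = [np.arange(emin, emax + 1, eunit)
        for emin, emax, eunit in zip(e_min, e_max, energy_unit)]
energy_bin_mask = np.full((1, 32), False, np.ubyte)
energy_bin_mask[:, cids] = True
print(cids[0], energy_bin_mask[0, :8])   # -> [1 3 5] [0 1 0 1 0 1 0 0]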
Example no. 12
    def from_levelb(cls, levelb, parent=''):
        packets, idb_versions, control = ScienceProduct.from_levelb(
            levelb, parent=parent)

        control.add_data('compression_scheme_counts_skm',
                         _get_compression_scheme(packets, 'NIX00260'))

        control.add_data('compression_scheme_triggers_skm',
                         _get_compression_scheme(packets, 'NIX00242'))

        data = Data()
        try:
            data['delta_time'] = np.uint32(
                packets.get_value('NIX00441').to(u.ds))
            data.add_meta(name='delta_time', nix='NIX00441', packets=packets)
        except AttributeError:
            data['delta_time'] = np.uint32(
                packets.get_value('NIX00404').to(u.ds))
            data.add_meta(name='delta_time', nix='NIX00404', packets=packets)
        unique_times = np.unique(data['delta_time'])

        data.add_basic(name='rcr',
                       nix='NIX00401',
                       attr='value',
                       packets=packets,
                       dtype=np.ubyte)
        data['num_pixel_sets'] = np.atleast_1d(
            _get_unique(packets, 'NIX00442', np.ubyte))
        data.add_meta(name='num_pixel_sets', nix='NIX00442', packets=packets)
        pixel_masks, pm_meta = _get_pixel_mask(packets, 'NIXD0407')
        pixel_masks = pixel_masks.reshape(-1, data['num_pixel_sets'][0], 12)
        if packets.ssid == 21 and data['num_pixel_sets'][0] != 12:
            pixel_masks = np.pad(pixel_masks,
                                 ((0, 0), (0, 12 - data['num_pixel_sets'][0]),
                                  (0, 0)))
        data.add_data('pixel_masks', (pixel_masks, pm_meta))
        data.add_data('detector_masks', _get_detector_mask(packets))
        # NIX00405 in BSD is 1 indexed
        data['integration_time'] = SCETimeDelta(packets.get_value('NIX00405'))
        data.add_meta(name='integration_time', nix='NIX00405', packets=packets)

        triggers = np.array(
            [packets.get_value(f'NIX00{i}') for i in range(242, 258)])
        triggers_var = np.array([
            packets.get_value(f'NIX00{i}', attr='error')
            for i in range(242, 258)
        ])

        data['triggers'] = triggers.T.astype(get_min_uint(triggers))
        data['triggers'].meta = {
            'NIXS': [f'NIX00{i}' for i in range(242, 258)]
        }
        data['triggers_err'] = np.float32(np.sqrt(triggers_var).T)
        data.add_basic(name='num_energy_groups',
                       nix='NIX00258',
                       packets=packets,
                       dtype=np.ubyte)

        tmp = dict()
        tmp['e_low'] = np.array(packets.get_value('NIXD0016'), np.ubyte)
        tmp['e_high'] = np.array(packets.get_value('NIXD0017'), np.ubyte)
        tmp['num_data_elements'] = np.array(packets.get_value('NIX00259'))
        unique_energies_low = np.unique(tmp['e_low'])
        unique_energies_high = np.unique(tmp['e_high'])

        counts = np.array(packets.get_value('NIX00260'))
        counts_var = np.array(packets.get_value('NIX00260', attr='error'))

        counts = counts.reshape(unique_times.size, unique_energies_low.size,
                                data['detector_masks'][0].sum(),
                                data['num_pixel_sets'][0].sum())

        counts_var = counts_var.reshape(unique_times.size,
                                        unique_energies_low.size,
                                        data['detector_masks'][0].sum(),
                                        data['num_pixel_sets'][0].sum())
        # t x e x d x p -> t x d x p x e
        counts = counts.transpose((0, 2, 3, 1))

        out_counts = None
        out_var = None

        counts_var = np.sqrt(counts_var.transpose((0, 2, 3, 1)))
        if packets.ssid == 21:
            out_counts = np.zeros((unique_times.size, 32, 12, 32))
            out_var = np.zeros((unique_times.size, 32, 12, 32))
        elif packets.ssid == 22:
            out_counts = np.zeros((unique_times.size, 32, 12, 32))
            out_var = np.zeros((unique_times.size, 32, 12, 32))

        dl_energies = np.array([
            [ENERGY_CHANNELS[lch].e_lower, ENERGY_CHANNELS[hch].e_upper]
            for lch, hch in zip(unique_energies_low, unique_energies_high)
        ]).reshape(-1)
        dl_energies = np.unique(dl_energies)
        sci_energies = np.hstack(
            [[ENERGY_CHANNELS[ch].e_lower for ch in range(32)],
             ENERGY_CHANNELS[31].e_upper])

        # If there is any onboard summing of energy channels rebin back to standard sci channels
        if (unique_energies_high - unique_energies_low).sum() > 0:
            rebinned_counts = np.zeros((*counts.shape[:-1], 32))
            rebinned_counts_var = np.zeros((*counts_var.shape[:-1], 32))
            e_ch_start = 0
            e_ch_end = counts.shape[-1]
            if dl_energies[0] == 0.0:
                rebinned_counts[..., 0] = counts[..., 0]
                rebinned_counts_var[..., 0] = counts_var[..., 0]
                e_ch_start += 1
            if dl_energies[-1] == np.inf:
                rebinned_counts[..., -1] = counts[..., -1]
                rebinned_counts_var[..., -1] = counts_var[..., -1]
                e_ch_end -= 1

            torebin = np.where((dl_energies >= 4.0) & (dl_energies <= 150.0))
            rebinned_counts[..., 1:-1] = np.apply_along_axis(
                rebin_proportional, -1,
                counts[...,
                       e_ch_start:e_ch_end].reshape(-1, e_ch_end - e_ch_start),
                dl_energies[torebin], sci_energies[1:-1]).reshape(
                    (*counts.shape[:-1], 30))

            rebinned_counts_var[..., 1:-1] = np.apply_along_axis(
                rebin_proportional, -1,
                counts_var[..., e_ch_start:e_ch_end].reshape(
                    -1, e_ch_end - e_ch_start), dl_energies[torebin],
                sci_energies[1:-1]).reshape((*counts_var.shape[:-1], 30))

            energy_indices = np.full(32, True)
            energy_indices[[0, -1]] = False

            ix = np.ix_(np.full(unique_times.size, True),
                        data['detector_masks'][0].astype(bool),
                        np.ones(data['num_pixel_sets'][0], dtype=bool),
                        np.full(32, True))

            out_counts[ix] = rebinned_counts
            out_var[ix] = rebinned_counts_var
        else:
            energy_indices = np.full(32, False)
            energy_indices[unique_energies_low.min(
            ):unique_energies_high.max() + 1] = True

            ix = np.ix_(np.full(unique_times.size,
                                True), data['detector_masks'][0].astype(bool),
                        np.ones(data['num_pixel_sets'][0], dtype=bool),
                        energy_indices)

            out_counts[ix] = counts
            out_var[ix] = counts_var

        if counts.sum() != out_counts.sum():
            raise ValueError(
                'Original and reformatted count totals do not match')

        control['energy_bin_mask'] = np.full((1, 32), False, np.ubyte)
        all_energies = set(np.hstack([tmp['e_low'], tmp['e_high']]))
        control['energy_bin_mask'][:, list(all_energies)] = True

        # only fix here as the data is sent, so it is needed for extraction, but it will be all zeros
        data['detector_masks'] = fix_detector_mask(control,
                                                   data['detector_masks'])

        sub_index = np.searchsorted(data['delta_time'], unique_times)
        data = data[sub_index]

        data['time'] = (control['time_stamp'][0] + data['delta_time'] +
                        data['integration_time'] / 2)
        data['timedel'] = data['integration_time']
        data['counts'] = \
            (out_counts * u.ct).astype(get_min_uint(out_counts))[..., :tmp['e_high'].max()+1]
        data.add_meta(name='counts', nix='NIX00260', packets=packets)
        data['counts_err'] = np.float32(out_var *
                                        u.ct)[..., :tmp['e_high'].max() + 1]
        data['control_index'] = control['index'][0]

        data = data['time', 'timedel', 'rcr', 'pixel_masks', 'detector_masks',
                    'num_pixel_sets', 'num_energy_groups', 'triggers',
                    'triggers_err', 'counts', 'counts_err']
        data['control_index'] = np.ubyte(0)

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions)
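
# A minimal sketch (tiny shapes, made-up masks) of the np.ix_ scatter used
# above: boolean masks per axis select the sub-cube of the full
# (time, detector, pixel, energy) array into which the downlinked counts are
# written.
import numpy as np

out = np.zeros((2, 4, 3, 5))                        # time x detector x pixel x energy
det_mask = np.array([True, False, True, False])
pix_mask = np.ones(3, dtype=bool)
eng_mask = np.array([False, True, True, False, False])
ix = np.ix_(np.full(2, True), det_mask, pix_mask, eng_mask)
out[ix] = 7                                         # fills a (2, 2, 3, 2) sub-cube
print(out.sum())                                    # -> 168.0 (= 7 * 2 * 2 * 3 * 2)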
Example no. 13
    def from_levelb(cls, levelb, parent=''):
        packets, idb_versions, control = ScienceProduct.from_levelb(
            levelb, parent=parent)

        data = Data()
        data['start_time'] = packets.get_value('NIX00404').astype(np.uint32)
        data.add_meta(name='start_time', nix='NIX00404', packets=packets)
        data.add_basic(name='rcr',
                       nix='NIX00401',
                       attr='value',
                       packets=packets,
                       dtype=np.ubyte)
        # NIX00405 in BSD is 1 indexed
        data['integration_time'] = packets.get_value('NIX00405').astype(
            np.uint16)
        data.add_meta(name='integration_time', nix='NIX00405', packets=packets)
        data.add_data('pixel_masks', _get_pixel_mask(packets, 'NIXD0407'))
        data.add_data('detector_masks', _get_detector_mask(packets))
        data['triggers'] = np.array(
            [packets.get_value(f'NIX00{i}') for i in range(408, 424)]).T
        data['triggers'].dtype = get_min_uint(data['triggers'])
        data['triggers'].meta = {
            'NIXS': [f'NIX00{i}' for i in range(408, 424)]
        }
        data.add_basic(name='num_samples',
                       nix='NIX00406',
                       packets=packets,
                       dtype=np.uint16)

        num_detectors = 32
        num_energies = 32
        num_pixels = 12

        # Data
        tmp = dict()
        tmp['pixel_id'] = np.array(packets.get_value('NIXD0158'), np.ubyte)
        tmp['detector_id'] = np.array(packets.get_value('NIXD0153'), np.ubyte)
        tmp['channel'] = np.array(packets.get_value('NIXD0154'), np.ubyte)
        tmp['continuation_bits'] = np.array(packets.get_value('NIXD0159'),
                                            np.ubyte)

        control['energy_bin_mask'] = np.full((1, 32), False, np.ubyte)
        all_energies = set(tmp['channel'])
        control['energy_bin_mask'][:, list(all_energies)] = True

        # Find contiguous time indices
        unique_times = np.unique(data['start_time'])
        time_indices = np.searchsorted(unique_times, data['start_time'])

        counts_1d = packets.get_value('NIX00065')

        end_inds = np.cumsum(data['num_samples'])
        start_inds = np.hstack([0, end_inds[:-1]])
        dd = [(tmp['pixel_id'][s:e], tmp['detector_id'][s:e],
               tmp['channel'][s:e], counts_1d[s:e])
              for s, e in zip(start_inds.astype(int), end_inds)]

        counts = np.zeros(
            (len(unique_times), num_detectors, num_pixels, num_energies),
            np.uint32)
        for i, (pid, did, cid, cc) in enumerate(dd):
            counts[time_indices[i], did, pid, cid] = cc

        data['detector_masks'] = fix_detector_mask(control,
                                                   data['detector_masks'])

        sub_index = np.searchsorted(data['start_time'], unique_times)
        data = data[sub_index]
        data['time'] = control["time_stamp"][0] \
            + data['start_time'] + data['integration_time'] / 2
        data['timedel'] = SCETimeDelta(data['integration_time'])
        data['counts'] = (counts * u.ct).astype(get_min_uint(counts))
        # data.add_meta(name='counts', nix='NIX00065', packets=packets)
        data['control_index'] = control['index'][0]

        data.remove_columns(['start_time', 'integration_time', 'num_samples'])

        return cls(service_type=packets.service_type,
                   service_subtype=packets.service_subtype,
                   ssid=packets.ssid,
                   control=control,
                   data=data,
                   idb_versions=idb_versions)
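
# A tiny sketch (made-up values) of the count accumulation above: each
# (pixel_id, detector_id, channel) triplet of a sample block indexes directly
# into the (time, detector, pixel, energy) cube.
import numpy as np

counts = np.zeros((1, 4, 3, 5), np.uint32)      # time x detector x pixel x energy
pid = np.array([0, 2])                          # hypothetical pixel ids
did = np.array([1, 3])                          # hypothetical detector ids
cid = np.array([4, 0])                          # hypothetical channels
cc = np.array([7, 9])                           # hypothetical counts
counts[0, did, pid, cid] = cc
print(counts[0, 1, 0, 4], counts[0, 3, 2, 0])   # -> 7 9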