def from_packets(cls, packets, eng_packets):
    """Build a variance science product from decommutated telemetry packets.

    Fills a ``Control`` table with masks and compression parameters, then a
    ``Data`` table with the decompressed per-sample variance values.
    """
    # Control table: per-packet metadata, masks and compression scheme.
    control = Control.from_packets(packets)
    control['samples_per_variance'] = np.array(packets.get('NIX00279'), np.ubyte)
    control['pixel_mask'] = _get_pixel_mask(packets)
    control['detector_mask'] = _get_detector_mask(packets)
    control['compression_scheme_variance_skm'] = _get_compression_scheme(
        packets, 'NIXD0118', 'NIXD0119', 'NIXD0120')

    # Expand each 32-bit NIX00282 word into a row of 32 booleans (energy bins).
    raw_masks = packets.get('NIX00282')
    control['energy_bin_mask'] = np.array(
        [[bool(int(bit)) for bit in format(word, '032b')] for word in raw_masks])

    control['num_energies'] = 1
    control['num_samples'] = packets.get('NIX00280')

    obs_time, obs_duration = control._get_time()

    # Repeat each control-row index once per sample so every data row can be
    # mapped back to the control entry that produced it.
    sample_to_control = np.hstack([
        np.full(n_samp, row_idx)
        for n_samp, row_idx in control[['num_samples', 'index']]
    ])

    # Decompress the variance values using the scheme of the first control row.
    skm_s, skm_k, skm_m = control['compression_scheme_variance_skm'][0]
    variance, variance_var = decompress(packets.get('NIX00281'),
                                        s=skm_s, k=skm_k, m=skm_m,
                                        return_variance=True)

    # Data table: one row per sample.
    data = Data()
    data['time'] = obs_time
    data['timedel'] = obs_duration
    data['control_index'] = sample_to_control
    data['variance'] = variance
    data['variance_err'] = np.sqrt(variance_var)

    return cls(control=control, data=data)
def from_packets(cls, packets, eng_packets):
    """Build a spectra science product with eight configurable sub-spectra.

    Decompresses the counts, rebins the concatenated sub-spectrum channels
    back onto the 1024 native channels and stores them in a
    detector x pixel x channel cube.
    """
    control = Control.from_packets(packets)
    # Integration time is telemetered as (n + 1) tenths of a second.
    control['integration_time'] = (
        np.array(packets['NIX00122'], np.uint32) + 1) * 0.1 * u.s
    # control['obs_beg'] = control['obs_utc']
    # control['.obs_end'] = control['obs_beg'] + timedelta(seconds=control['duration'].astype('float'))
    # control['.obs_avg'] = control['obs_beg'] + (control['obs_end'] - control['obs_beg']) / 2

    # Control
    control['quiet_time'] = np.array(packets['NIX00123'], np.uint16)
    control['live_time'] = np.array(packets['NIX00124'], np.uint32)
    control['average_temperature'] = np.array(packets['NIX00125'], np.uint16)
    control['detector_mask'] = _get_detector_mask(packets)
    control['pixel_mask'] = _get_pixel_mask(packets)
    control['subspectrum_mask'] = _get_sub_spectrum_mask(packets)
    control['compression_scheme_counts_skm'] = _get_compression_scheme(
        packets, 'NIXD0126', 'NIXD0127', 'NIXD0128')

    # Each of the 8 sub-spectra is described by a triple of consecutive NIXD
    # parameters starting at NIXD0129 (num_points, num_summed_channel,
    # lowest_channel).  NOTE: the loop variable `i` (300..307) is unused.
    subspec_data = {}
    j = 129
    for subspec, i in enumerate(range(300, 308)):
        subspec_data[subspec + 1] = {
            'num_points': packets.get(f'NIXD0{j}')[0],
            'num_summed_channel': packets.get(f'NIXD0{j + 1}')[0],
            'lowest_channel': packets.get(f'NIXD0{j + 2}')[0]
        }
        j += 3

    control['num_samples'] = np.array(packets.get('NIX00159'), np.uint16)
    # control.remove_column('index')
    # control = unique(control)
    # control['index'] = np.arange(len(control))

    # Store the per-sub-spectrum definitions as 1 x 8 rows on the control table.
    control['subspec_num_points'] = np.array(
        [v['num_points'] for v in subspec_data.values()]).reshape(1, -1)
    control['subspec_num_summed_channel'] = np.array(
        [v['num_summed_channel'] for v in subspec_data.values()]).reshape(1, -1)
    control['subspec_lowest_channel'] = np.array(
        [v['lowest_channel'] for v in subspec_data.values()]).reshape(1, -1)

    # Indices of the sub-spectra that were actually enabled on board.
    subspec_index = np.argwhere(
        control['subspectrum_mask'][0].flatten() == 1)
    num_sub_spectra = control['subspectrum_mask'].sum(axis=1)

    # Channel edges of each enabled sub-spectrum:
    # (points + 1) edges spaced by the on-board summing factor, offset by the
    # lowest channel.
    sub_channels = [
        np.arange(control['subspec_num_points'][0, index] + 1)
        * (control['subspec_num_summed_channel'][0, index] + 1)
        + control['subspec_lowest_channel'][0, index]
        for index in subspec_index
    ]
    channels = list(chain(*[ch.tolist() for ch in sub_channels]))
    control['num_channels'] = len(channels)

    # Data
    data = Data()
    data['control_index'] = [0]
    # Observation time = SCET start plus half the integration time.
    data['time'] = (Time(
        scet_to_datetime(f"{control['scet_coarse'][0]}"
                         f":{control['scet_fine'][0]}"))
        + control['integration_time'][0] / 2).reshape(1)
    data['timedel'] = control['integration_time'][0]
    # data['detector_id'] = np.array(packets.get('NIXD0155'), np.ubyte)
    # data['pixel_id'] = np.array(packets.get('NIXD0156'), np.ubyte)
    # data['subspec_id'] = np.array(packets.get('NIXD0157'), np.ubyte)

    # NOTE(review): num_spec_points is computed but never used below.
    num_spec_points = np.array(packets.get('NIX00146'))

    cs, ck, cm = control['compression_scheme_counts_skm'][0]
    counts, counts_var = decompress(packets.get('NIX00158'),
                                    s=cs, k=ck, m=cm, return_variance=True)

    # Rebin each record's concatenated sub-spectrum counts onto the 1024
    # native science channels (edges 0..1024).
    counts_rebinned = np.apply_along_axis(
        rebin_proportional, 1, counts.reshape(-1, len(channels)),
        channels, np.arange(1025))
    counts_var_rebinned = np.apply_along_axis(
        rebin_proportional, 1, counts_var.reshape(-1, len(channels)),
        channels, np.arange(1025))

    # One detector/pixel id per group of num_sub_spectra records.
    dids = np.array(packets.get('NIXD0155'),
                    np.ubyte).reshape(-1, num_sub_spectra[0])[:, 0]
    pids = np.array(packets.get('NIXD0156'),
                    np.ubyte).reshape(-1, num_sub_spectra[0])[:, 0]

    # Scatter into the full 32 detector x 12 pixel x 1024 channel cube.
    full_counts = np.zeros((32, 12, 1024))
    full_counts[dids, pids] = counts_rebinned
    full_counts_var = np.zeros((32, 12, 1024))
    full_counts_var[dids, pids] = counts_var_rebinned

    data['counts'] = full_counts.reshape((1, *full_counts.shape))
    data['counts_err'] = np.sqrt(full_counts_var).reshape(
        (1, *full_counts_var.shape))

    return cls(control=control, data=data)
def from_packets(cls, packets, eng_packets):
    """Build a spectra science product (32 energies per detector sample).

    Samples arrive in groups that are not guaranteed to be a full block of
    32 detectors, so all per-sample arrays are padded up to a multiple of 32
    before being reshaped to (time, 32, ...).
    """
    # Header
    control = Control.from_packets(packets)
    # Control
    control['pixel_mask'] = _get_pixel_mask(packets)
    control['compression_scheme_spectra_skm'] = _get_compression_scheme(
        packets, 'NIXD0115', 'NIXD0116', 'NIXD0117')
    control['compression_scheme_triggers_skm'] = _get_compression_scheme(
        packets, 'NIXD0112', 'NIXD0113', 'NIXD0114')

    # Fixed for spectra
    num_energies = 32
    control['num_energies'] = num_energies
    control['num_samples'] = np.array(packets['NIX00089'])

    # Due to the way packets are split up full contiguous block of detector 1-32 are not always
    # down-linked to the ground so need to pad the array to write to table and later fits
    total_samples = control['num_samples'].sum()
    # NOTE(review): `full` is unused; only the remainder matters for padding.
    full, partial = divmod(total_samples, 32)
    pad_after = 0
    if partial != 0:
        pad_after = 32 - partial

    # Repeat each control index once per sample; pad with -1 so the padded
    # rows are identifiable, then view as (time, 32 detectors).
    control_indices = np.pad(np.hstack([
        np.full(ns, cind) for ns, cind in control[['num_samples', 'index']]
    ]), (0, pad_after), constant_values=-1)
    control_indices = control_indices.reshape(-1, 32)

    duration, time = cls._get_time(control, num_energies, packets, pad_after)

    # sample x detector x energy
    # counts = np.array([eng_packets.get('NIX00{}'.format(i)) for i in range(452, 484)],
    #                   np.uint32).T * u.ct
    # Decompress the 32 per-energy parameters NIX00452..NIX00483 and stack
    # them so rows are samples and columns are energies.
    ss, sk, sm = control['compression_scheme_spectra_skm'][0]
    counts, counts_var = zip(*[
        decompress(packets.get('NIX00{}'.format(i)), s=ss, k=sk, m=sm,
                   return_variance=True) for i in range(452, 484)
    ])
    counts = np.vstack(counts).T
    counts_var = np.vstack(counts_var).T
    counts = np.pad(counts, ((0, pad_after), (0, 0)), constant_values=0)
    counts_var = np.pad(counts_var, ((0, pad_after), (0, 0)),
                        constant_values=0)

    ts, tk, tm = control['compression_scheme_triggers_skm'][0]
    triggers, triggers_var = decompress(packets.get('NIX00484'),
                                        s=ts, k=tk, m=tm,
                                        return_variance=True)
    triggers = np.pad(triggers, (0, pad_after), constant_values=0)
    triggers_var = np.pad(triggers_var, (0, pad_after), constant_values=0)

    # Padded detector ids use -1 as the "no data" sentinel.
    detector_index = np.pad(np.array(packets.get('NIX00100'), np.int16),
                            (0, pad_after), constant_values=-1)
    num_integrations = np.pad(np.array(packets.get('NIX00485'), np.uint16),
                              (0, pad_after), constant_values=0)

    # Data
    data = Data()
    data['control_index'] = control_indices[:, 0]
    data['time'] = time[:, 0]
    data['timedel'] = duration[:, 0]
    # NOTE(review): attaching u.ct to a detector *index* looks like a
    # copy-paste slip from the spectra line below — confirm before changing.
    data['detector_index'] = detector_index.reshape(-1, 32) * u.ct
    data['spectra'] = counts.reshape(-1, 32, num_energies) * u.ct
    data['spectra_err'] = np.sqrt(counts_var.reshape(-1, 32, num_energies))
    data['triggers'] = triggers.reshape(-1, num_energies)
    data['triggers_err'] = np.sqrt(triggers_var.reshape(-1, num_energies))
    data['num_integrations'] = num_integrations.reshape(-1, num_energies)

    return cls(control=control, data=data)
def from_packets(cls, packets, eng_packets):
    """Build a science product with a variable number of energies per record.

    Counts arrive as one flat compressed stream; they are re-chunked per
    control record using the per-record (num_samples, num_energies) pair.
    """
    control = Control.from_packets(packets)
    control['detector_mask'] = _get_detector_mask(packets)
    control['pixel_mask'] = _get_pixel_mask(packets)
    control['energy_bin_edge_mask'] = _get_energy_bins(
        packets, 'NIX00266', 'NIXD0107')
    control['compression_scheme_counts_skm'] = \
        _get_compression_scheme(packets, 'NIXD0101', 'NIXD0102', 'NIXD0103')
    control['compression_scheme_triggers_skm'] = \
        _get_compression_scheme(packets, 'NIXD0104', 'NIXD0105', 'NIXD0106')
    control['num_energies'] = _get_num_energies(packets)
    # NIX00271 repeats per energy; take the value at the end of each record's
    # energy run (cumulative sum - 1 indexes the last repeat).
    control['num_samples'] = np.array(
        packets['NIX00271'])[np.cumsum(control['num_energies']) - 1]

    time, duration = control._get_time()
    # Map a given entry back to the control info through index
    control_indices = np.hstack([
        np.full(ns, cind) for ns, cind in control[['num_samples', 'index']]
    ])

    cs, ck, cm = control['compression_scheme_counts_skm'][0]
    counts, counts_var = decompress(packets['NIX00272'], s=cs, k=ck, m=cm,
                                    return_variance=True)

    ts, tk, tm = control['compression_scheme_triggers_skm'][0]
    triggers, triggers_var = decompress(packets['NIX00274'], s=ts, k=tk, m=tm,
                                        return_variance=True)

    # Start offsets of each record in the flat counts stream:
    # record i spans num_samples[i] * num_energies[i] values.
    flat_indices = np.hstack((0, np.cumsum([*control['num_samples']])
                              * control['num_energies'])).astype(int)

    # Re-chunk the flat stream into per-record (energy, sample) arrays.
    counts_reformed = [
        np.array(counts[flat_indices[i]:flat_indices[i + 1]]).reshape(
            n_eng, n_sam) for i, (
            n_sam, n_eng) in enumerate(control[['num_samples', 'num_energies']])
    ]
    counts_var_reformed = [
        np.array(counts_var[flat_indices[i]:flat_indices[i + 1]]).reshape(
            n_eng, n_sam) for i, (
            n_sam, n_eng) in enumerate(control[['num_samples', 'num_energies']])
    ]

    # Stack along samples then transpose to (sample, energy).
    counts = np.hstack(counts_reformed).T
    counts_var = np.hstack(counts_var_reformed).T

    data = Data()
    data['control_index'] = control_indices
    data['time'] = time
    data['timedel'] = duration
    data['triggers'] = triggers
    data['triggers_err'] = np.sqrt(triggers_var)
    data['rcr'] = packets['NIX00276']
    data['counts'] = counts * u.ct
    data['counts_err'] = np.sqrt(counts_var) * u.ct

    return cls(control=control, data=data)
def from_packets(cls, packets, eng_packets):
    """Build a spectrogram science product, rebinning the down-linked energy
    binning back onto the 32 standard science energy channels.

    Fix: the variance for the top energy channel was copied from ``counts``
    instead of ``counts_var`` (copy-paste error; the channel-0 branch uses
    ``counts_var`` correctly).
    """
    # Control
    control = Control.from_packets(packets)
    control['pixel_mask'] = np.unique(_get_pixel_mask(packets), axis=0)
    control['detector_mask'] = np.unique(_get_detector_mask(packets), axis=0)
    control['rcr'] = np.unique(packets['NIX00401']).astype(np.int16)
    control['index'] = range(len(control))

    # Down-linked energy binning: lowest/highest channel and summing factor.
    e_min = np.array(packets['NIXD0442'])
    e_max = np.array(packets['NIXD0443'])
    energy_unit = np.array(packets['NIXD0019']) + 1
    num_times = np.array(packets['NIX00089'])
    total_num_times = num_times.sum()

    cs, ck, cm = control['compression_scheme_counts_skm'][0]
    counts, counts_var = decompress(packets['NIX00268'], s=cs, k=ck, m=cm,
                                    return_variance=True)
    counts = counts.reshape(total_num_times, -1)
    counts_var = counts_var.reshape(total_num_times, -1)

    full_counts = np.zeros((total_num_times, 32))
    full_counts_var = np.zeros((total_num_times, 32))

    # Science channel indices covered by each packet's binning.
    cids = [
        np.arange(emin, emax + 1, eunit)
        for (emin, emax, eunit) in zip(e_min, e_max, energy_unit)
    ]

    control['energy_bin_mask'] = np.full((1, 32), False, np.ubyte)
    control['energy_bin_mask'][:, cids] = True

    # Down-linked bin edges in keV (first packet's binning) and the standard
    # science bin edges to rebin onto.
    dl_energies = np.array([[ENERGY_CHANNELS[ch]['e_lower'] for ch in chs]
                            + [ENERGY_CHANNELS[chs[-1]]['e_upper']]
                            for chs in cids][0])
    sci_energies = np.hstack(
        [[ENERGY_CHANNELS[ch]['e_lower'] for ch in range(32)],
         ENERGY_CHANNELS[31]['e_upper']])

    ind = 0
    for nt in num_times:
        e_ch_start = 0
        e_ch_end = counts.shape[1]
        # Channels 0 and 31 are open-ended (0 keV / inf keV) and cannot be
        # proportionally rebinned — copy them straight through.
        if dl_energies[0] == 0:
            full_counts[ind:ind + nt, 0] = counts[ind:ind + nt, 0]
            full_counts_var[ind:ind + nt, 0] = counts_var[ind:ind + nt, 0]
            e_ch_start = 1
        if dl_energies[-1] == np.inf:
            full_counts[ind:ind + nt, -1] = counts[ind:ind + nt, -1]
            # BUG FIX: previously copied from `counts` instead of `counts_var`.
            full_counts_var[ind:ind + nt, -1] = counts_var[ind:ind + nt, -1]
            e_ch_end -= 1

        torebin = np.where((dl_energies >= 4.0) & (dl_energies <= 150.0))
        full_counts[ind:ind + nt, 1:-1] = np.apply_along_axis(
            rebin_proportional, 1,
            counts[ind:ind + nt, e_ch_start:e_ch_end],
            dl_energies[torebin], sci_energies[1:-1])
        full_counts_var[ind:ind + nt, 1:-1] = np.apply_along_axis(
            rebin_proportional, 1,
            counts_var[ind:ind + nt, e_ch_start:e_ch_end],
            dl_energies[torebin], sci_energies[1:-1])
        ind += nt

    # Proportional rebinning must conserve the total counts.
    if counts.sum() != full_counts.sum():
        raise ValueError(
            'Original and reformatted count totals do not match')

    delta_time = (np.array(packets['NIX00441'], np.uint16)) * 0.1 * u.s
    closing_time_offset = (np.array(packets['NIX00269'],
                                    np.uint16)) * 0.1 * u.s

    # TODO incorporate into main loop above
    # Bin centres/widths: append the closing edge to each packet's start
    # times, then difference.
    centers = []
    deltas = []
    last = 0
    for i, nt in enumerate(num_times):
        edge = np.hstack([
            delta_time[last:last + nt],
            delta_time[last + nt - 1] + closing_time_offset[i]
        ])
        delta = np.diff(edge)
        center = edge[:-1] + delta / 2
        centers.append(center)
        deltas.append(delta)
        last = last + nt

    centers = np.hstack(centers)
    deltas = np.hstack(deltas)

    # Data
    data = Data()
    data['time'] = Time(
        scet_to_datetime(f'{int(control["time_stamp"][0])}:0')) + centers
    data['timedel'] = deltas

    ts, tk, tm = control['compression_scheme_triggers_skm'][0]
    triggers, triggers_var = decompress(packets['NIX00267'], s=ts, k=tk, m=tm,
                                        return_variance=True)
    data['triggers'] = triggers
    data['triggers_err'] = np.sqrt(triggers_var)
    data['counts'] = full_counts * u.ct
    data['counts_err'] = np.sqrt(full_counts_var) * u.ct
    data['control_index'] = 0

    return cls(control=control, data=data)
def from_packets(cls, packets, eng_packets):
    """Build a visibility science product (complex visibilities per time,
    detector and energy channel).
    """
    # Control
    control = Control.from_packets(packets)
    control.remove_column('num_structures')
    control = unique(control)
    if len(control) != 1:
        raise ValueError()
    control['index'] = range(len(control))

    data = Data()
    data['control_index'] = np.full(len(packets['NIX00441']), 0)
    data['delta_time'] = (np.array(packets['NIX00441'],
                                   np.uint16)) * 0.1 * u.s
    unique_times = np.unique(data['delta_time'])

    # time = np.array([])
    # for dt in set(self.delta_time):
    #     i, = np.where(self.delta_time == dt)
    #     nt = sum(np.array(packets['NIX00258'])[i])
    #     time = np.append(time, np.repeat(dt, nt))
    # self.time = time

    data['rcr'] = packets['NIX00401']
    data['pixel_mask1'] = _get_pixel_mask(packets, 'NIXD0407')
    data['pixel_mask2'] = _get_pixel_mask(packets, 'NIXD0444')
    data['pixel_mask3'] = _get_pixel_mask(packets, 'NIXD0445')
    data['pixel_mask4'] = _get_pixel_mask(packets, 'NIXD0446')
    data['pixel_mask5'] = _get_pixel_mask(packets, 'NIXD0447')
    data['detector_masks'] = _get_detector_mask(packets)
    # NOTE(review): unlike delta_time this is unitless (no * u.s) — confirm.
    data['integration_time'] = (np.array(packets['NIX00405'])) * 0.1

    ts, tk, tm = control['compression_scheme_triggers_skm'][0]
    # 16 trigger accumulators NIX00242..NIX00257, decompressed together.
    triggers, triggers_var = decompress(
        [packets[f'NIX00{i}'] for i in range(242, 258)],
        s=ts, k=tk, m=tm, return_variance=True)
    data['triggers'] = triggers.T
    data['triggers_err'] = np.sqrt(triggers_var).T

    # Keep one data row per unique time.
    tids = np.searchsorted(data['delta_time'], unique_times)
    data = data[tids]

    # NOTE(review): num_energy_groups is computed but never used below.
    num_energy_groups = sum(packets['NIX00258'])

    # Data
    vis = np.zeros((unique_times.size, 32, 32), dtype=complex)
    vis_err = np.zeros((unique_times.size, 32, 32), dtype=complex)

    e_low = np.array(packets['NIXD0016'])
    e_high = np.array(packets['NIXD0017'])

    # TODO create energy bin mask
    control['energy_bin_mask'] = np.full((1, 32), False, np.ubyte)
    all_energies = set(np.hstack([e_low, e_high]))
    control['energy_bin_mask'][:, list(all_energies)] = True

    data['flux'] = np.array(packets['NIX00261']).reshape(
        unique_times.size, -1)
    num_detectors = packets['NIX00262'][0]
    detector_id = np.array(packets['NIX00100']).reshape(
        unique_times.size, -1, num_detectors)

    # vis[:, detector_id[0], e_low.reshape(unique_times.size, -1)[0]] = (
    #     np.array(packets['NIX00263']) + np.array(packets['NIX00264'])
    #     * 1j).reshape(unique_times.size, num_detectors, -1)

    # Real and imaginary parts share the counts compression scheme.
    ds, dk, dm = control['compression_scheme_counts_skm'][0]
    real, real_var = decompress(packets['NIX00263'], s=ds, k=dk, m=dm,
                                return_variance=True)
    imaginary, imaginary_var = decompress(packets['NIX00264'],
                                          s=ds, k=dk, m=dm,
                                          return_variance=True)

    # Scatter the (time, detector, energy) visibilities into the full cube
    # using the first record's detector/energy layout.
    mesh = np.ix_(np.arange(unique_times.size), detector_id[0][0],
                  e_low.reshape(unique_times.size, -1)[0])
    vis[mesh] = (real + imaginary * 1j).reshape(unique_times.size,
                                                num_detectors, -1)
    # TODO this doesn't seem correct prob need combine in a better
    vis_err[mesh] = (np.sqrt(real_var)
                     + np.sqrt(imaginary_var) * 1j).reshape(
        unique_times.size, num_detectors, -1)

    data['visibility'] = vis
    data['visibility_err'] = vis_err

    data['time'] = Time(scet_to_datetime(f'{int(control["time_stamp"][0])}:0')) \
        + data['delta_time'] + data['integration_time'] / 2
    data['timedel'] = data['integration_time']

    return cls(control=control, data=data)
def from_packets(cls, packets, eng_packets):
    """Build a pixel-data science product (ssid 21: 12 pixel sets, ssid 22: 4).

    Counts arrive as (time, energy, detector, pixel) and are scattered into a
    full (time, 32 det, pixel-sets, 32 energy) cube, rebinning onto the
    standard science energy channels when channels were summed on board.

    Fixes: the two open-ended-channel special cases below were joined with
    ``elif`` so the top-channel copy never ran when the bottom one did (the
    sibling spectrogram implementation treats them independently); a leftover
    ``ipdb.set_trace()`` debug hook before the ValueError has been removed.
    """
    # Control
    ssid = packets['SSID'][0]
    control = Control.from_packets(packets)
    control.remove_column('num_structures')
    control = unique(control)
    if len(control) != 1:
        raise ValueError(
            'Creating a science product form packets from multiple products'
        )
    control['index'] = 0

    data = Data()
    data['delta_time'] = (np.array(packets['NIX00441'],
                                   np.int32)) * 0.1 * u.s
    unique_times = np.unique(data['delta_time'])

    data['rcr'] = np.array(packets['NIX00401'], np.ubyte)
    data['num_pixel_sets'] = np.array(packets['NIX00442'], np.ubyte)

    # Pixel masks come flat; reshape per pixel set, and for ssid 21 pad up to
    # the full 12 sets so the table column has a fixed width.
    pixel_masks = _get_pixel_mask(packets, 'NIXD0407')
    pixel_masks = pixel_masks.reshape(-1, data['num_pixel_sets'][0], 12)
    if ssid == 21 and data['num_pixel_sets'][0] != 12:
        pixel_masks = np.pad(
            pixel_masks,
            ((0, 0), (0, 12 - data['num_pixel_sets'][0]), (0, 0)))
    data['pixel_masks'] = pixel_masks

    data['detector_masks'] = _get_detector_mask(packets)
    data['integration_time'] = (np.array(packets.get('NIX00405'),
                                         np.uint16)) * 0.1 * u.s

    # TODO change once FSW fixed — triggers currently use the counts scheme.
    ts, tk, tm = control['compression_scheme_counts_skm'][0]
    triggers, triggers_var = decompress(
        [packets.get(f'NIX00{i}') for i in range(242, 258)],
        s=ts, k=tk, m=tm, return_variance=True)
    data['triggers'] = triggers.T
    data['triggers_err'] = np.sqrt(triggers_var).T
    data['num_energy_groups'] = np.array(packets['NIX00258'], np.ubyte)

    tmp = dict()
    tmp['e_low'] = np.array(packets['NIXD0016'], np.ubyte)
    tmp['e_high'] = np.array(packets['NIXD0017'], np.ubyte)
    tmp['num_data_elements'] = np.array(packets['NIX00259'])
    unique_energies_low = np.unique(tmp['e_low'])
    unique_energies_high = np.unique(tmp['e_high'])

    cs, ck, cm = control['compression_scheme_counts_skm'][0]
    counts, counts_var = decompress(packets.get('NIX00260'), s=cs, k=ck, m=cm,
                                    return_variance=True)

    counts = counts.reshape(unique_times.size, unique_energies_low.size,
                            data['detector_masks'][0].sum(),
                            data['num_pixel_sets'][0].sum())
    counts_var = counts_var.reshape(unique_times.size,
                                    unique_energies_low.size,
                                    data['detector_masks'][0].sum(),
                                    data['num_pixel_sets'][0].sum())
    # t x e x d x p -> t x d x p x e
    counts = counts.transpose((0, 2, 3, 1))
    # NOTE: from here counts_var holds the standard deviation, not variance.
    counts_var = np.sqrt(counts_var.transpose((0, 2, 3, 1)))

    if ssid == 21:
        out_counts = np.zeros((unique_times.size, 32, 12, 32))
        out_var = np.zeros((unique_times.size, 32, 12, 32))
    elif ssid == 22:
        out_counts = np.zeros((unique_times.size, 32, 4, 32))
        out_var = np.zeros((unique_times.size, 32, 4, 32))

    # Down-linked energy-bin edges in keV and the standard science edges.
    dl_energies = np.array([
        [ENERGY_CHANNELS[lch]['e_lower'], ENERGY_CHANNELS[hch]['e_upper']]
        for lch, hch in zip(unique_energies_low, unique_energies_high)
    ]).reshape(-1)
    dl_energies = np.unique(dl_energies)
    sci_energies = np.hstack(
        [[ENERGY_CHANNELS[ch]['e_lower'] for ch in range(32)],
         ENERGY_CHANNELS[31]['e_upper']])

    # If there is any onboard summing of energy channels rebin back to standard sci channels
    if (unique_energies_high - unique_energies_low).sum() > 0:
        rebinned_counts = np.zeros((*counts.shape[:-1], 32))
        rebinned_counts_var = np.zeros((*counts_var.shape[:-1], 32))

        e_ch_start = 0
        e_ch_end = counts.shape[-1]
        # Channels 0 and 31 are open-ended (0 keV / inf keV) and cannot be
        # proportionally rebinned — copy them straight through.
        if dl_energies[0] == 0.0:
            rebinned_counts[..., 0] = counts[..., 0]
            rebinned_counts_var[..., 0] = counts_var[..., 0]
            e_ch_start += 1
        # BUG FIX: was `elif`, which skipped the top channel whenever the
        # bottom channel was also open-ended.
        if dl_energies[-1] == np.inf:
            rebinned_counts[..., -1] = counts[..., -1]
            rebinned_counts_var[..., -1] = counts_var[..., -1]
            e_ch_end -= 1

        torebin = np.where((dl_energies >= 4.0) & (dl_energies <= 150.0))
        rebinned_counts[..., 1:-1] = np.apply_along_axis(
            rebin_proportional, -1,
            counts[..., e_ch_start:e_ch_end].reshape(
                -1, e_ch_end - e_ch_start),
            dl_energies[torebin], sci_energies[1:-1]).reshape(
            (*counts.shape[:-1], 30))
        rebinned_counts_var[..., 1:-1] = np.apply_along_axis(
            rebin_proportional, -1,
            counts_var[..., e_ch_start:e_ch_end].reshape(
                -1, e_ch_end - e_ch_start),
            dl_energies[torebin], sci_energies[1:-1]).reshape(
            (*counts_var.shape[:-1], 30))

        energy_indices = np.full(32, True)
        energy_indices[[0, -1]] = False

        ix = np.ix_(np.full(unique_times.size, True),
                    data['detector_masks'][0].astype(bool),
                    np.ones(data['num_pixel_sets'][0], dtype=bool),
                    np.full(32, True))
        out_counts[ix] = rebinned_counts
        out_var[ix] = rebinned_counts_var
    else:
        # No summing: scatter directly into the covered energy channels.
        energy_indices = np.full(32, False)
        energy_indices[unique_energies_low.min():
                       unique_energies_high.max() + 1] = True

        ix = np.ix_(np.full(unique_times.size, True),
                    data['detector_masks'][0].astype(bool),
                    np.ones(data['num_pixel_sets'][0], dtype=bool),
                    energy_indices)
        out_counts[ix] = counts
        out_var[ix] = counts_var

    # Rebinning/scattering must conserve the total counts.
    if counts.sum() != out_counts.sum():
        raise ValueError(
            'Original and reformatted count totals do not match')

    control['energy_bin_mask'] = np.full((1, 32), False, np.ubyte)
    all_energies = set(np.hstack([tmp['e_low'], tmp['e_high']]))
    control['energy_bin_mask'][:, list(all_energies)] = True

    # Keep one data row per unique time.
    sub_index = np.searchsorted(data['delta_time'], unique_times)
    data = data[sub_index]

    data['time'] = Time(scet_to_datetime(f'{int(control["time_stamp"][0])}:0')) \
        + data['delta_time'] + data['integration_time'] / 2
    data['timedel'] = data['integration_time']
    data['counts'] = out_counts * u.ct
    data['counts_err'] = out_var * u.ct
    data['control_index'] = control['index'][0]

    data.remove_columns(['delta_time', 'integration_time'])
    data = data['time', 'timedel', 'rcr', 'pixel_masks', 'detector_masks',
                'num_pixel_sets', 'num_energy_groups', 'triggers',
                'triggers_err', 'counts', 'counts_err']
    data['control_index'] = 0

    return cls(control=control, data=data)
def from_packets(cls, packets, eng_packets):
    """Build a raw pixel-data science product from continuation-bit encoded
    counts.

    Counts are variable-length encoded: continuation bits 0 mean an implicit
    count of 1, 1 means one raw byte, 2 means two raw bytes combined.

    Fixes: ``packets.get('NIXD0159', np.ubyte)`` passed ``np.ubyte`` as the
    dict-get *default* instead of converting the dtype (every sibling field
    uses ``np.array(packets.get(...), np.ubyte)``); the decoded counts were
    stored as ``uint16`` although a two-byte continuation count can reach
    65791 and the destination cube is ``uint32``.
    """
    control = Control.from_packets(packets)
    control.remove_column('num_structures')
    control = unique(control)
    if len(control) != 1:
        raise ValueError(
            'Creating a science product form packets from multiple products'
        )
    control['index'] = 0

    data = Data()
    data['start_time'] = (np.array(packets.get('NIX00404'),
                                   np.uint16)) * 0.1 * u.s
    data['rcr'] = np.array(packets.get('NIX00401')[0], np.ubyte)
    data['integration_time'] = (np.array(
        packets.get('NIX00405')[0], np.int16)) * 0.1 * u.s
    data['pixel_masks'] = _get_pixel_mask(packets, 'NIXD0407')
    data['detector_masks'] = _get_detector_mask(packets)
    # 16 trigger accumulators NIX00408..NIX00423.
    data['triggers'] = np.array(
        [packets.get(f'NIX00{i}') for i in range(408, 424)], np.int64).T
    data['num_samples'] = np.array(packets.get('NIX00406'), np.int16)

    num_detectors = 32
    num_energies = 32
    num_pixels = 12

    # Data
    tmp = dict()
    tmp['pixel_id'] = np.array(packets.get('NIXD0158'), np.ubyte)
    tmp['detector_id'] = np.array(packets.get('NIXD0153'), np.ubyte)
    tmp['channel'] = np.array(packets.get('NIXD0154'), np.ubyte)
    # BUG FIX: np.ubyte was passed as the default of dict.get, not as a dtype.
    tmp['continuation_bits'] = np.array(packets.get('NIXD0159'), np.ubyte)

    control['energy_bin_mask'] = np.full((1, 32), False, np.ubyte)
    all_energies = set(tmp['channel'])
    control['energy_bin_mask'][:, list(all_energies)] = True

    # Find contiguous time indices
    unique_times = np.unique(data['start_time'])
    time_indices = np.searchsorted(unique_times, data['start_time'])

    # Create full count array: 0s are not sent down; cb == 0 implies 1 count,
    # cb == 1 takes one raw byte, cb == 2 combines two raw bytes.
    raw_counts = packets.get('NIX00065')
    counts_1d = []
    raw_count_index = 0
    for cb in tmp['continuation_bits']:
        if cb == 0:
            counts_1d.append(1)
        elif cb == 1:
            cur_count = raw_counts[raw_count_index]
            counts_1d.append(cur_count)
            raw_count_index += cb
        elif cb == 2:
            cur_count = raw_counts[raw_count_index:(raw_count_index + cb)]
            combined_count = int.from_bytes(
                (cur_count[0] + 1).to_bytes(2, 'big')
                + cur_count[1].to_bytes(1, 'big'), 'big')
            counts_1d.append(combined_count)
            raw_count_index += cb
        else:
            raise ValueError(
                f'Continuation bits value of {cb} not allowed (0, 1, 2)')
    # BUG FIX: uint16 overflowed for two-byte counts (max value 65791); the
    # destination cube below is uint32.
    counts_1d = np.array(counts_1d, np.uint32)

    # Per-sample slices of the flat id/count streams.
    end_inds = np.cumsum(data['num_samples'])
    start_inds = np.hstack([0, end_inds[:-1]])
    dd = [(tmp['pixel_id'][s:e], tmp['detector_id'][s:e],
           tmp['channel'][s:e], counts_1d[s:e])
          for s, e in zip(start_inds.astype(int), end_inds)]

    # Scatter into the (time, detector, pixel, energy) cube.
    counts = np.zeros(
        (len(unique_times), num_detectors, num_pixels, num_energies),
        np.uint32)
    for i, (pid, did, cid, cc) in enumerate(dd):
        counts[time_indices[i], did, pid, cid] = cc

    # Keep one data row per unique time.
    sub_index = np.searchsorted(data['start_time'], unique_times)
    data = data[sub_index]

    data['time'] = Time(scet_to_datetime(f'{int(control["time_stamp"][0])}:0'))\
        + data['start_time'] + data['integration_time']/2
    data['timedel'] = data['integration_time']
    data['counts'] = counts * u.ct
    data['control_index'] = control['index'][0]

    data.remove_columns(['start_time', 'integration_time', 'num_samples'])

    return cls(control=control, data=data)