def _get_time(cls, control, num_energies, packets, pad_after):
    """Compute bin-centre times, durations and overall time range for the samples.

    Parameters
    ----------
    control : table-like
        Must provide 'num_samples', 'integration_time', 'scet_coarse' and
        'scet_fine' columns (one row per packet).
    num_energies : `int`
        Number of energy channels; times/durations are reshaped to
        ``(-1, num_energies)``.
    packets : packet container
        Source of the per-sample offsets (NIX00485).
    pad_after : `int`
        Number of rows to pad (edge-replicated) so the reshape works out.

    Returns
    -------
    `tuple`
        ``(duration, time, scet_timerange)``.
    """
    times = []
    durations = []
    start = 0
    for i, (ns, it) in enumerate(control['num_samples', 'integration_time']):
        # Per-sample offsets relative to this packet's base time, scaled by
        # the packet's integration time.
        off_sets = packets.get_value('NIX00485')[start:start + ns] * it
        base_time = SCETime(control["scet_coarse"][i], control["scet_fine"][i])
        start_times = base_time + off_sets
        end_times = base_time + off_sets + it
        # Represent each bin by its centre.
        cur_time = start_times + (end_times - start_times) / 2
        times.extend(cur_time)
        durations.extend([it] * ns)
        start += ns

    # Pad with edge values so the flat list reshapes into whole
    # (time, energy) rows.
    time = np.array([(t.coarse, t.fine) for t in times])
    time = np.pad(time, ((0, pad_after), (0, 0)), mode='edge')
    time = SCETime(time[:, 0], time[:, 1]).reshape(-1, num_energies)
    duration = SCETimeDelta(
        np.pad(np.hstack(durations), (0, pad_after)).reshape(-1, num_energies))
    # Range spans from half a bin before the first centre to half a bin
    # after the last centre.
    scet_timerange = SCETimeRange(start=time[0, 0] - duration[0, 0] / 2,
                                  end=time[-1, -1] + duration[-1, 0] / 2)

    return duration, time, scet_timerange
def __init__(self, *, service_type, service_subtype, ssid, control, data, **kwargs):
    """Create a new LevelB object.

    Parameters
    ----------
    service_type : `int`
        TM packet service type.
    service_subtype : `int`
        TM packet service subtype.
    ssid : `int`
        Science structure ID.
    control : ´Table´
        the control table
    data : ´Table´
        the data table
    """
    self.level = 'LB'
    self.service_type = service_type
    self.service_subtype = service_subtype
    self.ssid = ssid
    self.control = control
    self.data = data
    self.type = ''

    # TODO better encapsulated time handling?
    # NOTE(review): round-tripping through SCETime presumably normalises the
    # raw coarse value and extracts the time-sync flag — confirm against
    # the SCETime implementation.
    times = [
        SCETime(c, f) for c, f in self.control[['scet_coarse', 'scet_fine']]
    ]
    self.control['scet_coarse'] = [t.coarse for t in times]
    self.control['time_sync'] = [t.time_sync for t in times]

    # TODO check if need to sort before this
    self.obt_beg = SCETime(self.control['scet_coarse'][0],
                           self.control['scet_fine'][0])
    self.obt_end = SCETime(self.control['scet_coarse'][-1],
                           self.control['scet_fine'][-1])
    self.obt_avg = self.obt_beg + (self.obt_end - self.obt_beg) / 2
def test_time_broadcast():
    """SCETime broadcasts over array-valued offsets, coarse and fine inputs."""
    t = SCETime(0, 0)
    # Adding an array quantity broadcasts the scalar time to the array shape.
    t1 = t + np.arange(100) * u.s
    # BUG FIX: `np.int` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin `int` is what the alias resolved to anyway.
    t2 = SCETime(np.arange(100, dtype=int), 0)
    t3 = SCETime(0, np.arange(100, dtype=int))
    assert t1.shape == (100,)
    assert t2.shape == (100,)
    assert t3.shape == (100,)
def test_time_lt():
    """Ordering comparisons and string round-trip of SCETime."""
    earlier = SCETime(coarse=123, fine=45)
    later = SCETime(coarse=124, fine=45)
    assert earlier < later
    assert earlier <= later
    assert later > earlier
    assert later >= earlier
    # Distinct objects, comparable by value.
    assert later is not earlier
    assert later == SCETime.from_string(str(later))
def test_time_minmax():
    """Bounds of the representable SCE time range and overflow behaviour."""
    assert SCETime.min_time() == SCETime(coarse=0, fine=0)
    assert SCETime.max_time() == SCETime(coarse=MAX_COARSE, fine=MAX_FINE)
    # TODO enable after https://github.com/i4Ds/STIXCore/issues/102
    # assert SCETime.min_time() - SCETime(coarse=0, fine=1) == SCETime.min_time()
    # Stepping past the maximum representable time must fail.
    with pytest.raises(ValueError, match=r'Coarse time must be in range.*'):
        overflowed = SCETime.max_time() + SCETimeDelta(0, 1)
        print(overflowed)
def test_find_version(idb_manager):
    """IDB version lookup by onboard time, including the default fallback."""
    matched = idb_manager.get_idb(obt=SCETime(coarse=631155005, fine=0))
    assert matched.get_idb_version() == "2.26.31"
    matched.close()
    # fall back to the default
    fallback = idb_manager.get_idb(obt=SCETime(coarse=2 ** 31 - 1, fine=0))
    assert fallback.get_idb_version() == "2.26.35"
    assert idb_manager.find_version(obt=None) == "2.26.31"
def test_force_version_str(idb_manager):
    """A forced IDB version wins over any OBT-based lookup."""
    idb_manager.download_version("2.26.35", force=True)
    manager = IDBManager(test_data.idb.DIR, force_version='2.26.35')
    forced = manager.get_idb("any")
    assert forced.get_idb_version() == '2.26.35'
    assert forced.filename == test_data.idb.DIR / 'v2.26.35' / 'idb.sqlite'
    # OBT-based lookups return the same forced version at both extremes.
    assert forced == manager.get_idb(obt=SCETime.min_time())
    assert forced == manager.get_idb(obt=SCETime.max_time())
def test_level1_processor_generate_filename(config, product):
    """Filename generation for L1 quicklook and science products.

    QL names carry only the date; science names carry a start-end UTC range.
    """
    # The config mock is consumed twice (processor construction and filename
    # generation), hence side_effect is re-armed before each use.
    config.get.side_effect = [td.soop.DIR, '.']
    processor = FitsL1Processor('some/path')
    config.get.side_effect = [td.soop.DIR, '.']
    product.control.colnames = []
    beg = SCETime(coarse=0, fine=0)
    end = SCETime(coarse=1, fine=2**15)
    avg = beg + (end - beg) / 2
    product.obt_beg = beg
    product.obt_avg = avg
    product.obt_end = end
    product.obs_beg = beg.to_datetime()
    product.obs_avg = avg.to_datetime()
    product.obs_end = end.to_datetime()
    product.type = 'ql'
    product.scet_timerange = SCETimeRange(start=SCETime(0, 0),
                                          end=SCETime(coarse=0, fine=2**16 - 1))
    product.utc_timerange = product.scet_timerange.to_timerange()
    product.level = 'L1'
    product.name = 'a_name'
    filename = processor.generate_filename(product, version=1)
    assert filename == 'solo_L1_stix-ql-a-name_20000101_V01.fits'
    # Science products additionally encode the observation time span.
    product.type = 'sci'
    filename = processor.generate_filename(product, version=1)
    assert filename == 'solo_L1_stix-sci-a-name_20000101T000000-20000101T000001_V01.fits'
def test_time_add():
    """Adding quantities/deltas to SCETime, including the reflected form."""
    base = SCETime(123, 456)
    # A time plus a time is meaningless.
    with pytest.raises(TypeError, match=r'Only Quantities and SCETimeDelta.*'):
        _ = base + SCETime(0, 1)
    # Non-time quantities cannot be converted.
    with pytest.raises(ValueError, match=r'.*are not convertible'):
        _ = base + (1*u.m)
    step = (1 + 1/MAX_FINE) * u.s
    # test right add
    right_sum = base + step
    # test left add
    left_sum = step + base
    assert right_sum == left_sum
    assert right_sum.coarse == 124
    assert right_sum.fine == 457
def test_timedelta_add():
    """Addition between SCETimeDelta, SCETime and plain quantities."""
    t1 = SCETime(1, 1)
    dt1 = SCETimeDelta(100, 1)
    dt2 = SCETimeDelta(200, 2)
    # test time plus timedelta (must commute)
    t1_dt1 = dt1 + t1
    dt1_t1 = t1 + dt1
    assert t1_dt1 == dt1_t1
    assert t1_dt1.coarse == 101
    assert t1_dt1.fine == 2
    # BUG FIX: the match pattern was an f-string with no placeholders;
    # regex patterns belong in raw strings.
    with pytest.raises(ValueError, match=r'.*are not convertible'):
        _ = dt1 + (1*u.m)
    # test timedelta plus timedelta/quantity
    dt1_dt2 = dt1 + dt2
    dt1_float = dt1 + (200+2/MAX_FINE)*u.s
    dt2_dt1 = dt2 + dt1
    float_dt2 = (100 + 1/MAX_FINE) * u.s + dt2
    assert dt1_dt2 == dt2_dt1
    assert dt1_float == dt1_dt2
    assert float_dt2 == dt1_dt2
    assert dt1_dt2.coarse == 300
    assert dt1_dt2.fine == 3
def test_timedelta_init():
    """Constructors and input validation of SCETimeDelta."""
    dt1 = SCETimeDelta(0, 0)
    dt2 = SCETimeDelta.from_float(0*u.s)
    dt3 = SCETimeDelta(dt1)
    assert dt1 == dt2
    assert dt2 == dt3
    assert dt1 == dt3
    # Out-of-range and non-integer inputs must be rejected.
    with pytest.raises(ValueError):
        _ = SCETimeDelta(2 ** 32 + 1, 0)
    # BUG FIX: the following two checks constructed SCETime (copy-paste from
    # test_time_init) and therefore never exercised SCETimeDelta validation.
    with pytest.raises(ValueError):
        SCETimeDelta(0, 2**16+1)
    with pytest.raises(ValueError):
        SCETimeDelta(0.0, 0)
def _get_time(self):
    """Return bin-centre times and per-sample durations for all packets.

    Returns
    -------
    `tuple`
        ``(times, duration)`` where ``times`` are astropy ``Time`` bin
        centres and ``duration`` is an array quantity of integration times.
    """
    # Replicate packet time for each sample
    base_times = Time(
        list(
            chain(*[[
                SCETime(coarse=self["scet_coarse"][i], fine=self["scet_fine"][i])
            ] * n for i, n in enumerate(self['num_samples'])])))
    # For each sample generate sample number and multiply by duration and apply unit
    start_delta = np.hstack([
        (np.arange(ns) * it) for ns, it in self[['num_samples', 'integration_time']]
    ])
    # hstack op loses unit
    start_delta = start_delta.value * self['integration_time'].unit
    # Per-sample integration time, replicated per packet.
    duration = np.hstack([
        np.ones(num_sample) * int_time for num_sample, int_time in self[
            ['num_samples', 'integration_time']]
    ])
    duration = duration.value * self['integration_time'].unit
    # TODO Write out and simplify
    end_delta = start_delta + duration
    # Add the delta time to base times and convert to relative from start time
    times = base_times + start_delta + (end_delta - start_delta) / 2
    # times -= times[0]
    return times, duration
def _get_time(self):
    """Return bin-centre times, per-sample durations and the covered range.

    Returns
    -------
    `tuple`
        ``(times, durations, tr)`` — SCETime bin centres, SCETimeDelta
        integration times and the SCETimeRange covering the observation.
    """
    # Replicate the start time of each for the number of samples in that packet
    base_coarse, base_fine = zip(
        *[([ct] * ns, [ft] * ns) for ns, ct, ft in self[
            ['num_samples', 'scet_coarse', 'scet_fine']]])
    base_coarse = np.hstack(base_coarse)
    base_fine = np.hstack(base_fine)
    bases = SCETime(base_coarse, base_fine)

    # Create start time for each time bin by multiplying the duration by the sample number
    deltas = SCETimeDelta(
        np.hstack([(np.arange(ns) * it)
                   for ns, it in self[['num_samples', 'integration_time']]
                   ]))

    # Create integration time for each sample in each packet by replicating the duration for
    # number of sample in each packet
    durations = SCETimeDelta(
        np.hstack([
            np.ones(num_sample) * int_time
            for num_sample, int_time in self[['num_samples', 'integration_time']]
        ]))

    # Add the delta time to base times and convert to bin centers
    times = bases + deltas + (durations / 2)

    # Create a time range object covering the total observation time
    tr = SCETimeRange(start=bases[0], end=bases[-1] + deltas[-1] + durations[-1])

    return times, durations, tr
def from_levelb(cls, levelb, parent=''): packets, idb_versions, control = HKProduct.from_levelb(levelb, parent=parent) # Create array of times as dt from date_obs times = SCETime(control['scet_coarse'], control['scet_fine']) # Data data = Data() data['time'] = times data['timedel'] = SCETimeDelta(0, 0) for nix, param in packets.data[0].__dict__.items(): if nix.startswith("NIXG") or nix == 'NIX00020': continue name = param.idb_info.get_product_attribute_name() data.add_basic(name=name, nix=nix, attr='value', packets=packets) data['control_index'] = range(len(control)) return cls(service_type=packets.service_type, service_subtype=packets.service_subtype, ssid=packets.ssid, control=control, data=data, idb_versions=idb_versions)
def test_deltatime_sub():
    """Subtraction involving SCETimeDelta, SCETime and quantities."""
    t1 = SCETime(100, 2)
    dt1 = SCETimeDelta(100, 1)
    dt2 = SCETimeDelta(200, 2)
    with pytest.raises(TypeError, match=r'Unsupported operation for types SCETimeDelta and int'):
        _ = dt1 - 1
    with pytest.raises(TypeError, match=r'Quantity could not be converted to SCETimeDelta'):
        _ = dt1 - (1*u.m)
    # test sub deltatimes and quantities
    dt1_dt2 = dt1 - dt2
    dt1_float = dt1 - (200 + 2 / MAX_FINE) * u.s
    assert dt1_dt2 == dt1_float
    assert dt1_dt2.coarse == -100
    assert dt1_dt2.fine == -1
    dt2_dt1 = dt2 - dt1
    float_dt1 = (200 + 2/MAX_FINE) * u.s - dt1
    assert dt2_dt1 == float_dt1
    assert dt2_dt1.coarse == 100
    assert dt2_dt1.fine == 1
    # test sub times
    # BUG FIX: the match pattern was an f-string with no placeholders;
    # regex patterns belong in raw strings.
    with pytest.raises(TypeError, match=r'Unsupported operation for types.*'):
        dt1 - t1
    t2 = t1 - dt1
    assert t2.coarse == 0
    assert t2.fine == 1
    # Subtracting past the epoch must fail.
    with pytest.raises(ValueError, match=r'Coarse time must be in range.*'):
        t1 - dt2
def test_ql_l0():
    """Reading an L0 light-curve FITS file yields an LCL0 product."""
    product = Product(test_data.products.L0_LightCurve_fits)
    assert isinstance(product, LCL0)
    assert product.level == 'L0'
    assert product.service_type == 21
    assert product.service_subtype == 6
    assert product.ssid == 30
    # TODO not really a test just from output
    assert product.obs_beg == SCETime(coarse=664146182, fine=58989)
def test_ql_lb():
    """Reading a level-B FITS file yields a LevelB product."""
    product = Product(test_data.products.LB_21_6_30_fits)
    assert isinstance(product, LevelB)
    assert product.level == 'LB'
    assert product.service_type == 21
    assert product.service_subtype == 6
    assert product.ssid == 30
    # TODO not really a test just from output
    assert product.obt_beg == SCETime(coarse=664148503, fine=10710)
def test_levelb_processor_generate_filename():
    """Filename generation for a level-B product."""
    with patch('stixcore.products.level0.quicklookL0.QLProduct') as mock_product:
        processor = FitsLBProcessor('some/path')
        mock_product.control.colnames = []
        mock_product.service_type = 21
        mock_product.service_subtype = 6
        mock_product.ssid = 20
        mock_product.obt_avg = SCETime(43200, 0)
        mock_product.level = 'LB'
        mock_product.name = 'a_name'
        generated = processor.generate_filename(mock_product, version=1)
        assert generated == 'solo_LB_stix-21-6-20_0000000000_V01.fits'
def data_root(self, value):
    """Set the data path root.

    Parameters
    ----------
    value : `str` or `pathlib.Path`
        Path to the directory with all IDB versions

    Raises
    ------
    ValueError
        If the path does not exist, the version history file cannot be read,
        or a listed IDB version could not be downloaded.
    """
    path = Path(value)
    if not path.exists():
        raise ValueError(f'path not found: {value}')
    self._data_root = path
    try:
        with open(self._data_root / IDB_VERSION_HISTORY_FILE) as f:
            self.history = json.load(f)
            # Convert the raw coarse/fine dicts of each validity period
            # into SCETime objects in place.
            for item in self.history:
                item['validityPeriodOBT'][0] = SCETime(
                    coarse=item['validityPeriodOBT'][0]['coarse'],
                    fine=item['validityPeriodOBT'][0]['fine'])
                item['validityPeriodOBT'][1] = SCETime(
                    coarse=item['validityPeriodOBT'][1]['coarse'],
                    fine=item['validityPeriodOBT'][1]['fine'])
                try:
                    # Best effort: make each listed version locally
                    # available, downloading on demand.
                    if not self.has_version(item['version']):
                        available = self.download_version(item['version'], force=False)
                        if not available:
                            raise ValueError(
                                'was not able to download IDB version '
                                f'{item["version"]} into {self._data_root}'
                            )
                except EnvironmentError:
                    # Download/IO problems are tolerated here; a missing
                    # version only matters once it is actually requested.
                    pass
    except EnvironmentError:
        raise ValueError(f'No IDB version history found at: '
                         f'{self._data_root / IDB_VERSION_HISTORY_FILE}')
def __init__(self, bitstream):
    """
    Create a TM Data Header

    Parameters
    ----------
    bitstream : `bitstream.ConstBitstream`
        Stream the header fields are parsed from.
    """
    res = parse_bitstream(bitstream, TM_DATA_HEADER_STRUCTURE)
    # IDIOM FIX: a list comprehension was used purely for its side effect;
    # a plain loop states the intent and avoids building a throwaway list.
    for key, value in res['fields'].items():
        # Spare/padding fields are not exposed as attributes.
        if not key.startswith('spare'):
            setattr(self, key, value)
    self.datetime = SCETime(coarse=self.scet_coarse, fine=self.scet_fine)
def test_level0_processor_generate_filename():
    """Filename generation for QL and science L0 products.

    Science filenames carry a start-end OBT range and, when the columns are
    present in the control table, request-id and TC packet sequence-control
    suffixes.
    """
    with patch('stixcore.products.level0.quicklookL0.QLProduct') as product:
        processor = FitsL0Processor('some/path')
        product.control.colnames = []
        product.type = 'ql'
        product.scet_timerange = SCETimeRange(start=SCETime(0, 0),
                                              end=SCETime(1234, 1234))
        product.level = 'LB'
        product.name = 'a_name'
        filename = processor.generate_filename(product, version=1)
        assert filename == 'solo_LB_stix-ql-a-name_0000000000_V01.fits'

    with patch('stixcore.products.level0.scienceL0.ScienceProduct') as product:
        product.type = 'sci'
        product.control.colnames = []
        product.obs_avg.coarse = 0
        product.level = 'L0'
        product.name = 'a_name'
        product.scet_timerange = SCETimeRange(start=SCETime(12345, 6789),
                                              end=SCETime(98765, 4321))
        filename = processor.generate_filename(product, version=1)
        assert filename == 'solo_L0_stix-sci-a-name_0000012345-0000098765_V01.fits'
        # Back the mocked control table with real column data so the
        # processor can read request_id / tc_packet_seq_control.
        dummy_control_data = {
            'request_id': [123456],
            'tc_packet_seq_control': [98765]
        }
        product.control.__getitem__.side_effect = dummy_control_data.__getitem__
        # With a request_id column the id is appended to the filename.
        product.control.colnames = ['request_id']
        filename = processor.generate_filename(product, version=1)
        assert filename == 'solo_L0_stix-sci-a-name' \
                           '_0000012345-0000098765_V01_123456.fits'
        # With both columns the sequence control is appended as well.
        product.control.colnames = ['request_id', 'tc_packet_seq_control']
        filename = processor.generate_filename(product, version=1)
        assert filename == 'solo_L0_stix-sci-a-name' \
                           '_0000012345-0000098765_V01_123456-98765.fits'
def test_timerange():
    """Containment tests and expansion of SCETimeRange."""
    tr = SCETimeRange(start=SCETime(coarse=100, fine=0), end=SCETime(coarse=200, fine=0))
    inside_point = SCETime(coarse=150, fine=0)
    inside_range = SCETimeRange(start=SCETime(coarse=150, fine=0),
                                end=SCETime(coarse=160, fine=0))
    overlapping_range = SCETimeRange(start=SCETime(coarse=150, fine=0),
                                     end=SCETime(coarse=250, fine=0))
    outside_point = SCETime(coarse=250, fine=0)
    assert inside_point in tr
    assert outside_point not in tr
    assert inside_range in tr
    assert overlapping_range not in tr
    # After expanding by the outliers they become part of the range.
    tr.expand(outside_point)
    tr.expand(overlapping_range)
    assert outside_point in tr
    assert overlapping_range in tr
def test_level0_processor_generate_primary_header(datetime, product):
    """Primary-header keywords generated for an L0 product."""
    processor = FitsL0Processor('some/path')
    # Pin the file-creation timestamp produced by datetime.now().
    datetime.now().isoformat.return_value = '1234-05-07T01:02:03.346'
    product.obs_beg = SCETime(coarse=0, fine=0)
    product.obs_avg = SCETime(coarse=0, fine=2**15)
    product.obs_end = SCETime(coarse=1, fine=2**15)
    product.scet_timerange = SCETimeRange(start=product.obs_beg, end=product.obs_end)
    product.raw = ['packet1.xml', 'packet2.xml']
    product.parent = ['lb1.fits', 'lb2.fts']
    product.service_type = 1
    product.service_subtype = 2
    product.ssid = 3
    product.level = 'L0'
    # Expected keyword values.
    # NOTE(review): this local name shadows the module-level `test_data`
    # helper used by other tests in this file — consider renaming.
    test_data = {
        'FILENAME': 'a_filename.fits',
        'DATE': '1234-05-07T01:02:03.346',
        'OBT_BEG': 0.0,
        'OBT_END': 1.5000076295109483,
        'DATE_OBS': '0000000000:00000',
        'DATE_BEG': '0000000000:00000',
        'DATE_AVG': '0000000000:49152',
        'DATE_END': '0000000001:32768',
        'STYPE': 1,
        'SSTYPE': 2,
        'SSID': 3,
        'TIMESYS': "OBT",
        'LEVEL': 'L0',
        'RAW_FILE': 'packet1.xml;packet2.xml',
        'PARENT': 'lb1.fits;lb2.fts'
    }
    header = processor.generate_primary_header('a_filename.fits', product)
    # Only keywords present in the expectation table are checked.
    for name, value, *comment in header:
        if name in test_data.keys():
            assert value == test_data[name]
def from_levelb(cls, levelb, parent):
    """Create the product from a LevelB product.

    Parameters
    ----------
    levelb : LevelB
        The raw LevelB product to parse.
    parent : `str`
        Name of the parent file, recorded for provenance.
    """
    packets, idb_versions = GenericProduct.getLeveL0Packets(levelb)
    control = Control()
    control['scet_coarse'] = packets.get('scet_coarse')
    control['scet_fine'] = packets.get('scet_fine')
    control['integration_time'] = 0
    # Use the smallest unsigned integer dtype that can hold the index.
    control['index'] = np.arange(len(control)).astype(
        get_min_uint(len(control)))
    control['raw_file'] = levelb.control['raw_file']
    control['packet'] = levelb.control['packet']
    control['parent'] = parent

    # Create array of times as dt from date_obs
    times = SCETime(control['scet_coarse'], control['scet_fine'])

    # Data
    data = Data()
    data['time'] = times
    data['timedel'] = SCETimeDelta(0, 0)

    # NOTE(review): these parameters apparently need reshaping when present
    # in the packet data — confirm the intended layout.
    reshape_nixs = {'NIX00103', 'NIX00104'}
    reshape = False
    if reshape_nixs.intersection(packets.data[0].__dict__.keys()):
        reshape = True
    for nix, param in packets.data[0].__dict__.items():
        name = param.idb_info.get_product_attribute_name()
        data.add_basic(name=name, nix=nix, attr='value', packets=packets,
                       reshape=reshape)
    data['control_index'] = np.arange(len(control)).astype(
        get_min_uint(len(control)))

    return cls(service_type=packets.service_type,
               service_subtype=packets.service_subtype,
               ssid=packets.ssid,
               control=control,
               data=data,
               idb_versions=idb_versions,
               packets=packets)
def test_levelb_processor_generate_primary_header(datetime, product):
    """Primary-header keywords generated for a level-B product."""
    processor = FitsLBProcessor('some/path')
    # Pin the file-creation timestamp produced by datetime.now().
    datetime.now().isoformat.return_value = '1234-05-07T01:02:03.346'
    beg = SCETime(coarse=0, fine=0)
    end = SCETime(coarse=1, fine=2**15)
    product.control = {
        "scet_coarse": [beg.coarse, end.coarse],
        "scet_fine": [beg.fine, end.fine]
    }
    product.raw = ['packet1.xml', 'packet2.xml']
    product.parent = ['packet1.xml', 'packet2.xml']
    product.level = 'LB'
    product.service_type = 1
    product.service_subtype = 2
    product.ssid = 3
    product.obt_beg = beg
    product.obt_end = end
    product.date_obs = beg
    product.date_beg = beg
    product.date_end = end
    # Expected keyword values; LB headers use OBT string representations.
    test_data = {
        'FILENAME': 'a_filename.fits',
        'DATE': '1234-05-07T01:02:03.346',
        'OBT_BEG': beg.to_string(),
        'OBT_END': end.to_string(),
        'DATE_OBS': beg.to_string(),
        'DATE_BEG': beg.to_string(),
        'DATE_END': end.to_string(),
        'STYPE': product.service_type,
        'SSTYPE': product.service_subtype,
        'SSID': product.ssid,
        'TIMESYS': "OBT",
        'LEVEL': 'LB',
        'RAW_FILE': 'packet1.xml;packet2.xml',
        'PARENT': 'packet1.xml;packet2.xml'
    }
    header = processor.generate_primary_header('a_filename.fits', product)
    # Only keywords present in the expectation table are checked.
    for name, value, *comment in header:
        if name in test_data.keys():
            assert value == test_data[name]
def test_level1_processor_generate_primary_header(config, product):
    """Primary-header keywords generated for an L1 product."""
    config.get.side_effect = [td.soop.DIR, '.']
    processor = FitsL1Processor('some/path')
    beg = SCETime(coarse=683769519, fine=0)
    end = SCETime(coarse=beg.coarse + 24 * 60 * 60)
    # BUG FIX: removed a leftover `beg + (end - beg) / 2` expression whose
    # result was discarded (presumably once assigned to an unused `avg`).
    product.scet_timerange = SCETimeRange(start=beg, end=end)
    product.utc_timerange = product.scet_timerange.to_timerange()
    product.raw = ['packet1.xml', 'packet2.xml']
    product.parent = ['l01.fits', 'l02.fts']
    product.level = 'L1'
    product.type = "ql"
    product.service_type = 1
    product.service_subtype = 2
    product.ssid = 3
    # Expected keyword values; L1 headers use UTC representations.
    test_data = {
        'FILENAME': 'a_filename.fits',
        'OBT_BEG': beg.as_float().value,
        'OBT_END': end.as_float().value,
        'DATE_OBS': product.utc_timerange.start.fits,
        'DATE_BEG': product.utc_timerange.start.fits,
        'DATE_AVG': product.utc_timerange.center.fits,
        'DATE_END': product.utc_timerange.end.fits,
        'STYPE': product.service_type,
        'SSTYPE': product.service_subtype,
        'SSID': product.ssid,
        'TIMESYS': 'UTC',
        'LEVEL': 'L1',
        'OBS_ID': 'SSTX_040A_000_000_5Md2_112;SSTX_040A_000_000_vFLg_11Y',
        'OBS_TYPE': '5Md2;vFLg',
        'OBS_MODE': 'STIX_ANALYSIS;STIX_BASIC',
        'SOOPNAME': '',
        'SOOPTYPE': '',
        'TARGET': '',
        'RSUN_ARC': 1589.33,
        'HGLT_OBS': -0.32,
        'HGLN_OBS': -66.52,
        'RAW_FILE': 'packet1.xml;packet2.xml',
        'PARENT': 'l01.fits;l02.fts'
    }
    header = processor.generate_primary_header('a_filename.fits', product)
    # Floats are compared with tolerance, everything else exactly.
    for name, value, *comment in header:
        if name in test_data.keys():
            if isinstance(value, float):
                assert np.allclose(test_data[name], value)
            else:
                assert value == test_data[name]
def test_time_init():
    """Constructors and input validation of SCETime."""
    from_ints = SCETime(0, 0)
    from_quantity = SCETime.from_float(0*u.s)
    from_time = SCETime(from_ints)
    # All three construction paths must agree.
    assert from_ints == from_quantity
    assert from_quantity == from_time
    assert from_ints == from_time
    # Out-of-range and non-integer inputs must be rejected.
    with pytest.raises(ValueError, match=r'Coarse time must be in range.*'):
        SCETime(-1, 0)
    with pytest.raises(ValueError, match=r'Fine time must be in range.*'):
        SCETime(0, -1)
    with pytest.raises(ValueError):
        _ = SCETime(2 ** 44-1, 0)
    with pytest.raises(ValueError):
        SCETime(0, 2**16+1)
    with pytest.raises(ValueError):
        SCETime(0.0, 0)
def from_packets(cls, packets):
    """Build the control table from parsed packets.

    Parameters
    ----------
    packets : packet container
        Parsed packets providing the NIX parameter values.

    Returns
    -------
    The de-duplicated control table.
    """
    control = cls()
    control.add_basic(name='tc_packet_id_ref', nix='NIX00001', packets=packets,
                      dtype=np.uint16)
    control.add_basic(name='tc_packet_seq_control', nix='NIX00002', packets=packets,
                      dtype=np.uint16)
    control.add_basic(name='request_id', nix='NIX00037', packets=packets,
                      dtype=np.uint32)
    control.add_basic(name='time_stamp', nix='NIX00402', packets=packets)
    # Values above 32 bits are treated as packed SCET (coarse << 16 | fine),
    # otherwise as plain coarse seconds.
    # NOTE(review): the 2**32 - 1 threshold looks like a heuristic — confirm
    # it matches how NIX00402 is actually encoded.
    if np.any(control['time_stamp'] > 2**32 - 1):
        coarse = control['time_stamp'] >> 16
        fine = control['time_stamp'] & (1 << 16) - 1
    else:
        coarse = control['time_stamp']
        fine = 0
    control['time_stamp'] = SCETime(coarse, fine)
    try:
        control['num_substructures'] = np.array(
            packets.get_value('NIX00403'), np.uint16).reshape(1, -1)
        control.add_meta(name='num_substructures', nix='NIX00403', packets=packets)
    except AttributeError:
        # Parameter absent from these packets; not an error.
        logger.debug('NIX00403 not found')
    control = unique(control)
    return control
def test_time_sub():
    """Subtraction involving SCETime, SCETimeDelta and quantities."""
    ref = SCETime(123, 456)
    with pytest.raises(TypeError, match=r'Only quantities, SCETime and SCETimeDelt.*'):
        _ = ref - 1
    # NOTE(review): this uses `+` although the test covers subtraction —
    # presumably copied from test_time_add; confirm whether `-` was intended.
    with pytest.raises(ValueError, match=r'.*are not convertible'):
        _ = ref + (1*u.m)
    # test sub
    shifted = ref - (1 + 1/MAX_FINE) * u.s
    assert shifted.coarse == 122
    assert shifted.fine == 455
    # test rsub
    with pytest.raises(TypeError, match=r'unsupported operand.*'):
        shifted = (1 + 1/MAX_FINE) * u.s - ref
    # Test subtract to times
    delta = ref - shifted
    assert isinstance(delta, SCETimeDelta)
    assert delta.coarse == 1
    assert delta.fine == 1
    delta = shifted - ref
    assert isinstance(delta, SCETimeDelta)
    assert delta.coarse == -1
    assert delta.fine == -1
    # Test subtract deltatime
    restored = shifted - delta
    assert isinstance(restored, SCETime)
    assert restored.coarse == 123
    assert restored.fine == 456
    # Can't subtract time from a delta time
    with pytest.raises(TypeError, match=r'Unsupported operation for '
                                        r'types SCETimeDelta and SCETime'):
        _ = delta - ref
def from_levelb(cls, levelb, parent=''):
    """Create the flare-flag product from a LevelB product.

    Parameters
    ----------
    levelb : LevelB
        The raw LevelB product to parse.
    parent : `str`, optional
        Name of the parent file, recorded for provenance.
    """
    packets, idb_versions = GenericProduct.getLeveL0Packets(levelb)
    control = Control()
    control['scet_coarse'] = packets.get('scet_coarse')
    control['scet_fine'] = packets.get('scet_fine')
    control['index'] = np.arange(len(control)).astype(get_min_uint(len(control)))

    # When the packets are parsed empty packets are dropped but in LB we don't parse so this
    # is not known need to compare control and levelb.control and only use matching rows
    if len(levelb.control) > len(control):
        matching_index = np.argwhere(
            np.in1d(levelb.control['scet_coarse'], np.array(packets.get('scet_coarse'))))
        control['raw_file'] = levelb.control['raw_file'][matching_index].reshape(-1)
        control['packet'] = levelb.control['packet'][matching_index].reshape(-1)
    else:
        control['raw_file'] = levelb.control['raw_file'].reshape(-1)
        control['packet'] = levelb.control['packet'].reshape(-1)

    control['parent'] = parent

    tmp = Data()
    tmp.add_basic(name='ubsd_counter', nix='NIX00285', packets=packets, dtype=np.uint32)
    tmp.add_basic(name='pald_counter', nix='NIX00286', packets=packets, dtype=np.uint32)
    tmp.add_basic(name='num_flares', nix='NIX00294', packets=packets, dtype=np.uint16)

    colnames = ['start_scet_coarse', 'end_scet_coarse', 'highest_flareflag',
                'tm_byte_volume', 'average_z_loc', 'average_y_loc', 'processing_mask']

    flares = Data()
    if tmp['num_flares'].sum() > 0:
        flares.add_basic(name='start_scet_coarse', nix='NIX00287', packets=packets)
        flares.add_basic(name='end_scet_coarse', nix='NIX00288', packets=packets)
        flares.add_basic(name='highest_flareflag', nix='NIX00289', packets=packets,
                         dtype=np.byte)
        flares.add_basic(name='tm_byte_volume', nix='NIX00290', packets=packets,
                         dtype=np.byte)
        flares.add_basic(name='average_z_loc', nix='NIX00291', packets=packets,
                         dtype=np.byte)
        flares.add_basic(name='average_y_loc', nix='NIX00292', packets=packets,
                         dtype=np.byte)
        flares.add_basic(name='processing_mask', nix='NIX00293', packets=packets,
                         dtype=np.byte)

    # Flatten the per-packet flare records into per-row columns: packets with
    # no flares contribute one zero-filled row, packets with flares one row
    # per flare.
    tmp_data = defaultdict(list)
    start = 0
    for i, (ubsd, pald, n_flares) in enumerate(tmp):
        end = start + n_flares
        if n_flares == 0:
            tmp_data['control_index'].append(i)
            tmp_data['coarse'].append(control['scet_coarse'][i])
            tmp_data['fine'].append(control['scet_fine'][i])
            tmp_data['ubsd'].append(ubsd)
            tmp_data['pald'].append(pald)
            for name in colnames:
                tmp_data[name].append(0)
        else:
            # BUG FIX: the replicated values were appended as nested lists
            # (ragged columns) and, worse, `ubsd['ubsd']([ubsd] * n_flares)`
            # called a row value instead of appending, while
            # `pald['pald'].append(...)` targeted the row value rather than
            # the accumulator. Use extend on `tmp_data` so every column
            # stays flat and aligned.
            tmp_data['control_index'].extend([i] * n_flares)
            tmp_data['coarse'].extend([control['scet_coarse'][i]] * n_flares)
            tmp_data['fine'].extend([control['scet_fine'][i]] * n_flares)
            tmp_data['ubsd'].extend([ubsd] * n_flares)
            tmp_data['pald'].extend([pald] * n_flares)
            for name in colnames:
                tmp_data[name].extend(flares[name][start:end])
        start = end

    data = Data(tmp_data)
    data['time'] = SCETime(tmp_data['coarse'], tmp_data['fine'])
    data['timedel'] = SCETimeDelta(np.full(len(data), 0), np.full(len(data), 0))
    data.remove_columns(['coarse', 'fine'])

    return cls(service_type=packets.service_type,
               service_subtype=packets.service_subtype,
               ssid=packets.ssid,
               control=control,
               data=data,
               idb_versions=idb_versions,
               packets=packets)