def _create_catalog(self, src_name, key_name):
    """Build a catalog holding a single LPD source item.

    Returns a tuple of (source key string, catalog).
    """
    src = f'{src_name} {key_name}'
    catalog = SourceCatalog()
    # LPD is a train-resolved multi-pulse detector: keep the full pulse slice.
    item = SourceItem('LPD', src_name, [], key_name, slice(None, None), None)
    catalog.add_item(item)
    return src, catalog
def _create_catalog(self, src_name, key_name):
    """Build a catalog holding a single JungFrauPR source item.

    Returns a tuple of (source key string, catalog).
    """
    src = f'{src_name} {key_name}'
    catalog = SourceCatalog()
    # No pulse slicer and no value range for this detector.
    item = SourceItem('JungFrauPR', src_name, [], key_name, None, None)
    catalog.add_item(item)
    return src, catalog
def data_with_assembled(cls, tid, shape, *,
                        src_type=DataSource.BRIDGE,
                        dtype=config['SOURCE_PROC_IMAGE_DTYPE'],
                        gen='random',
                        slicer=None,
                        with_xgm=False,
                        with_digitizer=False,
                        **kwargs):
    """Build a fake processing-pipeline payload with assembled detector images.

    :param tid: train ID stamped onto the generated data.
    :param shape: shape passed to the image generator; 2D means a
        train-resolved (single-image) detector, 3D means pulse-resolved.
    :param src_type: source type recorded in the meta dict.
    :param dtype: dtype of the generated images.
    :param gen: image-generation mode forwarded to ``cls._gen_images``.
    :param slicer: pulse slicer for pulse-resolved data; ignored (forced to
        None) for 2D data, defaults to the full slice for 3D data.
    :param with_xgm: also populate random XGM intensities/positions.
    :param with_digitizer: also populate random digitizer pulse integrals.
    :param kwargs: forwarded to ``ImageData.from_array``.

    :return: tuple of (data dict as consumed by processors, ProcessedData).
    """
    imgs = cls._gen_images(gen, shape, dtype)

    processed = ProcessedData(tid)
    processed.image = ImageData.from_array(imgs, **kwargs)

    if imgs.ndim == 2:
        # train-resolved data cannot be sliced by pulse
        slicer = None
    else:
        slicer = slice(None, None) if slicer is None else slicer

    # pick a random (source, key) pair so tests do not rely on a fixed name
    src_list = [('Foo', 'oof'), ('Bar', 'rab'), ('karaboFAI', 'extra_foam')]
    src_name, key_name = random.choice(src_list)

    catalog = SourceCatalog()
    ctg = 'ABCD'
    src = f'{src_name} {key_name}'
    catalog.add_item(SourceItem(ctg, src_name, [], key_name, slicer, None))
    # register the generated source as the main detector of the catalog
    catalog._main_detector = src

    n_pulses = processed.n_pulses

    if with_xgm:
        # generate XGM data
        processed.pulse.xgm.intensity = np.random.rand(n_pulses)
        processed.xgm.intensity = random.random()
        processed.xgm.x = random.random()
        processed.xgm.y = random.random()

    if with_digitizer:
        # generate digitizer data
        digitizer = processed.pulse.digitizer
        digitizer.ch_normalizer = 'B'
        for ch in digitizer:
            digitizer[ch].pulse_integral = np.random.rand(n_pulses)

    data = {
        'processed': processed,
        'catalog': catalog,
        'meta': {
            src: {
                'timestamp.tid': tid,
                'source_type': src_type,
            }
        },
        'raw': {
            src: dict()
        },
        'assembled': {
            'data': imgs,
        }
    }

    if imgs.ndim == 2:
        # single image: "sliced" is simply the image itself
        data['assembled']['sliced'] = imgs
    else:
        data['assembled']['sliced'] = imgs[slicer]

    return data, processed
def testGeneral(self):
    """Test XgmProcessor with control and pipeline sources.

    Covers: empty catalog, invalid/valid control sources, invalid/valid
    pipeline sources, slicer update for an existing source, multiple
    intensity properties on the same source, and source removal.
    """
    data, processed = self.simple_data(1234, (2, 2))
    meta = data['meta']
    raw = data['raw']
    catalog = data['catalog']

    proc = XgmProcessor()

    # empty source
    self.assertNotIn('XGM', catalog)
    proc.process(data)

    # invalid control source
    item = SourceItem('XGM', 'xgm1', [], 'some_property', None, None, 0)
    catalog.add_item(item)
    src = f"{item.name} {item.property}"
    meta[src] = {'train_id': 12346}
    raw[src] = [100, 200, 300]
    with self.assertRaises(ProcessingError):
        proc.process(data)
    catalog.remove_item(src)

    # valid control sources
    src_pf = 'xgm1 pulseEnergy.photonFlux'
    src_bpx = 'xgm1 beamPosition.ixPos'
    src_bpy = 'xgm1 beamPosition.iyPos'
    catalog.add_item(
        SourceItem('XGM', 'xgm1', [], 'pulseEnergy.photonFlux', None, None, 0))
    catalog.add_item(
        SourceItem('XGM', 'xgm1', [], 'beamPosition.ixPos', None, None, 0))
    catalog.add_item(
        SourceItem('XGM', 'xgm1', [], 'beamPosition.iyPos', None, None, 0))
    meta.update({
        src_pf: {'train_id': 12345},
        src_bpx: {'train_id': 12345},
        src_bpy: {'train_id': 12345}
    })
    raw.update({src_pf: 0.02, src_bpx: 1e-5, src_bpy: -2e-5})
    proc.process(data)
    self.assertEqual(0.02, processed.xgm.intensity)
    self.assertEqual(1e-5, processed.xgm.x)
    self.assertEqual(-2e-5, processed.xgm.y)
    # control sources do not produce pulse-resolved intensity
    self.assertIsNone(processed.pulse.xgm.intensity)
    self._reset_processed(processed)

    # invalid pipeline source
    item = SourceItem('XGM', 'xgm1:output', [], 'some_property', None, None, 1)
    catalog.add_item(item)
    src = f"{item.name} {item.property}"
    meta[src] = {'train_id': 12346}
    raw[src] = [100, 200, 300]
    with self.assertRaises(ProcessingError):
        proc.process(data)
    catalog.remove_item(src)

    # valid pipeline source
    src_it = 'xgm1:output data.intensityTD'
    catalog.add_item(
        SourceItem('XGM', 'xgm1:output', [], 'data.intensityTD',
                   slice(None, None), (0, 1000), 1))
    meta[src_it] = {'train_id': 12346}
    raw[src_it] = [100, 200, 300]
    proc.process(data)
    self.assertListEqual([100, 200, 300],
                         processed.pulse.xgm.intensity.tolist())
    self._reset_processed(processed)

    # same pipeline source with a different slice
    catalog.add_item(
        SourceItem('XGM', 'xgm1:output', [], 'data.intensityTD',
                   slice(1, 3), (0, 1000), 1))
    proc.process(data)
    self.assertListEqual([200, 300],
                         processed.pulse.xgm.intensity.tolist())
    self._reset_processed(processed)

    # if the same source has different "intensity" properties, the value of
    # the last one will finally be set in the processed data
    src_it1 = 'xgm1:output data.intensitySa1TD'
    src_it2 = 'xgm1:output data.intensitySa2TD'
    src_it3 = 'xgm1:output data.intensitySa3TD'
    catalog.add_item(
        SourceItem('XGM', 'xgm1:output', [], 'data.intensitySa1TD',
                   slice(None, None), (0, 1000), 1))
    catalog.add_item(
        SourceItem('XGM', 'xgm1:output', [], 'data.intensitySa2TD',
                   slice(1, 4), (0, 100), 1))
    catalog.add_item(
        SourceItem('XGM', 'xgm1:output', [], 'data.intensitySa3TD',
                   slice(2, 3), (0, 10), 1))
    meta.update({
        src_it1: {'train_id': 54321},
        src_it2: {'train_id': 54321},
        src_it3: {'train_id': 54321}
    })
    raw.update({
        src_it1: [10, 20, 30],
        src_it2: [1, 2, 3],
        src_it3: [1000, 2000, 3000],
    })
    with patch("extra_foam.pipeline.processors.xgm.logger.warning"
               ) as mocked_warning:
        proc.process(data)
        mocked_warning.assert_called()
    self.assertListEqual([3000], processed.pulse.xgm.intensity.tolist())
    self._reset_processed(processed)

    # remove instrument source
    catalog.remove_item(src_pf)
    catalog.remove_item(src_bpx)
    with patch("extra_foam.pipeline.processors.xgm.logger.warning"
               ) as mocked_warning:
        proc.process(data)
        mocked_warning.assert_called()
    self.assertIsNone(processed.xgm.intensity)
    self.assertIsNone(processed.xgm.x)
    # the remaining control source is still processed
    self.assertEqual(-2e-5, processed.xgm.y)
    self.assertListEqual([3000], processed.pulse.xgm.intensity.tolist())
    self._reset_processed(processed)

    # remove one pipeline source
    catalog.remove_item(src_it3)
    with patch("extra_foam.pipeline.processors.xgm.logger.warning"
               ) as mocked_warning:
        proc.process(data)
        mocked_warning.assert_called()
    # result from data.intensitySa2TD
    self.assertListEqual([2, 3], processed.pulse.xgm.intensity.tolist())
    self._reset_processed(processed)

    # remove all pipeline sources
    catalog.clear()
    proc.process(data)
    self.assertIsNone(processed.pulse.xgm.intensity)
    self._reset_processed(processed)
def testPulseIntegral(self):
    """Test DigitizerProcessor pulse-integral handling.

    Covers: empty catalog, unknown property, valid ADQ/FastADC channel
    properties (including the pulse filter from the integral value range),
    and the moving-average reset/update behavior.
    """
    data, processed = self.simple_data(1234, (2, 2))
    meta = data['meta']
    raw = data['raw']
    catalog = data['catalog']

    proc = DigitizerProcessor()
    # stub out the Redis hash-delete so no real connection is needed
    proc._meta.hdel = MagicMock()

    category = 'Digitizer'

    # empty source
    self.assertNotIn(category, catalog)
    proc.process(data)

    # pipeline source with unknown property
    item = SourceItem(category, 'digitizer1:network', [], 'data.intensityTD',
                      slice(None, None), (0, 1000))
    catalog.add_item(item)
    src = f"{item.name} {item.property}"
    meta[src] = {'tid': 12346}
    raw[src] = [100, 200, 300]
    with self.assertRaises(ProcessingError):
        proc.process(data)
    catalog.remove_item(src)

    # pipeline source with valid property
    integral_vrange = (-1.0, 1.0)
    n_pulses = 5
    for ch in itertools.chain(self._adq_channels, self._fastadc_channels):
        if ch in self._adq_channels:
            item = SourceItem(
                category, 'digitizer1:network', [],
                f'digitizers.channel_1_{ch}.apd.pulseIntegral',
                slice(None, None), integral_vrange)
        else:
            item = SourceItem(category, 'digitizer1:channel_2.output', [],
                              f'data.peaks', slice(None, None),
                              integral_vrange)
        # only one digitizer channel in the catalog at a time
        catalog.clear()
        catalog.add_item(item)
        src = f"{item.name} {item.property}"
        meta[src] = {'tid': 12346}
        pulse_integral_gt = np.random.randn(n_pulses)
        raw[src] = pulse_integral_gt
        proc.process(data)
        np.testing.assert_array_almost_equal(
            pulse_integral_gt,
            processed.pulse.digitizer[ch].pulse_integral)
        self.assertEqual(ch, processed.pulse.digitizer.ch_normalizer)
        # test pulse filter
        np.testing.assert_array_equal(
            ((integral_vrange[0] <= pulse_integral_gt) &
             (pulse_integral_gt <= integral_vrange[1])).nonzero()[0],
            processed.pidx.kept_indices(n_pulses))
        self._reset_processed(processed)

        # test moving average
        # first reset
        with patch.object(proc._meta, "hdel") as patched:
            proc._update_moving_average({
                'reset_ma_digitizer': 1,
                'ma_window': 5
            })
            patched.assert_called_once()
        # 1st train
        raw[src] = [10, 20, 30]
        proc.process(data)
        self.assertListEqual(
            [10, 20, 30],
            processed.pulse.digitizer[ch].pulse_integral.tolist())
        # 2nd train
        raw[src] = [30, 60, 90]
        proc.process(data)
        self.assertListEqual(
            [20, 40, 60],
            processed.pulse.digitizer[ch].pulse_integral.tolist())
        self._reset_processed(processed)
def _create_catalog(self, src_name, key_name):
    """Build a catalog holding a single FastCCD pipeline source item.

    Returns a tuple of (source key string, catalog).
    """
    src = f'{src_name} {key_name}'
    catalog = SourceCatalog()
    # No pulse slicer/value range; the trailing 1 marks a pipeline source.
    item = SourceItem('FastCCD', src_name, [], key_name, None, None, 1)
    catalog.add_item(item)
    return src, catalog
def _create_catalog(self, mapping):
    """Build a catalog with one "ppt" item per (category, source) entry."""
    catalog = SourceCatalog()
    for category, source in mapping.items():
        item = SourceItem(category, source, [], "ppt", None, None)
        catalog.add_item(item)
    return catalog
def testTransformWithModules(self):
    """Test transform_euxfel with a wildcard (modular) detector source.

    Checks that per-module data is grouped under the wildcard source key and
    that only modules present in both the request and the data survive.
    """
    transformer = DataTransformer.transform_euxfel
    src_type = DataSource.BRIDGE

    # test with valid data
    data = self._gen_kb_data(
        1234, {
            'abc': [('ppt1', 1), ('ppt2', 2)],
            'xyz_1:xtdf': [('ppt', 2)],
            'xyz_2:xtdf': [('ppt', 3)]
        })
    catalog = self._create_catalog({'ABC': [('abc', 'ppt2', 0)]})
    catalog.add_item(
        SourceItem('XYZ', 'xyz_*:xtdf', [1, 2], 'ppt',
                   slice(None, None), [0, 100], 1))
    raw, meta, tid = transformer(data, catalog=catalog, source_type=src_type)
    self.assertDictEqual(
        {
            'abc ppt2': 2,
            'xyz_*:xtdf ppt': {
                'xyz_1:xtdf': {'ppt': 2},
                'xyz_2:xtdf': {'ppt': 3}
            }
        }, raw)
    self.assertDictEqual(
        {
            'abc ppt2': {'train_id': 1234, 'source_type': src_type},
            'xyz_*:xtdf ppt': {'train_id': 1234, 'source_type': src_type}
        }, meta)

    # test when requested modules are different from modules in the data
    data = self._gen_kb_data(
        1235, {
            'abc': [('ppt1', 1), ('ppt2', 2)],
            'xyz_1:xtdf': [('ppt', 2)],
            'xyz_2:xtdf': [('ppt', 3)],
            'xyz_3:xtdf': [('ppt', 4)]
        })
    catalog.remove_item('xyz_*:xtdf ppt')
    catalog.add_item(
        SourceItem('XYZ', 'xyz_*:xtdf', [0, 1, 3, 4], 'ppt',
                   slice(None, None), [0, 100], 1))
    raw, meta, tid = transformer(data, catalog=catalog, source_type=src_type)
    # only the intersection of requested and available modules is kept
    self.assertDictEqual(
        {
            'abc ppt2': 2,
            'xyz_*:xtdf ppt': {
                'xyz_1:xtdf': {'ppt': 2},
                'xyz_3:xtdf': {'ppt': 4}
            }
        }, raw)
    self.assertDictEqual(
        {
            'abc ppt2': {'train_id': 1235, 'source_type': src_type},
            'xyz_*:xtdf ppt': {'train_id': 1235, 'source_type': src_type}
        }, meta)