def test_serialization(typed_dict):
    """Round-trip a typed-dict class through amitypes dumps/loads and
    check that its name and type hints survive intact."""
    expected_name, expected_fields, original_cls = typed_dict
    restored_cls = amitypes.loads(amitypes.dumps(original_cls))
    assert restored_cls.__name__ == expected_name
    assert typing.get_type_hints(restored_cls) == expected_fields
def test_manager_partition_updates(manager_info, partition):
    """Verify the manager republishes source info when a partition change
    is injected: first an empty 'sources' update on subscribe, then one
    carrying the injected partition."""
    info, injector = manager_info

    def read_update():
        # every info-socket update is a (topic, node, pickled payload) triple
        t = info.recv_string()
        n = info.recv_string()
        return t, n, dill.loads(info.recv())

    # the first message (sent on sub connect) should have no source info yet
    topic, node, payload = read_update()
    assert topic == 'sources'
    assert node == 'manager'
    assert not payload

    # inject a partition change into the manager
    injector.partition(partition, wait=True)

    # the manager should now publish the partition on the info socket
    topic, node, payload = read_update()
    assert topic == 'sources'
    assert node == 'manager'
    # payload values are serialized types; decode before comparing
    decoded = {src: at.loads(typ) for src, typ in payload.items()}
    assert decoded == partition
def test_pickled(typed_dict):
    """A class rebuilt via amitypes must also survive a pickle round trip
    with its name and type hints unchanged."""
    expected_name, expected_fields, original_cls = typed_dict
    # first rebuild the class through the amitypes serializer
    rebuilt = amitypes.loads(amitypes.dumps(original_cls))
    assert rebuilt.__name__ == expected_name
    assert typing.get_type_hints(rebuilt) == expected_fields
    # then make sure the rebuilt class is itself picklable
    repickled = pickle.loads(pickle.dumps(rebuilt))
    assert repickled.__name__ == expected_name
    assert typing.get_type_hints(repickled) == expected_fields
def test_json_encoding(encoder, orig):
    """Round-trip *orig* through json with the given encoder class and
    compare the reloaded mapping entry by entry."""
    reloaded = json.loads(json.dumps(orig, cls=encoder))
    # both mappings must expose exactly the same keys
    assert set(reloaded) == set(orig)
    for key in orig:
        expected = orig[key]
        actual = reloaded[key]
        if isinstance(expected, np.ndarray):
            assert np.array_equal(actual, expected)
            continue
        # values serialized to strings need decoding before comparison
        if isinstance(actual, str) and not isinstance(expected, str):
            actual = amitypes.loads(actual)
        assert actual == expected
def test_hdf5_source(hdf5writer):
    """End-to-end check of the 'hdf5' Source: configuration message,
    per-event payload types (including groups), heartbeat cadence, and a
    final Unconfigure transition.

    ``hdf5writer`` is a fixture providing the path to a pre-written HDF5
    file (presumably — confirm against the fixture definition).
    """
    src_cls = Source.find_source('hdf5')
    assert src_cls is not None
    idnum = 0
    num_workers = 1
    heartbeat_period = 5
    # minimal source configuration pointing at the fixture file
    src_cfg = {
        'type': 'hdf5',
        'interval': 0,
        'init_time': 0,
        'files': [str(hdf5writer)],
    }
    # expected source name -> type mapping reported by Configure
    expected_cfg = {
        'gasdet': float,
        'ec': int,
        'camera': at.Group,
        'camera:image': at.Array2d,
        'camera:raw': at.Array3d,
        'timestamp': int,
        'heartbeat': int,
        'source': at.DataSource,
    }
    # expected members (and types) of each Group payload
    expected_grps = {
        'camera': {
            'image': at.Array2d,
            'raw': at.Array3d,
        }
    }
    source = src_cls(idnum, num_workers, heartbeat_period, src_cfg)
    assert source.src_type == 'hdf5'
    # request all the sources
    source.request(set(expected_cfg))
    # loop over all the events
    count = 0
    for evt in source.events():
        if evt.mtype == MsgTypes.Transition:
            assert evt.identity == idnum
            assert isinstance(evt.payload, Transition)
            if evt.payload.ttype == Transitions.Configure:
                # Configure carries serialized types; decode before comparing
                sources = {k: at.loads(v) for k, v in evt.payload.payload.items()}
                assert sources == expected_cfg
        elif evt.mtype == MsgTypes.Datagram:
            assert set(evt.payload) == set(expected_cfg)
            for name, data in evt.payload.items():
                if type(data) in at.NumPyTypeDict:
                    # numpy scalar: map its concrete type back to the expected one
                    assert at.NumPyTypeDict[type(data)] == expected_cfg[name]
                else:
                    assert isinstance(data, expected_cfg[name])
                    if isinstance(data, at.DataSource):
                        assert data.cfg == src_cfg
                        assert data.key == 1
                        assert isinstance(data.run, h5py.File)
                        assert data.evt == (count, data.run)
                    elif isinstance(data, at.Group):
                        assert data.src == source.src_type
                        assert data.type == 'Group'
                        assert data.name == name
                        assert name in expected_grps
                        assert set(data) == set(expected_grps[name])
                        # check the types of the group
                        for k, v in expected_grps[name].items():
                            assert isinstance(data[k], v)
            count += 1
        elif evt.mtype == MsgTypes.Heartbeat:
            # heartbeats arrive every heartbeat_period datagrams
            assert count == heartbeat_period * (evt.payload.identity + 1)
    # check that the last evt was an unconfigure
    assert evt.mtype == MsgTypes.Transition and evt.payload.ttype == Transitions.Unconfigure
def test_random_source(sim_src_cfg):
    """Exercise the 'random' simulated Source: reported names/types, the
    Configure message, and that datagrams are empty before a request()
    and carry correctly typed values after.

    ``sim_src_cfg`` is a fixture with a ``'config'`` mapping of source
    name -> {'dtype': ..., ...} entries.
    """
    src_cls = Source.find_source('random')
    assert src_cls is not None
    idnum = 0
    num_workers = 1
    heartbeat_period = 10
    source = src_cls(idnum, num_workers, heartbeat_period, sim_src_cfg)
    assert source.src_type == 'random'
    # check the names from the source are correct
    expected_names = {'timestamp', 'heartbeat', 'source'}
    expected_names.update(sim_src_cfg['config'].keys())
    assert source.names == expected_names
    # check the types from the source are correct
    expected_dtypes = {'timestamp': int, 'heartbeat': int, 'source': at.DataSource}
    for name, cfg in sim_src_cfg['config'].items():
        if cfg["dtype"] == "Scalar":
            # scalars default to float unless flagged as integer
            if cfg.get("integer", False):
                expected_dtypes[name] = int
            else:
                expected_dtypes[name] = float
        elif cfg["dtype"] == "Waveform":
            expected_dtypes[name] = at.Array1d
        elif cfg["dtype"] == "Image":
            expected_dtypes[name] = at.Array2d
        else:
            expected_dtypes[name] = None
    assert source.types == expected_dtypes
    # check the returned configuration message
    config = source.configure()
    assert config.mtype == MsgTypes.Transition
    assert config.identity == idnum
    assert isinstance(config.payload, Transition)
    assert config.payload.ttype == Transitions.Configure
    assert set(config.payload.payload) == expected_names
    for name, dtype in config.payload.payload.items():
        # Configure payload values are serialized types
        assert at.loads(dtype) == expected_dtypes[name]
    # do a first loop over the data (events should be empty)
    for msg in source.events():
        if msg.mtype == MsgTypes.Datagram:
            assert not msg.payload
            break
    # test the request feature of the source
    assert not source.requested_names
    source.request(expected_names)
    assert source.requested_names == expected_names
    # do a second loop over the data (events should be non-empty)
    for msg in source.events():
        if msg.mtype == MsgTypes.Datagram:
            for name in expected_names:
                assert name in msg.payload
                # array-like payloads are plain ndarrays of the expected rank
                if expected_dtypes[name] == at.Array1d:
                    assert type(msg.payload[name]) == np.ndarray
                    assert msg.payload[name].ndim == 1
                elif expected_dtypes[name] == at.Array2d:
                    assert type(msg.payload[name]) == np.ndarray
                    assert msg.payload[name].ndim == 2
                else:
                    assert type(msg.payload[name]) == expected_dtypes[name]
            break
def test_static_source(sim_src_cfg):
    """Exercise the 'static' simulated Source: reported names/types, the
    Configure message, event count/timestamps before a request(), and the
    constant payload values (all ones) after.

    ``sim_src_cfg`` is a fixture with a ``'config'`` mapping and a
    ``'bound'`` event count.
    """
    src_cls = Source.find_source('static')
    assert src_cls is not None
    idnum = 0
    num_workers = 1
    heartbeat_period = 10
    source = src_cls(idnum, num_workers, heartbeat_period, sim_src_cfg)
    assert source.src_type == 'static'
    # check the names from the source are correct
    expected_names = {'timestamp', 'heartbeat', 'source'}
    expected_names.update(sim_src_cfg['config'].keys())
    assert source.names == expected_names
    # check the types from the source are correct
    expected_dtypes = {'timestamp': int, 'heartbeat': int, 'source': at.DataSource}
    for name, cfg in sim_src_cfg['config'].items():
        if cfg["dtype"] == "Scalar":
            # scalars default to float unless flagged as integer
            if cfg.get("integer", False):
                expected_dtypes[name] = int
            else:
                expected_dtypes[name] = float
        elif cfg["dtype"] == "Waveform":
            expected_dtypes[name] = at.Array1d
        elif cfg["dtype"] == "Image":
            expected_dtypes[name] = at.Array2d
        else:
            expected_dtypes[name] = None
    assert source.types == expected_dtypes
    # check the returned configuration message
    config = source.configure()
    assert config.mtype == MsgTypes.Transition
    assert config.identity == idnum
    assert isinstance(config.payload, Transition)
    assert config.payload.ttype == Transitions.Configure
    assert set(config.payload.payload) == expected_names
    for name, dtype in config.payload.payload.items():
        # Configure payload values are serialized types
        assert at.loads(dtype) == expected_dtypes[name]
    # do a first loop over the data (events should be empty)
    count = 0
    for msg in source.events():
        if msg.mtype == MsgTypes.Datagram:
            assert not msg.payload
            count += 1
            # timestamps advance by num_workers per event, offset by worker id
            assert msg.timestamp == num_workers * count + idnum
    # check that the static source returned the correct number of events
    assert count == sim_src_cfg['bound']
    # test the request feature of the source
    assert not source.requested_names
    source.request(expected_names)
    assert source.requested_names == expected_names
    # do a second loop over the data (events should be non-empty)
    count = 0
    for msg in source.events():
        if msg.mtype == MsgTypes.Datagram:
            for name, cfg in sim_src_cfg['config'].items():
                # static source always emits ones (scalar or array)
                if cfg["dtype"] == "Scalar":
                    assert msg.payload[name] == 1
                elif (cfg["dtype"] == "Image") or (cfg["dtype"] == "Waveform"):
                    assert (msg.payload[name] == 1).all()
            count += 1
            # second pass continues where the first pass's timestamps ended
            expected_ts = num_workers * count + idnum + sim_src_cfg['bound']
            assert msg.timestamp == expected_ts
            assert msg.payload['timestamp'] == expected_ts
            assert msg.payload['heartbeat'] == expected_ts // heartbeat_period
    # check that the static source returned the correct number of events
    assert count == sim_src_cfg['bound']
def test_psana_source(xtcwriter):
    """End-to-end check of the 'psana' Source against an xtcwriter-produced
    file: the Configure message, per-event payload types (detectors,
    groups, data source), and the event count at the first heartbeat.

    ``xtcwriter`` is a fixture providing the path to a generated xtc file
    (presumably — confirm against the fixture definition).
    """
    psana_src_cls = Source.find_source('psana')
    assert psana_src_cls is not None
    idnum = 0
    num_workers = 1
    heartbeat_period = 10
    src_cfg = {
        'type': 'psana',
        'interval': 0,
        'init_time': 0,
        'files': [str(xtcwriter)],
    }
    # these are broken in xtcwriter
    excludes = {'HX2:DVD:GCC:01:PMON', 'HX2:DVD:GPI:01:PMON', 'motor1', 'motor2'}
    # expected source name -> type mapping reported by Configure
    expected_cfg = {
        'HX2:DVD:GCC:01:PMON': float,
        'HX2:DVD:GPI:01:PMON': str,
        'motor1': float,
        'motor2': float,
        'xpphsd': at.Detector,
        'xpphsd:calibconst': typing.Dict,
        'xpphsd:raw:calib': at.Array1d,
        'xpphsd:raw': at.Group,
        'xpphsd:fex:calib': at.Array1d,
        'xpphsd:fex': at.Group,
        'xppcspad': at.Detector,
        'xppcspad:calibconst': typing.Dict,
        'xppcspad:raw:calib': at.Array3d,
        'xppcspad:raw:image': at.Array2d,
        'xppcspad:raw:raw': at.Array3d,
        'xppcspad:raw': at.Group,
        'epicsinfo': at.Detector,
        'epicsinfo:epicsinfo': typing.Dict,
        'epicsinfo:calibconst': typing.Dict,
        'timestamp': float,
        'heartbeat': int,
        'source': at.DataSource,
    }
    # expected members (and types) of each Group payload
    expected_grps = {
        'xpphsd:raw': {
            'calib': at.Array1d,
        },
        'xpphsd:fex': {
            'calib': at.Array1d,
        },
        'xppcspad:raw': {
            'calib': at.Array3d,
            'image': at.Array2d,
            'raw': at.Array3d,
        },
    }
    # expected Group.type strings keyed by group name
    expected_grp_types = {
        'xpphsd:raw': 'hsd_raw_0_0_0',
        'xpphsd:fex': 'hsd_fex_4_5_6',
        'xppcspad:raw': 'cspad_raw_2_3_42',
    }
    psana_source = psana_src_cls(idnum, num_workers, heartbeat_period, src_cfg)
    assert psana_source.src_type == 'psana'
    evtgen = psana_source.events()
    # check the returned configuration message
    config = next(evtgen)  # first event is the config
    assert config.mtype == MsgTypes.Transition
    assert config.identity == idnum
    assert isinstance(config.payload, Transition)
    assert config.payload.ttype == Transitions.Configure
    # Configure carries serialized types; decode before comparing
    sources = {k: at.loads(v) for k, v in config.payload.payload.items()}
    assert sources == expected_cfg
    # request all the sources
    psana_source.request(set(expected_cfg))
    # loop over all the events
    for count, msg in enumerate(evtgen):
        if msg.mtype == MsgTypes.Datagram:
            assert set(msg.payload) == set(expected_cfg)
            for name, data in msg.payload.items():
                # skip the sources known to be broken in xtcwriter
                if name in excludes:
                    continue
                assert isinstance(data, expected_cfg[name])
                if isinstance(data, at.DataSource):
                    assert data.cfg == src_cfg
                    assert data.key == 1
                    assert isinstance(data.run, psana.psexp.run.Run)
                    assert isinstance(data.evt, psana.event.Event)
                elif isinstance(data, at.Group):
                    assert name in expected_grps and name in expected_grp_types
                    assert data.src == psana_source.src_type
                    assert data.type == expected_grp_types[name]
                    assert data.name == name
                    assert set(data) == set(expected_grps[name])
                    # check the types of the group
                    for k, v in expected_grps[name].items():
                        assert isinstance(data[k], v)
        elif msg.mtype == MsgTypes.Heartbeat:
            break
    # check the number of events we processed
    assert count == heartbeat_period
def test_dumps_and_loads(types):
    """Every object in *types* must survive an amitypes dumps/loads round trip."""
    for original in types:
        round_tripped = amitypes.loads(amitypes.dumps(original))
        assert round_tripped == original
def test_loads(type_map):
    """Each serialized string in *type_map* must decode to its paired object."""
    for expected_obj, serialized in type_map:
        # check that the object loads as expected
        assert amitypes.loads(serialized) == expected_obj
async def updateSources(self, init=False):
    """Consume messages from the graphinfo socket forever (or until the
    first 'sources' update when *init* is true) and react to them:

    - 'sources': rebuild the source library (and, unless *init*, refresh
      the source tree widget and status bar)
    - 'event_rate': aggregate per-worker timing into an events/sec label
    - 'error': surface the error on the offending node and the status bar
    """
    # number of workers is fetched lazily on the first event_rate message
    num_workers = None
    while True:
        # each graphinfo message is (topic, source/sender, python payload)
        topic = await self.graphinfo.recv_string()
        source = await self.graphinfo.recv_string()
        msg = await self.graphinfo.recv_pyobj()
        if topic == 'sources':
            source_library = SourceLibrary()
            # NOTE: the loop variable deliberately reuses (shadows) the
            # outer `source`; msg maps source name -> serialized type
            for source, node_type in msg.items():
                # build the cumulative label path from the colon-separated
                # prefix parts, e.g. 'a:b:c' -> ['a', 'a:b']
                pth = []
                for part in source.split(':')[:-1]:
                    if pth:
                        part = ":".join((pth[-1], part))
                    pth.append(part)
                source_library.addNodeType(source, amitypes.loads(node_type), [pth])
            self.source_library = source_library
            if init:
                # during initialization we only need the library populated
                break
            # refresh the source tree in the UI
            ctrl = self.widget()
            tree = ctrl.ui.source_tree
            ctrl.ui.clear_model(tree)
            ctrl.ui.create_model(ctrl.ui.source_tree, self.source_library.getLabelTree())
            ctrl.chartWidget.updateStatus("Updated sources.")
        elif topic == 'event_rate':
            if num_workers is None:
                # first rate message: look up worker count and allocate
                # one accumulator slot per worker
                ctrl = self.widget()
                compiler_args = await ctrl.graphCommHandler.compilerArgs
                num_workers = compiler_args['num_workers']
                events_per_second = [None] * num_workers
                total_events = [None] * num_workers
            # msg[graph_name] is a sequence of per-event (start, end) time
            # pairs — presumably; confirm against the worker's publisher
            time_per_event = msg[ctrl.graph_name]
            # the worker index is embedded in the sender name
            worker = int(re.search(r'(\d)+', source).group())
            events_per_second[worker] = len(time_per_event) / (
                time_per_event[-1][1] - time_per_event[0][0])
            total_events[worker] = msg['num_events']
            # only update the label once every worker has reported in
            if all(events_per_second):
                events_per_second = int(np.sum(events_per_second))
                total_num_events = int(np.sum(total_events))
                ctrl = self.widget()
                ctrl.ui.rateLbl.setText(
                    f"Num Events: {total_num_events} Events/Sec: {events_per_second}")
                # reset the accumulators for the next reporting round
                events_per_second = [None] * num_workers
                total_events = [None] * num_workers
        elif topic == 'error':
            ctrl = self.widget()
            if hasattr(msg, 'node_name'):
                # error tied to a specific graph node: mark it and report
                node_name = ctrl.metadata[msg.node_name]['parent']
                node = self.nodes(data='node')[node_name]
                node.setException(msg)
                ctrl.chartWidget.updateStatus(
                    f"{source} {node.name()}: {msg}", color='red')
            else:
                ctrl.chartWidget.updateStatus(f"{source}: {msg}", color='red')