def start_dispatcher(host, port, logfile=None):
    """The dispatcher function.

    Parameters
    ----------
    logfile : string
        Comes from the user's command line, e.g. ``--logfile=temp.log``
        makes ``logfile`` equal to ``"temp.log"``. It may be empty.
    """
    dispatcher = RemoteDispatcher((host, port))

    if logfile is not None:
        raise ValueError(
            "Parameter 'logfile' is deprecated and will be removed in future releases. "
            "Currently it has no effect. Call the function with 'logfile=None'."
        )

    def log_writer(name, doc):
        """Logging wrapper with the (name, doc) signature that .subscribe() expects.

        Writes each document (a dict) to the log as a string via logger.*.
        """
        if name in ('start', 'stop'):
            logger.info("%s: %r", name, doc)
        else:
            logger.debug("%s: %r", name, doc)

    dispatcher.subscribe(log_writer)  # Subscribe log writer
    dispatcher.start()
def start_dispatcher(host, port, logfile):
    """The dispatcher function.

    Parameters
    ----------
    logfile : string
        Comes from the user's command line, e.g. ``--logfile=temp.log``
        makes ``logfile`` equal to ``"temp.log"``. It may be empty.
    """
    dispatcher = RemoteDispatcher((host, port))
    if logfile:
        set_handler(file=logfile)

    def log_writer(name, doc):
        """Logging wrapper with the (name, doc) signature that .subscribe() expects.

        Writes each document (a dict) to the log as a string via logger.*.
        """
        if name in ('start', 'stop'):
            logger.info("%s: %r", name, doc)
        else:
            logger.debug("%s: %r", name, doc)

    dispatcher.subscribe(log_writer)  # Subscribe log writer
    dispatcher.start()
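A minimal sketch of how a dispatcher function like the two above might be wired to a command line, assuming argparse; the flag names are illustrative, not taken from the original source:

import argparse

def main():
    # Hypothetical CLI wrapper around start_dispatcher; argument names are
    # illustrative assumptions.
    parser = argparse.ArgumentParser(description="Start a 0MQ document dispatcher.")
    parser.add_argument("host", type=str, help="0MQ proxy host, e.g. localhost")
    parser.add_argument("port", type=int, help="0MQ proxy out port, e.g. 5578")
    parser.add_argument("--logfile", type=str, default=None,
                        help="optional log file path")
    args = parser.parse_args()
    start_dispatcher(args.host, args.port, args.logfile)

if __name__ == "__main__":
    main()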
def make_and_start_dispatcher(queue):

    def put_in_queue(name, doc):
        print('putting ', name, 'in queue')
        queue.put((name, doc))

    d = RemoteDispatcher('127.0.0.1:5568', deserializer=cloudpickle.loads)
    d.subscribe(put_in_queue)
    print("REMOTE IS READY TO START")
    d.loop.call_later(9, d.stop)
    d.start()
def make_and_start_dispatcher(queue):

    def put_in_queue(name, doc):
        print('putting ', name, 'in queue')
        queue.put((name, doc))

    d = RemoteDispatcher('127.0.0.1:5568')
    d.subscribe(put_in_queue)
    print("REMOTE IS READY TO START")
    d.loop.call_later(9, d.stop)
    d.start()
def make_and_start_dispatcher(queue):

    def put_in_queue(name, doc):
        print('putting ', name, 'in queue')
        queue.put((name, doc))

    d = RemoteDispatcher('127.0.0.1:5568')
    # Older bluesky API: subscribe took the stream name ('all') first, and the
    # event loop was exposed as the private attribute ``_loop``.
    d.subscribe('all', put_in_queue)
    print("REMOTE IS READY TO START")
    d._loop.call_later(9, d.stop)
    d.start()
def make_and_start_dispatcher(queue):

    def put_in_queue(name, doc):
        print("putting ", name, "in queue")
        queue.put((name, doc))

    d = RemoteDispatcher("127.0.0.1:5568")
    d.subscribe(put_in_queue)
    print("REMOTE IS READY TO START")
    d.loop.call_later(9, d.stop)
    d.start()
def run(self):

    def put_in_queue(name, doc):
        print("putting ", name, "in queue")
        self._queue.put((name, doc))

    from bluesky.callbacks.zmq import RemoteDispatcher

    d = RemoteDispatcher("127.0.0.1:5568")
    d.subscribe(put_in_queue)
    print("REMOTE IS READY TO START")
    d.loop.call_later(9, d.stop)
    d.start()
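These helpers are shaped like test fixtures: they run in a child process and forward documents into a multiprocessing.Queue. A sketch of the driving side, assuming a 0MQ proxy is already forwarding port 5567 to 5568 (the proxy and the Publisher port are assumptions, not shown in the snippets above):

import multiprocessing
import time

from bluesky.callbacks.zmq import Publisher

def drive_dispatcher():
    # Run one of the make_and_start_dispatcher helpers above in a child process.
    queue = multiprocessing.Queue()
    proc = multiprocessing.Process(target=make_and_start_dispatcher,
                                   daemon=True, args=(queue,))
    proc.start()
    time.sleep(5)  # give the remote dispatcher time to connect

    # Publish a (name, doc) pair through the proxy's assumed in-port.
    publisher = Publisher('127.0.0.1:5567')
    publisher('start', {'uid': 'example-uid', 'time': 0.0})

    name, doc = queue.get(timeout=10)
    print('received', name, doc)
    proc.join()  # the helper stops its own loop after 9 seconds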
def main():
    parser = argparse.ArgumentParser(
        description='Listen for documents over 0MQ and validate Resources.')
    parser.add_argument(
        'proxy_address', type=str,
        help="bluesky-0MQ-proxy out address, given as in localhost:5578")
    parser.add_argument(
        '--emails', required=False, nargs='*',
        help="space-separated list of email addresses")
    args = parser.parse_args()

    log_handler = logging.StreamHandler()  # stderr
    log_handler.setFormatter(LogFormatter())
    logger.setLevel('INFO')
    logger.addHandler(log_handler)

    if args.emails:
        server_name = socket.gethostname()
        smtp_handler = SMTPHandler(
            mailhost='localhost',
            fromaddr=f'Resource Health Check <noreply@{server_name}>',
            toaddrs=args.emails,
            subject=(f'Error report from resource health check on '
                     f'{server_name}')
        )
        smtp_handler.setFormatter(LogFormatter(color=False))
        smtp_handler.setLevel('WARNING')
        # Use QueueHandler in case sending email is slow. LogRecords flow
        # from QueueHandler -> Queue -> QueueListener -> SMTPHandler.
        cleanup_listener = True
        que = queue.Queue()
        queue_handler = QueueHandler(que)
        queue_listener = QueueListener(que, smtp_handler,
                                       respect_handler_level=True)
        logger.addHandler(queue_handler)
        queue_listener.start()
    else:
        cleanup_listener = False

    rr = RunRouter([validator_factory])
    rd = RemoteDispatcher(args.proxy_address)
    rd.subscribe(rr)
    logger.info(f'Listening to {args.proxy_address}')
    try:
        rd.start()  # runs forever
    finally:
        if cleanup_listener:
            queue_listener.stop()
class ConsumerThread(QThread):
    documents = Signal([tuple])

    def __init__(self, *args, zmq_address, **kwargs):
        super().__init__(*args, **kwargs)
        self.dispatcher = RemoteDispatcher(zmq_address)

        def callback(name, doc):
            if name == 'start':
                log.debug("New streaming Run: uid=%r", doc['uid'])
            self.documents.emit((name, doc))

        self.dispatcher.subscribe(callback)

    def run(self):
        self.dispatcher.start()
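A sketch of how ConsumerThread might be used from inside an existing Qt application; the slot below is illustrative:

def on_documents(payload):
    # Qt slot receiving the (name, doc) tuples emitted by the consumer thread.
    name, doc = payload
    print(name, doc.get('uid', ''))

consumer = ConsumerThread(zmq_address='localhost:5578')
consumer.documents.connect(on_documents)
consumer.start()  # QThread.start() runs ConsumerThread.run() in a new thread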
def main():
    from bluesky.callbacks.zmq import Publisher, RemoteDispatcher

    parser = argparse.ArgumentParser(
        description="Listen for unfilled documents over 0MQ and emit filled ones.")
    parser.add_argument(
        "receive_from", type=str,
        help="bluesky-0MQ-proxy out address, given as in localhost:5578",
    )
    parser.add_argument(
        "send_to", type=str,
        help="bluesky-0MQ-proxy in address, given as in localhost:5578",
    )
    args = parser.parse_args()

    # Data flows through:
    # * RemoteDispatcher (0MQ)
    # * Accumulator (caches until stop doc is received)
    # * EmittingFiller (fills external data)
    # * Publisher (0MQ)
    publisher = Publisher(args.send_to)
    handler_registry = discover_handlers()

    def factory(name, doc):
        filler = EmittingFiller(handler_registry, inplace=False,
                                callback=publisher, coerce="force_numpy")
        accumulator = Accumulator(callback=filler)
        return [accumulator], []

    rr = RunRouter([factory])
    rd = RemoteDispatcher(args.receive_from)
    rd.subscribe(rr)
    print(f"Listening to {args.receive_from}")
    try:
        rd.start()  # runs forever
    except KeyboardInterrupt:
        print("Terminated by user; exiting")
def start_callback():
    from bluesky.callbacks.zmq import RemoteDispatcher
    from SciStreams.config import config as configd
    from SciStreams.interfaces.databroker.databases import databases

    cmsdb = databases['cms:data']
    # db needed to fill events
    stream_buffer = BufferStream(db=cmsdb)

    def callback(*nds):
        nds = stream_buffer(nds)
        stream_input(*nds)

    # get the dispatcher port for bluesky zeromq process
    ipstring = "localhost:{:4d}".format(configd['bluesky']['port'])
    d = RemoteDispatcher(ipstring)
    d.subscribe(callback)
    # d.subscribe(print)
    # when done subscribing things and ready to use:
    d.start()  # runs event loop forever
def start_analysis(
        mask_kwargs=None,
        pdf_kwargs=None,
        fq_kwargs=None,
        mask_setting=None,
        save_kwargs=None,
        # pdf_argrelmax_kwargs=None,
        # mean_argrelmax_kwargs=None
):
    """Start analysis pipeline.

    Parameters
    ----------
    mask_kwargs : dict
        The kwargs passed to the masking, see xpdtools.tools.mask_img
    pdf_kwargs : dict
        The kwargs passed to the pdf generator, see xpdtools.tools.pdf_getter
    fq_kwargs : dict
        The kwargs passed to the fq generator, see xpdtools.tools.fq_getter
    mask_setting : dict
        The setting of the mask
    save_kwargs : dict
        The kwargs passed to the main formatting node (mostly the filename
        template)
    """
    # if pdf_argrelmax_kwargs is None:
    #     pdf_argrelmax_kwargs = {}
    # if mean_argrelmax_kwargs is None:
    #     mean_argrelmax_kwargs = {}
    d = RemoteDispatcher(glbl_dict['proxy_address'])
    install_qt_kicker(loop=d.loop)  # This may need to be d._loop depending on tag

    if mask_setting is None:
        mask_setting = {}
    if fq_kwargs is None:
        fq_kwargs = {}
    if pdf_kwargs is None:
        pdf_kwargs = {}
    if mask_kwargs is None:
        mask_kwargs = {}
    if save_kwargs is None:
        save_kwargs = {}
    for a, b in zip(
            [
                mask_kwargs,
                pdf_kwargs,
                fq_kwargs,
                mask_setting,
                save_kwargs,
                # pdf_argrelmax_kwargs,
                # mean_argrelmax_kwargs
            ],
            [
                _mask_kwargs,
                _pdf_kwargs,
                _fq_kwargs,
                _mask_setting,
                _save_kwargs,
                # _pdf_argrelmax_kwargs,
                # _mean_argrelmax_kwargs
            ]):
        if a:
            b.update(a)

    d.subscribe(lambda *x: raw_source.emit(x))
    print('Starting Analysis Server')
    d.start()
import sys

from bluesky.callbacks.zmq import RemoteDispatcher

# listen for 0MQ messages from xf28id2-ca1:5578
zmq_server = "xf28id2-ca1:5578"
zmq_prefix = sys.argv[1].encode()

def echo(name, doc):
    print(f"got a {name} document with 0MQ prefix {zmq_prefix}")
    if name == "start":
        print(f"  start id {doc['uid']}")
    elif name == "descriptor":
        print(f"  start id {doc['run_start']}")
        print(f"  uid {doc['uid']}")
    elif name == "event":
        print(f"  descriptor id {doc['descriptor']}")
        print(f"  uid {doc['uid']}")

d = RemoteDispatcher(zmq_server, prefix=zmq_prefix)
d.subscribe(echo)
print("ZMQ ECHO CONSUMER IS RUNNING")
d.start()
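For reference, documents only reach this echo consumer if something publishes them with a matching prefix. A hedged sketch of the publishing side, assuming the proxy's in-port is 5577 (only the out-port 5578 appears above) and using ophyd's simulated detector:

from bluesky import RunEngine
from bluesky.callbacks.zmq import Publisher
from bluesky.plans import count
from ophyd.sim import det

RE = RunEngine()
# the prefix must match the one the echo consumer subscribes with
RE.subscribe(Publisher("xf28id2-ca1:5577", prefix=b"example-prefix"))
RE(count([det]))  # emits start/descriptor/event/stop documents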
def main():
    rr = RunRouter([factory])
    dispatcher = RemoteDispatcher('localhost:5578')
    dispatcher.subscribe(rr)
    dispatcher.start()
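The `factory` referenced here is not shown in the excerpt. With event_model's RunRouter, a factory receives the ('start', start_doc) pair for each new run and returns two lists, callbacks and sub-factories; a minimal illustrative stand-in:

def factory(name, start_doc):
    # Called once per run, with name == 'start'.
    def printer(name, doc):
        print(name, doc.get('uid', ''))
    return [printer], []  # (callbacks for this run, sub-factories)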
db = Broker(mds=mds, reg=fs)
td = TemporaryDirectory()
source = conf_main_pipeline(db, td.name,
                            # vis=False,
                            write_to_disk=False,
                            # mask_setting=None
                            )
# a = LiveImage('pe1_image')
loop = zmq_asyncio.ZMQEventLoop()
install_qt_kicker(loop=loop)

def put_in_queue(nd):
    if nd[0] == 'event':
        nd[1]['data']['pe1_image'] = np.asarray(nd[1]['data']['pe1_image'])
    # if nd[0] == 'event':
    #     db.fill_event(nd[1])
    # print(nd)
    # source.emit(nd)
    a(*nd)
    plt.pause(.1)

disp = RemoteDispatcher('127.0.0.1:5568', loop=loop)
# disp.subscribe(istar(put_in_queue))
# disp.subscribe(a)
disp.subscribe(istar(source.emit))
print("REMOTE IS READY TO START")
# disp._loop.call_later(60, disp.stop)
disp.start()
stream_source = self.descriptor_doc['data_keys'][stream_name]['source']
if stream_source == 'pizzabox-di-file':
    data = load_trig_trace(raw_data)
if stream_source == 'pizzabox-adc-file':
    data = load_adc_trace(raw_data)
    stream_offset = f'{stream_name} offset'
    if stream_offset in self.start_doc:
        data.iloc[:, 1] -= self.start_doc[stream_offset]
    stream_gain = f'{stream_name} gain'
    if stream_gain in self.start_doc:
        data.iloc[:, 1] /= 10 ** self.start_doc[stream_gain]
if stream_source == 'pizzabox-enc-file':
    data = load_enc_trace(raw_data)
    if dev_name == 'hhm_theta':
        data.iloc[:, 1] = xas.xray.encoder2energy(
            data['encoder'], 360000,
            -float(self.start_doc['angle_offset']))
        dev_name = 'energy'
self._preprocessed_data[dev_name] = data


dispatcher = RemoteDispatcher('localhost:5578', prefix=b'raw')

if __name__ == '__main__':
    dispatcher.subscribe(Interpolator(handler_registry))
    dispatcher.start()
# publish 0MQ messages with xpdan's prefix
zmq_publishing_prefix = b"an"
zmq_analysis_publisher = ZmqPublisher(
    address=("127.0.0.1", 4567),
    prefix=zmq_publishing_prefix
)

def zmq_publish_from_analysis_factory(start_name, start_doc):
    print(
        f"zmq_publish_from_analysis_factory called with {start_name}:\n"
        f"{pprint.pformat(start_doc)}\n"
    )

    def zmq_publish_from_analysis(name, doc):
        if name == "start":
            # add batch_count
            print("adding batch_count=1")
            doc["batch_count"] = 1
        print(f"analysis consumer publishing {name}:\n{pprint.pformat(doc)}\n")
        zmq_analysis_publisher(name, doc)

    return [zmq_publish_from_analysis], []

zmq_dispatcher.subscribe(RunRouter(factories=[zmq_publish_from_analysis_factory]))

print(f"NOT XPDAN CONSUMER IS LISTENING ON {zmq_listening_prefix}")
print(f"AND PUBLISHING ON {zmq_publishing_prefix}")
zmq_dispatcher.start()