def run(self):
    """Relay messages from the subscribed ZMQ endpoints to the websocket manager.

    Connects a SUB socket (subscribed to all topics) to every address in
    ``self.addrs``, then polls until ``self.kill`` is set.  Each received JSON
    message must carry ``topic``, ``msg`` and ``exclude`` keys and is forwarded
    via ``self.wsmanager.send``.  All sockets are closed on exit.
    """
    ctx = zmq.Context()
    sockets = []
    poller = zmq.Poller()
    for addr in self.addrs:
        socket = ctx.socket(zmq.SUB)
        socket.connect(addr)
        socket.setsockopt_string(zmq.SUBSCRIBE, u"")
        sockets.append(socket)
        poller.register(socket, zmq.POLLIN)
    # BUG FIX: the original referenced an undefined name ``timeout`` which
    # raised NameError on the first poll.  Use the instance's configured
    # timeout when present, else fall back to 1 second.
    timeout = getattr(self, 'timeout', 1.0)
    try:
        while not self.kill:
            # poll() takes milliseconds
            ready = dict(poller.poll(timeout * 1000))
            for socket in ready:
                payload = socket.recv_json()
                # Unpack without re-binding the container name (the original
                # shadowed ``msg`` with its own 'msg' field).
                topic = payload['topic']
                body = payload['msg']
                exclude = payload['exclude']
                self.wsmanager.send(topic, body, exclude=exclude)
    finally:
        for s in sockets:
            s.close()
def __init__(self, endpoint=DEFAULT_HEARTBEAT, interval=10., io_loop=None, ctx=None, register=5, onregister=None):
    """Heartbeat publisher.

    Binds a PUB socket to ``endpoint`` and schedules ``self._ping`` on a
    periodic callback every ``interval`` seconds.

    :param endpoint: ZMQ endpoint to bind the PUB socket to.
    :param interval: ping period in seconds (the callback is configured in ms).
    :param io_loop: IOLoop to use; defaults to the singleton instance.
    :param ctx: optional shared ZMQ context; when None a private one is
        created and flagged for teardown via ``kill_context``.
    :param register: presumably a ping-count cadence for register events —
        TODO confirm against ``_ping``.
    :param onregister: callback presumably invoked on registration —
        TODO confirm usage.
    """
    self.loop = io_loop or ioloop.IOLoop.instance()
    self.daemon = True
    # Only terminate the context later if we created it ourselves.
    self.kill_context = ctx is None
    self.context = ctx or zmq.Context()
    self.endpoint = endpoint
    self.running = False
    self.interval = interval
    logger.debug('Publishing to ' + self.endpoint)
    self._endpoint = self.context.socket(zmq.PUB)
    self._endpoint.linger = 0  # drop unsent messages immediately on close
    #self._endpoint.identity = b'HB'
    self._endpoint.hwm = 0  # no high-water mark (unbounded queue)
    self._endpoint.bind(self.endpoint)
    # PeriodicCallback expects milliseconds.
    self._cb = ioloop.PeriodicCallback(self._ping, interval * 1000, io_loop=self.loop)
    self.register = register
    self.current_register = 0
    self.onregister = onregister
def __init__(self, zmq_bind_ip=None, zmq_work_port=None, zmq_results_port=11300, quiet=False, connect_timeout=None, network_timeout=None):
    """Set up PUSH/PULL work-distribution sockets when an endpoint is configured.

    Sockets (and the ZMQ context) are only created when both ``zmq_bind_ip``
    and ``zmq_work_port`` are supplied; the timeout and verbosity settings
    are stored unconditionally.
    """
    if zmq_bind_ip is not None and zmq_work_port is not None:
        use_ipv6 = is_ipv6(zmq_bind_ip)
        work_endpoint = 'tcp://%s:%d' % (zmq_bind_ip, zmq_work_port)
        results_endpoint = 'tcp://%s:%d' % (zmq_bind_ip, zmq_results_port)

        self.context = zmq.Context()

        # Outbound work distribution.
        self.work_push = self.context.socket(zmq.PUSH)
        self.work_push.ipv6 = use_ipv6
        self.work_push.bind(work_endpoint)

        # Inbound results collection.
        self.results_pull = self.context.socket(zmq.PULL)
        self.results_pull.ipv6 = use_ipv6
        self.results_pull.bind(results_endpoint)

    self.connect_timeout = connect_timeout
    self.network_timeout = network_timeout
    self.quiet = quiet
def __init__(self, worker_collection, zmq_url, auth=None):
    """ROUTER-socket front end for the HIRO engine interface.

    :param worker_collection: worker collection; receives the shared
        response queue via ``register_response_queue``.
    :param zmq_url: endpoint the ROUTER socket binds to.
    :param auth: optional ``(public_key, secret_key)`` pair enabling CURVE
        encryption; malformed keys abort the process with exit code 5.
    """
    self.logger = logging.getLogger('root')
    self.worker_collection = worker_collection
    self.zmq_url = zmq_url
    self.zmq_ctx = zmq.Context()
    self.zmq_socket = self.zmq_ctx.socket(zmq.ROUTER)
    if auth:
        try:
            self.logger.info(
                "Using CURVE encryption for HIRO engine interface")
            self.zmq_socket.curve_publickey, self.zmq_socket.curve_secretkey = auth
            self.zmq_socket.curve_server = True
        except ZMQError:
            # Bad key material is fatal: refuse to start rather than run a
            # broken or unencrypted endpoint.
            self.logger.critical(
                "CURVE keys malformed, please check your config file!")
            sys.exit(5)
    else:
        self.logger.warn("HIRO engine interface is not encrypted!")
    self.zmq_socket.bind(self.zmq_url)
    # Unbounded joinable queue shared with the workers for their responses.
    self.response_queue = gevent.queue.JoinableQueue(maxsize=0)
    self.worker_collection.register_response_queue(self.response_queue)
def zmqClient():
    """Fan ZMQ-published messages out to every connected websocket client.

    Subscribes to everything on the ipc://zmq.S endpoint and relays each
    message, prefixed with the client uid, to all entries in the
    module-level ``clients`` mapping.  (Python 2 code: print statements
    and ``dict.iteritems``.)
    """
    import zmq.green as zmq
    context = zmq.Context()
    socket = context.socket(zmq.SUB)
    socket.connect('ipc://zmq.S')
    # subscribe to everything
    socket.setsockopt(zmq.SUBSCRIBE, '')
    while True:
        server_msg = socket.recv()
        print "*** sending {} to {} clients".format(server_msg, len(clients))
        for uid, ws in clients.iteritems():
            msg = "{uid} {msg}".format(uid=uid, msg=server_msg)
            try:
                ws.send(msg)
            except IOError:
                # Dead/disconnected websocket; keep serving the others.
                print "error sending to {}".format(uid)
def start(self, controller):
    """Start (or restart) the measurement-collection greenlet.

    Kills any previously spawned collector stored in the controller context,
    subscribes to the measurement publisher, and spawns a greenlet that
    forwards each 'Measurements' multipart frame to the controller.
    """
    # Replace an already-running collector, if any.
    thread = controller.context.get('thread', None)
    if thread is not None:
        gevent.kill(thread)
    context = zmq.Context()
    socket = context.socket(zmq.SUB)
    # NOTE(review): hard-coded host/port — consider making configurable.
    socket.connect("tcp://meganuke:5559")
    socket.setsockopt(zmq.SUBSCRIBE, b'')
    logger.debug("Collecting.start")

    def collect():
        # Forward every Measurements message, one decoded sample at a time.
        while True:
            logger.debug("Collecting.collect")
            message = socket.recv_multipart()
            if message[0] == b'Measurements':
                for sample in measurements_capnp.Measurements.read_multiple_bytes(
                        message[1]):
                    controller.handle_message('Measurements', sample)

    controller.context['thread'] = gevent.spawn(collect)
def test_flow(self):
    """
    Test the full event to websocket flow.

    We start the :func:`.run` with the event listener on port 5555 and the
    WebSocket server on port 5000. Then we send an event and check we
    receive it in our WebSocket connection.

    .. note::

        The coverage displayed by the coverage tool doesn't play nicely
        with gevent.
    """
    gevent.spawn(run, 5555, 5000)
    # Give the spawned servers a moment to bind before connecting.
    time.sleep(1)
    ws = websocket.create_connection('ws://localhost:5000/')
    context = zmq.Context(1)
    publisher = context.socket(zmq.PUB)
    publisher.connect('tcp://localhost:5555')
    # Topic frame first, then the payload.
    publisher.send_multipart(['worker.event', 'foo-bar'])
    self.assertEqual('foo-bar', ws.recv())
def run(): """ Fires up the relay process. """ # These form the connection to the Gateway daemon(s) upstream. context = zmq.Context() receiver = context.socket(zmq.SUB) receiver.setsockopt(zmq.SUBSCRIBE, '') for binding in settings.RELAY_RECEIVER_BINDINGS: # Relays bind upstream to an Announcer, or another Relay. receiver.connect(binding) sender = context.socket(zmq.PUB) for binding in settings.RELAY_SENDER_BINDINGS: # End users, or other relays, may attach here. sender.bind(binding) def relay_worker(message): """ This is the worker function that re-sends the incoming messages out to any subscribers. :param str message: A JSON string to re-broadcast. """ if is_message_duped(message): # We've already seen this message recently. Discard it. return if settings.RELAY_DECOMPRESS_MESSAGES: message = zlib.decompress(message) sender.send(message) logger.info("Relay is now listening for order data.") while True: # For each incoming message, spawn a greenlet using the relay_worker # function. gevent.spawn(relay_worker, receiver.recv())
def __init__(self, *args, **kwargs):
    """Remote event subscriber device.

    Sets up an optional SUB socket (filtered by the configured event
    categories plus 'EXIT') and an optional time-sync manager, then spawns
    the polling greenlet.  Any failure is printed rather than raised.
    """
    self._sub_socket = None
    self._time_sync_manager = None
    try:
        Device.__init__(self, *args, **kwargs['dconfig'])
        device_config = self.getConfiguration()
        # NOTE: 'protocal' spelling matches the configuration key in use.
        self._subscription_protocal = device_config.get(
            'subscription_protocal', None)
        if self._subscription_protocal:
            self._zmq_context = zmq.Context()
            self._sub_socket = self._zmq_context.socket(zmq.SUB)
            self._subscription_filter = device_config.get(
                'monitor_event_types', [u''])
            # If sub channel is filtering by category / event type, then auto add
            # the EXIT category to the sub channels filter list of categories
            # to include.
            if len(self._subscription_filter) > 0 and self._subscription_filter[0] != '':
                self._sub_socket.setsockopt_string(zmq.SUBSCRIBE, u'EXIT')
            for sf in self._subscription_filter:
                self._sub_socket.setsockopt_string(zmq.SUBSCRIBE, sf)
            self._sub_socket.connect(self._subscription_protocal)
        self._time_sync_manager = None
        if device_config.get('remote_iohub_address'):
            from ...net import TimeSyncState, ioHubTimeGreenSyncManager
            self._time_sync_state = TimeSyncState()
            self._time_sync_manager = ioHubTimeGreenSyncManager(
                device_config.get('remote_iohub_address'),
                self._time_sync_state)
            self._time_sync_manager.start()
        gevent.spawn(self._poll)  # really like _run
    except Exception as e:
        print2err('** Exception during RemoteEventSubscriber.__init__: ', e)
        printExceptionDetailsToStdErr()
def connect(self, push_addr=None, pull_addr=None, ctrl_addr=None):
    """Connect the worker's PULL, PUSH and ROUTER control sockets.

    Each address falls back to the conventional local TCP endpoint when not
    supplied.  BUG FIX: the original only created a socket when its address
    argument was *omitted*, so passing an explicit address silently left the
    corresponding socket attribute unset (later AttributeError); explicit
    addresses are now honoured.

    :param push_addr: endpoint for the PUSH socket (default: tcp port 6666).
    :param pull_addr: endpoint for the PULL socket (default: tcp port 5555).
    :param ctrl_addr: endpoint for the ROUTER control socket (default: tcp port 7777).
    """
    c = zmq.Context()

    addr = pull_addr or zmq_addr(5555, transport='tcp')
    self.pull_socket = c.socket(zmq.PULL)
    self.pull_socket.connect(addr)

    addr = push_addr or zmq_addr(6666, transport='tcp')
    self.push_socket = c.socket(zmq.PUSH)
    self.push_socket.connect(addr)

    addr = ctrl_addr or zmq_addr(7777, transport='tcp')
    self.ctrl_socket = c.socket(zmq.ROUTER)
    # Identify this worker to the control router.
    self.ctrl_socket.setsockopt(zmq.IDENTITY, self.worker_id)
    self.ctrl_socket.connect(addr)
def server():
    """REP server: receive protobuf Messages, print their contents, reply ACK/NACK.

    Binds a REP socket on the IPC endpoint (the TCP binding string is built
    but never used) and loops forever, parsing each request as a
    ``gmessages.Message`` and answering with an ACK on success or a NACK on
    any failure during parse/print.
    """
    # Set up port and protocol
    port = "24124"
    binding = "tcp://*:%s" % port  # NOTE(review): built but unused below
    binding_ipc = "ipc:///tmp/zmqtest"
    # Set up 0MQ as Request/Response topology
    context = zmq.Context()
    server_socket = context.socket(zmq.REP)
    # Bind to the socket
    server_socket.bind(binding_ipc)
    while True:
        # Receive the message
        message = server_socket.recv()
        try:
            # De-Serialize it
            msg = gmessages.Message()
            msg.ParseFromString(message)
            # Report it
            print(("Message of type \"%d\"received from \"%s\" "
                   "with %d peripherals") % (msg.metadata.message_type,
                                             msg.metadata.device_name,
                                             msg.metadata.periph_count))
            for p in msg.peripheral:
                print(("Peripheral \"%s\" with id: \"%d\""
                       " contains payload of \"%s\"") % (p.peripheral_name,
                                                         p.peripheral_id,
                                                         repr(p.payload)[1:-1]))
            print("")
            ack = compose_ack("ACK")
        except Exception:
            # Any parse/print failure yields a NACK instead of crashing.
            ack = compose_ack("NACK")
        # Respond to client
        server_socket.send(ack.SerializeToString())
def test_base_logger(self):
    """Verify the base logger consumes messages from the processed-sessions channel.

    Publishes 15 messages on the shared PUB socket and asserts the test
    logger's queue received all of them, then checks stop/get do not hang.
    """
    beeswarm.shared.zmq_context = zmq.Context()
    context = beeswarm.shared.zmq_context
    processed_sessions_publisher = context.socket(zmq.PUB)
    processed_sessions_publisher.bind(SocketNames.PROCESSED_SESSIONS.value)
    test_list = []
    mock_logger = TestLogger({}, test_list)
    mock_logger.start()
    # force context switch to allow greenlet to start
    gevent.sleep()
    for _ in range(15):
        processed_sessions_publisher.send('TOPIC DATA')
    # Give the subscriber greenlet time to drain the published messages.
    gevent.sleep(2)
    self.assertEqual(len(mock_logger.test_queue), 15)
    mock_logger.stop()
    # will except if the logger hangs.
    mock_logger.get(block=True, timeout=2)
    processed_sessions_publisher.close()
def client(msg):
    """Send one serialized protobuf message to the local server and print the ack."""
    # Request/Reply endpoint on localhost.
    port = "24124"
    binding = "tcp://localhost:%s" % port

    context = zmq.Context()
    client_socket = context.socket(zmq.REQ)
    client_socket.connect(binding)

    # Fire the single request...
    print("Sending Message of type \"%d\"" % msg.metadata.message_type)
    client_socket.send(msg.SerializeToString())

    # ...then block for the server's reply and decode it.
    reply = client_socket.recv()
    ack = gmessages.Message()
    ack.ParseFromString(reply)
    print("Received reply [%s]" % (ack.control.cmd))
def setUp(self):
    """Create REP sockets for the worker and frontend endpoints and build a Poller.

    BUG FIX: the frontend entry's "send" callable pointed at
    ``worker_socket.send_json`` instead of ``frontend_socket.send_json``, so
    replies intended for the frontend were sent on the worker socket.
    """
    context = zmq.Context()
    worker_socket = context.socket(zmq.REP)
    worker_socket.bind("ipc:///tmp/worker")
    frontend_socket = context.socket(zmq.REP)
    frontend_socket.bind("ipc:///tmp/frontend")
    sockets = {
        "worker": {
            "socket": worker_socket,
            "receive": worker_socket.recv_json,
            "send": worker_socket.send_json
        },
        "frontend": {
            "socket": frontend_socket,
            "receive": frontend_socket.recv_json,
            "send": frontend_socket.send_json
        },
    }
    time = TimeStub()
    self.poller = Poller(sockets, time)
def callback_func(out_dict, extra_prms=None):
    """Report a processed file path back over the configured ZMQ REQ channel.

    Requires ``extra_prms['zmq_impath_return_ch']``; otherwise an error is
    printed and nothing is sent.  (Python 2 code: print statements.)
    """
    # send back the file name
    print "\n\n", out_dict['clean_fn'], "\n\n"
    # return URL on ZMQ channel
    if not ('zmq_impath_return_ch' in extra_prms):
        print "rr_text_query::callback_func: error, need zmq_impath_return_ch in extra_prms"
    else:
        try:
            context = zmq.Context()
            impath_sender = context.socket(zmq.REQ)
            impath_sender.connect(extra_prms['zmq_impath_return_ch'])
            impath_sender.send(str(out_dict['clean_fn']))
            # REQ sockets must read the reply before close/reuse.
            impath_sender.recv()
        finally:
            # NOTE(review): if context/socket creation itself fails,
            # ``impath_sender`` is unbound here and this raises NameError.
            impath_sender.close()
            context.term()
def __init__(self, bind):
    """Initialise the broker: heartbeat settings, ROUTER socket bound to ``bind``, poller."""
    # Heartbeat configuration drives worker-liveness tracking.
    self._heartbeat_count = config['heartbeat_count']
    self._heartbeat_interval = config['heartbeat_interval']
    self._control_service = config['control_service']
    self._heartbeat_expiry = self._heartbeat_count * self._heartbeat_interval
    # Interval is configured in milliseconds; schedule the first beat.
    self._heartbeat_at = time() + 1e-3 * self._heartbeat_interval

    # Worker and service bookkeeping starts out empty.
    self._workers = {}
    self._waiting_workers = []
    self._services = {}

    router = zmq.Context().socket(zmq.ROUTER)
    router.linger = 0
    router.bind(bind)
    self._sock = router
    log.info('Broker listening', bind=bind)

    self._poller = zmq.Poller()
    self._poller.register(self._sock, zmq.POLLIN)
def __init__(self, bnc):
    """Greenlet holding a PAIR connection to the endpoint ``bnc``.

    Creates the ZMQ context/socket, registers it with a poller and connects
    immediately; ``connected`` reflects that the connect call returned.
    """
    Greenlet.__init__(self)
    self.log = logging.getLogger("{module}.{name}".format(
        module=self.__class__.__module__, name=self.__class__.__name__))
    # Identity for this endpoint instance.
    self.myUuid = uuid.uuid4()
    self.myUuidStr = str(self.myUuid)
    self.sutLostCallback = None
    self.loopGevent = None
    self.bnc = bnc
    self.connected = False
    self.context = zmq.Context()
    self.poller = zmq.Poller()
    self.socket = self.context.socket(zmq.PAIR)
    self.poller.register(self.socket, zmq.POLLIN)
    self.socket.connect(bnc)
    self.connected = True
    # Outstanding asynchronous results (keying scheme defined by callers).
    self.asyncResults = {}
def init(): _read_config() #init db and indexes #chat_handles config.mongo_db.chat_handles.ensure_index('wallet_id', unique=True) config.mongo_db.chat_handles.ensure_index('handle', unique=True) #chat_history config.mongo_db.chat_history.ensure_index('when') config.mongo_db.chat_history.ensure_index([ ("handle", pymongo.ASCENDING), ("when", pymongo.DESCENDING), ]) #set up zeromq publisher for sending out received events to connected socket.io clients import zmq.green as zmq from socketio import server as socketio_server zmq_context = zmq.Context() global zmq_publisher_eventfeed zmq_publisher_eventfeed = zmq_context.socket(zmq.PUB) zmq_publisher_eventfeed.bind('inproc://queue_eventfeed') logger.info("Starting up socket.io server (block event feed)...") sio_server = socketio_server.SocketIOServer( (module_config['SOCKETIO_HOST'], module_config['SOCKETIO_PORT']), SocketIOMessagesFeedServer(zmq_context), resource="socket.io", policy_server=False) sio_server.start() #start the socket.io server greenlets logger.info("Starting up socket.io server (chat feed)...") sio_server = socketio_server.SocketIOServer( (module_config['SOCKETIO_CHAT_HOST'], module_config['SOCKETIO_CHAT_PORT']), SocketIOChatFeedServer(), resource="socket.io", policy_server=False) sio_server.start() #start the socket.io server greenlets
def monitor():
    """Listen on a REP socket for provision/log task updates and relay them to socket.io.

    Receives JSON tasks on tcp://*:5500, acknowledges each with
    ``{'status': 'ok'}``, and emits a 'provision' or 'log' event to the
    owning user's socket.io room.  Unknown task types are skipped.
    """
    from . import current_app
    from .models import Provision
    context = zmq.Context()
    socket = context.socket(zmq.REP)
    socket.bind('tcp://*:5500')
    while True:
        task_json = socket.recv_json()
        # REP sockets must reply before the next recv.
        socket.send_json({'status': 'ok'})
        provision = Provision.objects.get(pk=task_json['id'])
        data = {}
        message_type = task_json['type']
        if task_json['type'] == 'provision':
            data = {
                'id': task_json['id'],
                'status': provision.status,
            }
        elif task_json['type'] == 'log':
            data = {
                'id': task_json['id'],
                'status': task_json['status'],
                'host': task_json['host'],
                'task': task_json['task'],
                'timestamp': task_json['timestamp'],
            }
            if task_json['log'] is not None:
                data['log'] = task_json['log']
        else:
            # Unrecognised task type: nothing to emit.
            continue
        current_app.socketio.emit(
            message_type,
            data,
            namespace='/onelove',
            room=str(provision.user.pk),
        )
    # Unreachable while the loop above runs forever; kept for a future break.
    # BUG FIX: was ``context.tern()`` — AttributeError had it ever executed.
    socket.close()
    context.term()
def main():
    """Wire two Actors over an in-process PUB socket and report message throughput.

    (Python 2 code: ``print (a) / (b)`` parses as a print statement of the
    whole quotient here; under Python 3 it would divide print()'s None.)
    """
    context = zmq.Context()
    socket = context.socket(zmq.PUB)
    socket.bind("inproc://test")
    one = Actor("one", "two", socket, context)
    two = Actor("two", "one", socket, context)
    one.start()
    two.start()
    # Let the actors come up before kicking off the exchange.
    sleep(1)
    socket.send("one 1")
    prev_c = two.counter
    prev_t = time()
    while True:
        now_c = two.counter
        now_t = time()
        # messages per second since the previous sample
        print (now_c - prev_c) / (now_t - prev_t)
        prev_c = now_c
        prev_t = now_t
        sleep(1)
def set_up():
    """Start the block-event and chat socket.io servers backed by an in-process event feed."""
    #set up zeromq publisher for sending out received events to connected socket.io clients
    import zmq.green as zmq
    from socketio import server as socketio_server
    zmq_context = zmq.Context()
    zmq_publisher_eventfeed = zmq_context.socket(zmq.PUB)
    zmq_publisher_eventfeed.bind('inproc://queue_eventfeed')

    #set event feed for shared access
    config.ZMQ_PUBLISHER_EVENTFEED = zmq_publisher_eventfeed

    logger.info("Starting up socket.io server (block event feed)...")
    sio_server = socketio_server.SocketIOServer(
        (config.SOCKETIO_HOST, config.SOCKETIO_PORT),
        SocketIOMessagesFeedServer(zmq_context),
        resource="socket.io", policy_server=False)
    sio_server.start()  #start the socket.io server greenlets

    logger.info("Starting up socket.io server (chat feed)...")
    sio_server = socketio_server.SocketIOServer(
        (config.SOCKETIO_CHAT_HOST, config.SOCKETIO_CHAT_PORT),
        SocketIOChatFeedServer(config.mongo_db),
        resource="socket.io", policy_server=False)
    sio_server.start()  #start the socket.io server greenlets
def start(self, start_dispatching=True):
    """Connect every RPC, sub and pub channel, then optionally begin dispatching.

    Channels that multiplex over the shared ZMQ context receive it as an
    argument; plain sub/pub channels manage their own connections.
    """
    self.context = zmq.Context()

    for chan in self.rpc_fanout_clients_channels:
        chan.connect(self.context)
    for chan in self.rpc_server_channels.values():
        chan.connect(self.context)
    for chan in self.sub_channels:
        chan.connect()
    for chan in self.mw_sub_channels:
        chan.connect(self.context)
    for chan in self.pub_channels:
        chan.connect()
    for chan in self.mw_pub_channels:
        chan.connect(self.context)

    if start_dispatching:
        self.start_dispatching()
def consume(events_port, event_queue):
    """
    Consume events coming in and put them in the event queue.

    :param events_port: The port to listen on for events.
    :param event_queue: A ``Queue`` instance, for putting the events in.
    """
    ctx = zmq.Context(1)
    sub = ctx.socket(zmq.SUB)
    sub.bind('tcp://*:{0}'.format(events_port))
    # Only worker events are of interest.
    sub.setsockopt(zmq.SUBSCRIBE, 'worker.event')
    while True:
        topic, payload = sub.recv_multipart()
        logger.debug('Received [{0}]: {1}'.format(topic, payload))
        event_queue.put(payload)
    # Unreachable with the endless loop above; kept from the original.
    sub.close()
    ctx.term()
def main():
    """
    The main flow of the application.
    """
    ctx = zmq.Context()
    sub = ctx.socket(zmq.SUB)
    # Connect to the first publicly available relay.
    sub.connect('tcp://relay-us-central-1.eve-emdr.com:8050')
    # Disable filtering.
    sub.setsockopt(zmq.SUBSCRIBE, "")

    # A greenlet pool caps the number of workers at a reasonable level.
    pool = Pool(size=MAX_NUM_POOL_WORKERS)

    print("Consumer daemon started, waiting for jobs...")
    print("Worker pool size: %d" % pool.size)

    # sub.recv() blocks while no messages are available, which keeps this
    # loop under control; work gets done whenever the pool has capacity.
    while True:
        pool.spawn(worker, sub.recv())
def _fetch_from_forwarder(self):
    """Fetch tasks from the forwarder server.

    Subscribes to every configured forwarder output address, filtered by this
    node's id, and handles each received Task in its own greenlet.

    :return: never returns (infinite receive loop).
    """
    ctx = zmq.Context()
    self.forwarder_client = ctx.socket(zmq.SUB)
    for address in self.config['FORWARDER_OUTPUT_ADDRESS_LIST']:
        self.forwarder_client.connect(address)
    # Only receive messages addressed to this node.
    self.forwarder_client.setsockopt(zmq.SUBSCRIBE, self.node_id)
    while True:
        topic, msg = self.forwarder_client.recv_multipart()
        task = shine_pb2.Task()
        task.ParseFromString(msg)
        logger.debug('task:\n%s', task)
        # Spawn-then-join per task so memory is not leaked (the greenlet is
        # collected once it finishes).
        job = gevent.spawn(self._handle_task, task)
        job.join()
def __init__(self, agent):
    """Set up the controller-facing downlink (SUB) and uplink (PUB) sockets.

    The downlink is subscribed to this agent's uuid so only messages
    addressed to it are delivered; only the downlink is registered with the
    poller.
    """
    self.log = logging.getLogger("{module}.{name}".format(
        module=self.__class__.__module__, name=self.__class__.__name__))
    self.agent = agent
    self.controllerDL = None
    self.controllerUL = None
    self.uplinkSocketLock = threading.Lock()
    self.poller = zmq.Poller()
    self.context = zmq.Context()
    self.dl_socket = self.context.socket(
        zmq.SUB)  # for downlink communication with controller
    # Python 3 needs the string variant of setsockopt for str topics.
    if sys.version_info.major >= 3:
        self.dl_socket.setsockopt_string(zmq.SUBSCRIBE, self.agent.uuid)
    else:
        self.dl_socket.setsockopt(zmq.SUBSCRIBE, self.agent.uuid)
    self.dl_socket.setsockopt(zmq.LINGER, 100)
    self.ul_socket = self.context.socket(
        zmq.PUB)  # for uplink communication with controller
    #register module socket in poller
    self.poller.register(self.dl_socket, zmq.POLLIN)
def receiveDatafeed(self, topics):
    '''Receive datafeed from the data publisher.

    Subscribes to the given topics on the configured publisher address and,
    for each received record, notifies observers with a one-row DataFrame
    indexed by security id.

    Parameters
    ----------
    topics : list of str
        topics that the receiver concerns.
    '''
    ctx = zmq.Context()
    sock = ctx.socket(zmq.SUB)
    # add concerned topics
    for topic in topics:
        sock.setsockopt_string(zmq.SUBSCRIBE, topic)
    # let's connect to the publisher
    sock.connect(self.config['MQ_SUB_ADDR'])
    while True:
        # decode raw data; expected wire format: "instId,datetime,price"
        raw = sock.recv().decode('utf-8')
        cols = ['secId', 'tradeDate', 'price']
        instId, tradeDatetime, price = raw.split(',')
        data = [
            self.secIds.get(instId, instId),
            dt.datetime.strptime(tradeDatetime, '%Y%m%d %H:%M:%S'),
            float(price)
        ]
        if len(data) == len(cols):
            # required data are all available
            df = pd.DataFrame(dict(zip(cols, data)), index=[0])
            df.set_index('secId', inplace=True)
            self.notifyAll(df)
        else:
            logging.warning(
                'Received data {rd:s} which is not enough to use.'.format(
                    rd=raw))
def cmdGreenlet(self):
    """Front ROUTER / back DEALER proxy for the command channel.

    Binds the public ROUTER on ``self.port`` and an in-process DEALER for
    the command workers, spawns ``nrCmdGreenlets`` worker greenlets, then
    shuttles multipart frames between the two sockets forever.
    """
    # Nonblocking, e.g the osis server contains a broker which queues internally the messages.
    self.cmdcontext = zmq.Context()
    frontend = self.cmdcontext.socket(zmq.ROUTER)
    backend = self.cmdcontext.socket(zmq.DEALER)
    frontend.bind("tcp://*:%s" % self.port)
    backend.bind("inproc://cmdworkers")
    # Initialize poll set
    poller = zmq.Poller()
    poller.register(frontend, zmq.POLLIN)
    poller.register(backend, zmq.POLLIN)
    workers = []
    for i in range(self.nrCmdGreenlets):
        workers.append(gevent.spawn(self.repCmdServer))
    j.logger.log("init cmd channel on port:%s for daemon:%s" % (self.port, self.name),
                 level=5, category="zdaemon.init")
    while True:
        socks = dict(poller.poll())
        if socks.get(frontend) == zmq.POLLIN:
            parts = frontend.recv_multipart()
            parts.append(parts[0])  # add session id at end
            # Prepend the routing id again before handing to the DEALER.
            backend.send_multipart([parts[0]] + parts)
        if socks.get(backend) == zmq.POLLIN:
            parts = backend.recv_multipart()
            frontend.send_multipart(
                parts[1:]
            )  # @todo dont understand why I need to remove first part of parts?
def _recv_loop(ns, path, sock_type, *addr):
    """Receive multipart messages from ``addr`` and feed them, via the
    namespace's callback chain, into its input queue.

    Loops while ``{path}/continue`` is truthy; each message passes through
    every callback in ``{path}/callbacks`` (each callback's return value
    feeds the next) before landing on ``{path}/in``.
    """
    _in = ns.V(f"{path}/in").value
    _name = ns.V(f"{path}/name").value  # NOTE(review): unused below
    _cb = ns.V(f"{path}/callbacks").value
    ctx = zmq.Context()
    socket = ctx.socket(sock_type)
    for a in addr:
        socket.connect(a)
    poller = zmq.Poller()
    poller.register(socket, zmq.POLLIN)
    while ns.V(f"{path}/continue").value:
        # Drain everything currently readable, then back off for 0.5s.
        while True:
            gevent.time.sleep(0)  # yield to other greenlets
            # NOTE(review): poll() takes milliseconds, so 1.0 is 1ms — confirm intent.
            s = dict(poller.poll(1.0))
            if socket in s and s[socket] == zmq.POLLIN:
                data = socket.recv_multipart()
                for fun_name in _cb:
                    gevent.time.sleep(0)
                    # callbacks are chained: each transforms the data
                    data = _cb[fun_name](ns, path, data)
                _in.put(data)
            else:
                break
        gevent.time.sleep(0.5)
    ctx.term()
def main():
    """Serve the websocket application and install clean-shutdown signal handlers.

    (Python 2 code: ``print opt`` statement.)
    """
    opt = process_command_line()
    print opt
    zmq_ctx = zmq.Context()
    # Route /websocket to the WS app, everything else to the wsgi app;
    # OrderedDict preserves the match order.
    resources = OrderedDict()
    resources['^/websocket'] = WSApplication
    resources['^/.*'] = app
    server = WebSocketServer(('0.0.0.0', opt.port), Resource(resources))

    def shutdown():
        # Stop the server, tear down the ZMQ context and exit the process.
        print("\nShutting down...")
        server.stop()
        zmq_ctx.destroy()
        sys.exit()

    gevent.signal(signal.SIGINT, shutdown)
    gevent.signal(signal.SIGTERM, shutdown)
    server.serve_forever()