class Subscriber(object):
    """Wraps a zmq SUB socket in a ZMQStream and dispatches every received
    message to subscriber_recv()."""

    def __init__(self, context, sub_address, sub_topics):
        """
        @param context - zmq Context used to create the SUB socket
        @param sub_address - endpoint this subscriber connects to
        @param sub_topics - topics of interest (stored only; the socket
               subscribes to everything -- see note below)
        """
        self.context = context
        self.subscriber_address = sub_address
        self.subscriber_topics = sub_topics
        socket = self.context.socket(zmq.SUB)
        ioloop = IOLoop.instance()
        self.subscriber = ZMQStream(socket, ioloop)
        # Drop any pending messages immediately on close.
        self.subscriber.setsockopt(zmq.LINGER, 0)
        self.subscriber.on_recv(callback=self.subscriber_recv)
        # Empty prefix subscribes to all topics; sub_topics is not applied
        # here -- NOTE(review): confirm whether per-topic filtering was
        # intended.
        self.subscriber.setsockopt(zmq.SUBSCRIBE, "")
        self.subscriber.connect(self.subscriber_address)

    def shutdown(self):
        """Detach the receive callback and disconnect the socket."""
        self.subscriber.on_recv(callback=None)
        self.subscriber.socket.disconnect(self.subscriber_address)
        self.subscriber = None

    def subscriber_recv(self, msg):
        """Receive hook; default implementation discards the message."""
        return
class Broker(object):
    """Event broker: receives events on a SUB socket bound at sub_uri and
    re-publishes validated events on a PUB socket bound at pub_uri."""

    def __init__(self, pub_uri=defaults.broker_pub_uri,
                 sub_uri=defaults.broker_sub_uri, patterns=None):
        """
        @param pub_uri - endpoint the outgoing PUB socket binds to
        @param sub_uri - endpoint the incoming SUB socket binds to
        @param patterns - optional list of subscription prefixes
        """
        self.pub_uri = pub_uri
        self.sub_uri = sub_uri
        self.patterns = patterns if patterns else []
        self.setup_logging()
        self.setup_zmq()
        self.setup_sockets()
        self.setup_subscriptions()
        self.setup_events()

    def setup_logging(self):
        self.log = logging.getLogger('zmqevt.broker')

    def setup_zmq(self):
        self.context = zmq.Context()

    def setup_sockets(self):
        # The broker binds both ends; peers connect to it.
        self.sub = ZMQStream(self.context.socket(zmq.SUB))
        self.sub.bind(self.sub_uri)
        self.pub = ZMQStream(self.context.socket(zmq.PUB))
        self.pub.bind(self.pub_uri)

    def setup_subscriptions(self):
        for p in self.patterns:
            self.subscribe(p)

    def subscribe(self, pattern):
        """Add a subscription prefix on the incoming SUB socket."""
        self.log.debug('Subcribe to "%s".' % pattern)
        self.sub.setsockopt(zmq.SUBSCRIBE, pattern)

    def setup_events(self):
        self.sub.on_recv(self.publish)

    def publish(self, msg):
        """Validate a two-part [topic, body] message and re-publish it."""
        assert len(msg) == 2, 'Received invalid message.'
        # This regenerates the event to ensure that we don't
        # pass on invalid data.
        try:
            evt = event.Event.load(msg)
        except Exception as detail:  # 'as' form works on py2.6+ and py3
            self.log.error('Error processing message: %s' % detail)
            return
        self.log.debug('Event: %s' % (str(evt.dump())))
        self.pub.send_multipart(evt.dump())
class RouterPubSubProxy:
    '''
    Proxy with one ROUTER front end and a PUB/SUB back end pair.

    Messages arriving on the front-end ROUTER are forwarded to the
    back-end PUB, which broadcasts them to all subscribers; subscriber
    results come back on the back-end SUB and are routed out through
    the front end again.  All endpoints are named IPC channels.
    '''

    def __init__(self, front, back_out, back_in, loop):
        '''
        Create and wire up the three proxy streams.

        @param front - channel name to be the routing stream
        @param back_out - channel name of the publishing stream
        @param back_in - channel name of result receiving stream
        @param loop - IOLoop
        '''
        self._loop = loop
        context = zmq.Context.instance()

        # Front end: ROUTER bound to its IPC channel; mandatory routing
        # so undeliverable messages raise instead of being dropped.
        router_addr = ZmqAddress(chan_name=front)
        self._front_stream = ZMQStream(context.socket(zmq.ROUTER),
                                       io_loop=loop)
        self._front_stream.setsockopt(zmq.ROUTER_MANDATORY, 1)
        self._front_stream.bind(router_addr.zmq_url())

        # Back end out: PUB bound for broadcasting to workers.
        pub_addr = ZmqAddress(chan_name=back_out)
        self._back_out_stream = ZMQStream(context.socket(zmq.PUB),
                                          io_loop=loop)
        self._back_out_stream.bind(pub_addr.zmq_url())

        # Back end in: SUB bound and subscribed to all traffic.
        sub_addr = ZmqAddress(chan_name=back_in)
        self._back_in_stream = ZMQStream(context.socket(zmq.SUB),
                                         io_loop=loop)
        self._back_in_stream.setsockopt(zmq.SUBSCRIBE, b'')
        self._back_in_stream.bind(sub_addr.zmq_url())

        def relay(src_name, dst_name, out_stream, msgs):
            # Forward a multipart message and flush immediately.
            log.info("Routing from {0} to {1} messages {2}"
                     .format(src_name, dst_name, msgs))
            out_stream.send_multipart(msgs)
            out_stream.flush()

        self._front_stream.on_recv(
            lambda msgs: relay(front, back_out,
                               self._back_out_stream, msgs))
        self._back_in_stream.on_recv(
            lambda msgs: relay(back_in, front,
                               self._front_stream, msgs))

    def start(self):
        '''
        Run the proxy's IOLoop (blocks until the loop stops).
        '''
        self._loop.start()
class LocalRequestProxy:
    '''
    Routes client requests arriving at a per-server front end on to the
    RouterPubSubProxy, which distributes them to the workers.
    '''

    def __init__(self, front_end_name, back_end_name, loop):
        '''
        Create and wire up the two proxy streams.

        @param front_end_name - name of the front end socket.  It will
               be initialized with the Router socket.
        @param back_end_name - name of the back end socket.  It will be
               initialized with the Dealer socket.
        @param loop - zmq IOLoop
        '''
        self._loop = loop
        context = zmq.Context.instance()

        # ROUTER front end bound over inproc; mandatory routing so
        # undeliverable messages raise instead of being dropped.
        router_addr = ZmqAddress(chan_name=front_end_name,
                                 transport=INPROC)
        self._front_end = ZMQStream(context.socket(zmq.ROUTER),
                                    io_loop=loop)
        self._front_end.setsockopt(zmq.ROUTER_MANDATORY, 1)
        self._front_end.bind(router_addr.zmq_url())

        # DEALER back end connected onward to the main proxy.
        dealer_addr = ZmqAddress(chan_name=back_end_name)
        self._back_end = ZMQStream(context.socket(zmq.DEALER),
                                   io_loop=loop)
        self._back_end.connect(dealer_addr.zmq_url())

        def relay(src_name, dst_name, out_stream, msgs):
            # Forward a multipart message and flush immediately.
            log.debug("Routing from {0} to {1} messages {2}"
                      .format(src_name, dst_name, msgs))
            out_stream.send_multipart(msgs)
            out_stream.flush()

        self._front_end.on_recv(
            lambda msgs: relay(front_end_name, back_end_name,
                               self._back_end, msgs))
        self._back_end.on_recv(
            lambda msgs: relay(back_end_name, front_end_name,
                               self._front_end, msgs))
def stream(self, sock_type, sock_addr, sock_bind,
           callback=None, subscribe=''):
    """Create a ZMQStream socket of the given type on sock_addr.

    The address template may contain a '%(port)' placeholder, which is
    filled with a randomly chosen port in [1024, 65535].  SUB sockets
    get the given subscribe prefix applied.  Returns a tuple of
    (stream, resolved_address).
    """
    assert self.ctx is not None
    resolved = sock_addr % {'port': random.randint(1024, 65535)}
    zstream = ZMQStream(self.ctx.socket(sock_type))
    if sock_type == zmq.SUB:
        zstream.setsockopt(zmq.SUBSCRIBE, subscribe)
    # Either bind or connect depending on the caller's role.
    if sock_bind:
        zstream.bind(resolved)
    else:
        zstream.connect(resolved)
    if callback:
        zstream.on_recv(callback)
    return (zstream, resolved)
def launch_scheduler(in_addr, out_addr, mon_addr, not_addr, config=None,
                     logname='ZMQ', log_addr=None, loglevel=logging.DEBUG,
                     scheme='lru', identity=b'task'):
    """Build the scheduler's four streams and run a TaskScheduler until
    interrupted.

    @param in_addr - endpoint to bind for the client-facing XREP stream
    @param out_addr - endpoint to bind for the engine-facing XREP stream
    @param mon_addr - endpoint the PUB monitor stream connects to
    @param not_addr - endpoint the SUB notification stream connects to
    @param config - optional plain dict, rewrapped into a Config
    @param scheme - name of a scheduling function looked up in globals()
    @param identity - socket identity applied to both XREP streams
    """
    from zmq.eventloop import ioloop
    from zmq.eventloop.zmqstream import ZMQStream

    if config:
        # unwrap dict back into Config
        config = Config(config)

    ctx = zmq.Context()
    loop = ioloop.IOLoop()
    ins = ZMQStream(ctx.socket(zmq.XREP), loop)
    ins.setsockopt(zmq.IDENTITY, identity)
    ins.bind(in_addr)
    outs = ZMQStream(ctx.socket(zmq.XREP), loop)
    outs.setsockopt(zmq.IDENTITY, identity)
    outs.bind(out_addr)
    mons = ZMQStream(ctx.socket(zmq.PUB), loop)
    mons.connect(mon_addr)
    nots = ZMQStream(ctx.socket(zmq.SUB), loop)
    # BUGFIX: subscribe option must be bytes on Python 3 (this file is
    # py3-style: print(..., file=...) and identity=b'task').
    nots.setsockopt(zmq.SUBSCRIBE, b'')
    nots.connect(not_addr)

    # Resolve the scheme name to its function; may be None if unknown.
    scheme = globals().get(scheme, None)
    # setup logging
    if log_addr:
        connect_logger(logname, ctx, log_addr, root="scheduler",
                       loglevel=loglevel)
    else:
        local_logger(logname, loglevel)

    scheduler = TaskScheduler(client_stream=ins, engine_stream=outs,
                              mon_stream=mons, notifier_stream=nots,
                              scheme=scheme, loop=loop, logname=logname,
                              config=config)
    scheduler.start()
    try:
        loop.start()
    except KeyboardInterrupt:
        print("interrupted, exiting...", file=sys.__stderr__)
class ZMQCameraPubSub(object):
    # Subscribes to a camera publisher stream and forwards received
    # frames to a user-supplied callback.  Each instance generates a
    # random 6-character name used as an extra subscription topic.

    def __init__(self, callback):
        # callback: invoked by the IOLoop with each received multipart
        # message; delivered with copy=False (zmq Frame objects).
        self.callback = callback
        self.name = "".join(
            random.choice(string.ascii_lowercase + string.digits)
            for x in range(6))

    def connect(self, stream):
        # stream: zmq endpoint URL of the publisher to connect to.
        self.context = zmq.Context()
        self.subscriber = self.context.socket(zmq.SUB)
        # self.subscriber.setsockopt(zmq.RCVHWM, 1)
        # self.subscriber.setsockopt(zmq.RCVBUF, 1*1024)
        self.subscriber.setsockopt(zmq.LINGER, 0)
        self.subscriber.connect(stream)
        # Wrap in a ZMQStream after connecting; later setsockopt calls go
        # through the stream wrapper.
        self.subscriber = ZMQStream(self.subscriber)
        self.subscriber.on_recv(self.callback, copy=False)
        # self.request.linger = 0
        # Subscribe to broadcast traffic and to this instance's own name.
        self.subscriber.setsockopt(zmq.SUBSCRIBE, b"")
        self.subscriber.setsockopt(zmq.SUBSCRIBE, self.name.encode('ascii'))

    def close(self):
        # Stop delivery and release the socket; idempotent via the None
        # guard.
        if self.subscriber:
            self.subscriber.stop_on_recv()
            self.subscriber.close()
            self.subscriber = None

    def subscribe(self, to, topic=''):
        # Add a "<to>" or "<to>.<topic>" subscription prefix.
        subscribeto = to
        if len(topic) > 0:
            subscribeto = f"{subscribeto}.{topic}"
        subscribeto = subscribeto.encode('ascii')
        self.subscriber.setsockopt(zmq.SUBSCRIBE, subscribeto)

    def unsubscribe(self, to, topic=''):
        # Remove a previously added "<to>" / "<to>.<topic>" subscription.
        subscribetopic = to
        if len(topic) > 0:
            subscribetopic = f"{subscribetopic}.{topic}"
        subscribetopic = subscribetopic.encode('ascii')
        self.subscriber.setsockopt(zmq.UNSUBSCRIBE, subscribetopic)
def launch_scheduler(in_addr, out_addr, mon_addr, not_addr, config=None,
                     logname='ZMQ', log_addr=None, loglevel=logging.DEBUG,
                     scheme='lru', identity=b'task'):
    """Build the scheduler's four streams and run a TaskScheduler until
    interrupted.

    @param in_addr - endpoint to bind for the client-facing XREP stream
    @param out_addr - endpoint to bind for the engine-facing XREP stream
    @param mon_addr - endpoint the PUB monitor stream connects to
    @param not_addr - endpoint the SUB notification stream connects to
    @param config - optional plain dict, rewrapped into a Config
    @param scheme - name of a scheduling function looked up in globals()
    @param identity - socket identity applied to both XREP streams
    """
    from zmq.eventloop import ioloop
    from zmq.eventloop.zmqstream import ZMQStream

    if config:
        # unwrap dict back into Config
        config = Config(config)

    ctx = zmq.Context()
    loop = ioloop.IOLoop()
    ins = ZMQStream(ctx.socket(zmq.XREP), loop)
    ins.setsockopt(zmq.IDENTITY, identity)
    ins.bind(in_addr)
    outs = ZMQStream(ctx.socket(zmq.XREP), loop)
    outs.setsockopt(zmq.IDENTITY, identity)
    outs.bind(out_addr)
    mons = ZMQStream(ctx.socket(zmq.PUB), loop)
    mons.connect(mon_addr)
    nots = ZMQStream(ctx.socket(zmq.SUB), loop)
    # BUGFIX: subscribe option must be bytes on Python 3 (this function
    # already uses print(..., file=...) and identity=b'task').
    nots.setsockopt(zmq.SUBSCRIBE, b'')
    nots.connect(not_addr)

    # Resolve the scheme name to its function; may be None if unknown.
    scheme = globals().get(scheme, None)
    # setup logging
    if log_addr:
        connect_logger(logname, ctx, log_addr, root="scheduler",
                       loglevel=loglevel)
    else:
        local_logger(logname, loglevel)

    scheduler = TaskScheduler(client_stream=ins, engine_stream=outs,
                              mon_stream=mons, notifier_stream=nots,
                              scheme=scheme, loop=loop, logname=logname,
                              config=config)
    scheduler.start()
    try:
        loop.start()
    except KeyboardInterrupt:
        print("interrupted, exiting...", file=sys.__stderr__)
class ZmqSubscriber(object):
    # Subscribes to messages from the central publisher and dispatches
    # them to handlers registered per topic prefix.  Also supports
    # replaying logged messages from files or stdin.
    # (Python 2 code: print statements, dict.iterkeys/iteritems.)

    def __init__(self, moduleName,
                 centralHost=SUBSCRIBER_OPT_DEFAULTS['centralHost'],
                 context=None,
                 centralPublishEndpoint=SUBSCRIBER_OPT_DEFAULTS[
                     'centralPublishEndpoint'],
                 replay=None):
        # moduleName: name of this module.  centralHost and
        # centralPublishEndpoint locate the central publisher.  replay:
        # optional list of message-log paths ('-' means stdin).
        self.moduleName = moduleName
        self.centralHost = centralHost
        if context is None:
            context = zmq.Context.instance()
        self.context = context
        self.centralPublishEndpoint = parseEndpoint(
            centralPublishEndpoint,
            defaultPort=DEFAULT_CENTRAL_PUBLISH_PORT,
            centralHost=self.centralHost)
        self.replayPaths = replay
        if self.replayPaths is None:
            self.replayPaths = []
        # handlers maps topicPrefix -> {counter: handler}.
        self.handlers = {}
        self.counter = 0
        self.deserializer = serializers.get_deserializer('json')
        self.stream = None

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        # Register command-line options, skipping any already present.
        if not parser.has_option('--centralHost'):
            parser.add_option('--centralHost',
                              default=SUBSCRIBER_OPT_DEFAULTS['centralHost'],
                              help='Host where central runs [%default]')
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName',
                              default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralPublishEndpoint'):
            parser.add_option(
                '--centralPublishEndpoint',
                default=SUBSCRIBER_OPT_DEFAULTS['centralPublishEndpoint'],
                help='Endpoint where central publishes messages [%default]')
        if not parser.has_option('--replay'):
            parser.add_option(
                '--replay',
                action='append',
                help=
                'Replay specified message log (can specify multiple times), or use - to read from stdin'
            )

    @classmethod
    def getOptionValues(cls, opts):
        # Collect only the option values that were actually set.
        result = {}
        for key in SUBSCRIBER_OPT_DEFAULTS.iterkeys():
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def start(self):
        # Open the SUB stream and begin routing incoming messages.
        sock = self.context.socket(zmq.SUB)
        self.stream = ZMQStream(sock)
        # causes problems with multiple instances
        #self.stream.setsockopt(zmq.IDENTITY, self.moduleName)
        self.stream.connect(self.centralPublishEndpoint)
        logging.info('zmq.subscriber: connected to central at %s',
                     self.centralPublishEndpoint)
        self.stream.on_recv(self.routeMessages)

    def routeMessages(self, messages):
        for msg in messages:
            self.routeMessage(msg)

    def routeMessage(self, msg):
        # Message format is '<topic>:<body>'.  topic keeps the trailing
        # colon for prefix matching; handlers receive the topic without it.
        colonIndex = msg.find(':')
        topic = msg[:(colonIndex + 1)]
        body = msg[(colonIndex + 1):]
        handled = 0
        for topicPrefix, registry in self.handlers.iteritems():
            if topic.startswith(topicPrefix):
                for handler in registry.itervalues():
                    handler(topic[:-1], body)
                handled = 1
        return handled

    def subscribeRaw(self, topicPrefix, handler):
        # Returns an opaque handlerId for use with unsubscribe().
        topicRegistry = self.handlers.setdefault(topicPrefix, {})
        if not topicRegistry:
            # First handler for this prefix: subscribe at the socket level.
            logging.info('zmq.subscriber: subscribe %s', topicPrefix)
            self.stream.setsockopt(zmq.SUBSCRIBE, topicPrefix)
        handlerId = (topicPrefix, self.counter)
        topicRegistry[self.counter] = handler
        self.counter += 1
        return handlerId

    def subscribeJson(self, topicPrefix, handler):
        # Wrap handler so the body is parsed as JSON (dot-dict access).
        def jsonHandler(topicPrefix, body):
            return handler(topicPrefix,
                           convertToDotDictRecurse(json.loads(body)))
        return self.subscribeRaw(topicPrefix, jsonHandler)

    def subscribeDjango(self, topicPrefix, handler):
        # Wrap handler so the body is deserialized as a Django model
        # instance; the handler gets the wrapped .object.
        def djangoHandler(topicPrefix, body):
            obj = json.loads(body)
            dataText = json.dumps([obj['data']])
            modelInstance = list(self.deserializer(dataText))[0]
            return handler(topicPrefix, modelInstance.object)
        return self.subscribeRaw(topicPrefix, djangoHandler)

    def unsubscribe(self, handlerId):
        topicPrefix, index = handlerId
        topicRegistry = self.handlers[topicPrefix]
        del topicRegistry[index]
        if not topicRegistry:
            # Last handler gone: drop the socket-level subscription too.
            logging.info('zmq.subscriber: unsubscribe %s', topicPrefix)
            self.stream.setsockopt(zmq.UNSUBSCRIBE, topicPrefix)

    def connect(self, endpoint):
        self.stream.connect(endpoint)

    def replay(self):
        # Replay logged messages through the normal routing path.
        numReplayed = 0
        numHandled = 0
        for replayPath in self.replayPaths:
            print '=== replaying messages from %s' % replayPath
            if replayPath == '-':
                replayFile = sys.stdin
            else:
                replayFile = open(replayPath, 'rb')
            stream = LogParser(replayFile)
            for rec in stream:
                numReplayed += 1
                numHandled += self.routeMessage(rec.msg)
                if numReplayed % 10000 == 0:
                    # NOTE(review): string literal reconstructed across an
                    # extraction line break -- confirm against original.
                    print 'replayed %d messages, %d handled' % (numReplayed, numHandled)
class ZmqSubscriber(object):
    # Subscribes to messages from the central publisher and dispatches
    # them to handlers registered per topic prefix.  Also supports
    # replaying logged messages from files or stdin.
    # (Python 2 code: print statements, dict.iterkeys/iteritems.)

    def __init__(self, moduleName,
                 centralHost=SUBSCRIBER_OPT_DEFAULTS['centralHost'],
                 context=None,
                 centralPublishEndpoint=SUBSCRIBER_OPT_DEFAULTS['centralPublishEndpoint'],
                 replay=None):
        # moduleName: name of this module.  centralHost and
        # centralPublishEndpoint locate the central publisher.  replay:
        # optional list of message-log paths ('-' means stdin).
        self.moduleName = moduleName
        self.centralHost = centralHost
        if context is None:
            context = zmq.Context.instance()
        self.context = context
        self.centralPublishEndpoint = parseEndpoint(
            centralPublishEndpoint,
            defaultPort=DEFAULT_CENTRAL_PUBLISH_PORT,
            centralHost=self.centralHost)
        self.replayPaths = replay
        if self.replayPaths is None:
            self.replayPaths = []
        # handlers maps topicPrefix -> {counter: handler}.
        self.handlers = {}
        self.counter = 0
        self.deserializer = serializers.get_deserializer('json')
        self.stream = None

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        # Register command-line options, skipping any already present.
        if not parser.has_option('--centralHost'):
            parser.add_option('--centralHost',
                              default=SUBSCRIBER_OPT_DEFAULTS['centralHost'],
                              help='Host where central runs [%default]')
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName',
                              default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralPublishEndpoint'):
            parser.add_option('--centralPublishEndpoint',
                              default=SUBSCRIBER_OPT_DEFAULTS['centralPublishEndpoint'],
                              help='Endpoint where central publishes messages [%default]')
        if not parser.has_option('--replay'):
            parser.add_option('--replay',
                              action='append',
                              help='Replay specified message log (can specify multiple times), or use - to read from stdin')

    @classmethod
    def getOptionValues(cls, opts):
        # Collect only the option values that were actually set.
        result = {}
        for key in SUBSCRIBER_OPT_DEFAULTS.iterkeys():
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def start(self):
        # Open the SUB stream and begin routing incoming messages.
        sock = self.context.socket(zmq.SUB)
        self.stream = ZMQStream(sock)
        # causes problems with multiple instances
        #self.stream.setsockopt(zmq.IDENTITY, self.moduleName)
        self.stream.connect(self.centralPublishEndpoint)
        logging.info('zmq.subscriber: connected to central at %s',
                     self.centralPublishEndpoint)
        self.stream.on_recv(self.routeMessages)

    def routeMessages(self, messages):
        for msg in messages:
            self.routeMessage(msg)

    def routeMessage(self, msg):
        # Message format is '<topic>:<body>'.  topic keeps the trailing
        # colon for prefix matching; handlers receive the topic without it.
        colonIndex = msg.find(':')
        topic = msg[:(colonIndex + 1)]
        body = msg[(colonIndex + 1):]
        handled = 0
        for topicPrefix, registry in self.handlers.iteritems():
            if topic.startswith(topicPrefix):
                for handler in registry.itervalues():
                    handler(topic[:-1], body)
                handled = 1
        return handled

    def subscribeRaw(self, topicPrefix, handler):
        # Returns an opaque handlerId for use with unsubscribe().
        topicRegistry = self.handlers.setdefault(topicPrefix, {})
        if not topicRegistry:
            # First handler for this prefix: subscribe at the socket level.
            logging.info('zmq.subscriber: subscribe %s', topicPrefix)
            self.stream.setsockopt(zmq.SUBSCRIBE, topicPrefix)
        handlerId = (topicPrefix, self.counter)
        topicRegistry[self.counter] = handler
        self.counter += 1
        return handlerId

    def subscribeJson(self, topicPrefix, handler):
        # Wrap handler so the body is parsed as JSON (dot-dict access).
        def jsonHandler(topicPrefix, body):
            return handler(topicPrefix,
                           convertToDotDictRecurse(json.loads(body)))
        return self.subscribeRaw(topicPrefix, jsonHandler)

    def subscribeDjango(self, topicPrefix, handler):
        # Wrap handler so the body is deserialized as a Django model
        # instance; the handler gets the wrapped .object.
        def djangoHandler(topicPrefix, body):
            obj = json.loads(body)
            dataText = json.dumps([obj['data']])
            modelInstance = list(self.deserializer(dataText))[0]
            return handler(topicPrefix, modelInstance.object)
        return self.subscribeRaw(topicPrefix, djangoHandler)

    def unsubscribe(self, handlerId):
        topicPrefix, index = handlerId
        topicRegistry = self.handlers[topicPrefix]
        del topicRegistry[index]
        if not topicRegistry:
            # Last handler gone: drop the socket-level subscription too.
            logging.info('zmq.subscriber: unsubscribe %s', topicPrefix)
            self.stream.setsockopt(zmq.UNSUBSCRIBE, topicPrefix)

    def connect(self, endpoint):
        self.stream.connect(endpoint)

    def replay(self):
        # Replay logged messages through the normal routing path.
        numReplayed = 0
        numHandled = 0
        for replayPath in self.replayPaths:
            print '=== replaying messages from %s' % replayPath
            if replayPath == '-':
                replayFile = sys.stdin
            else:
                replayFile = open(replayPath, 'rb')
            stream = LogParser(replayFile)
            for rec in stream:
                numReplayed += 1
                numHandled += self.routeMessage(rec.msg)
                if numReplayed % 10000 == 0:
                    # NOTE(review): string literal reconstructed across an
                    # extraction line break -- confirm against original.
                    print 'replayed %d messages, %d handled' % (numReplayed, numHandled)
class ZmqCentral(object):
    # Central hub: forwards pub/sub traffic between modules via a
    # FORWARDER device, logs every message, answers RPC queries, and
    # tracks module liveness through heartbeats.
    # (Python 2 code: iteritems, implicit str sockopts.)

    def __init__(self, opts):
        self.opts = opts
        # info maps moduleName -> last heartbeat params (plus 'timeout').
        self.info = {}

    def announceConnect(self, moduleName, params):
        logging.info('module %s connected', moduleName)
        self.injectStream.send('central.connect.%s:%s'
                               % (moduleName, json.dumps(params)))

    def announceDisconnect(self, moduleName):
        logging.info('module %s disconnected', moduleName)
        self.injectStream.send(
            'central.disconnect.%s:%s'
            % (moduleName,
               json.dumps({'timestamp': str(getTimestamp())})))

    def logMessage(self, msg, posixTime=None, attachmentDir='-'):
        # Record format: '@@@ <timestamp> <length> <attachmentDir> <msg>\n'.
        mlog = self.messageLog
        mlog.write('@@@ %d %d %s ' % (getTimestamp(posixTime), len(msg),
                                      attachmentDir))
        mlog.write(msg)
        mlog.write('\n')

    def logMessageWithAttachments0(self, msg):
        parsed = parseMessage(msg)
        posixTime = time.time()
        # construct attachment directory
        dt = datetime.datetime.utcfromtimestamp(posixTime)
        dateText = dt.strftime('%Y-%m-%d')
        timeText = dt.strftime('%H-%M-%S') + '.%06d' % dt.microsecond
        uniq = '%08x' % random.getrandbits(32)
        attachmentSuffix = os.path.join('attachments', dateText, timeText,
                                        parsed['topic'], uniq)
        attachmentPath = os.path.join(self.logDir, attachmentSuffix)
        os.makedirs(attachmentPath)
        # write attachments to attachment directory
        for attachment in parsed['attachments']:
            fullName = os.path.join(attachmentPath,
                                    attachment.get_filename())
            open(fullName, 'wb').write(attachment.get_payload())
        # log message with a pointer to the attachment directory
        self.logMessage(':'.join((parsed['topic'], parsed['json'])),
                        posixTime, attachmentSuffix)

    def logMessageWithAttachments(self, msg):
        # Best-effort wrapper: failures are logged, never propagated.
        try:
            return self.logMessageWithAttachments0(msg)
        except:  # pylint: disable=W0702
            self.logException('logging message with attachments')

    def handleHeartbeat(self, params):
        # Update liveness info; announce (re)connect when the module is
        # new or its pub endpoint changed.
        moduleName = params['module'].encode('utf-8')
        now = getTimestamp()
        oldInfo = self.info.get(moduleName, None)
        if oldInfo:
            if oldInfo.get('pub', None) != params.get('pub', None):
                self.announceDisconnect(moduleName)
                self.announceConnect(moduleName, params)
        else:
            self.announceConnect(moduleName, params)
        self.info[moduleName] = params
        # keepalive presumably in microseconds (DEFAULT_KEEPALIVE_US) --
        # timeout is the absolute deadline for the next heartbeat.
        keepalive = params.get('keepalive', DEFAULT_KEEPALIVE_US)
        params['timeout'] = now + keepalive
        return 'ok'

    def handleInfo(self):
        return self.info

    def logException(self, whileClause):
        # Log the current exception with traceback and context.
        errClass, errObject, errTB = sys.exc_info()[:3]
        errText = '%s.%s: %s' % (errClass.__module__, errClass.__name__,
                                 str(errObject))
        logging.warning(''.join(traceback.format_tb(errTB)))
        logging.warning(errText)
        logging.warning('[error while %s at time %s]', whileClause,
                        getTimestamp())

    def handleMessages(self, messages):
        # Log every monitored message; heartbeat messages additionally
        # update the liveness table.
        for msg in messages:
            if hasAttachments(msg):
                self.logMessageWithAttachments(msg)
            else:
                self.logMessage(msg)
            if msg.startswith('central.heartbeat.'):
                try:
                    _topic, body = msg.split(':', 1)
                    self.handleHeartbeat(json.loads(body))
                except:  # pylint: disable=W0702
                    self.logException('handling heartbeat')

    def handleRpcCall(self, messages):
        # Minimal JSON-RPC-style handler; only the 'info' method exists.
        for msg in messages:
            try:
                call = json.loads(msg)
                callId = call['id']
            except:  # pylint: disable=W0702
                self.rpcStream.send(json.dumps({
                    'result': None,
                    'error': 'malformed request'
                }))
            try:
                method = call['method']
                _params = call['params']
                if method == 'info':
                    result = self.handleInfo()
                else:
                    raise ValueError('unknown method %s' % method)
                self.rpcStream.send(json.dumps({
                    'result': result,
                    'error': None,
                    'id': callId
                }))
            except:  # pylint: disable=W0702
                self.logException('handling rpc message')
                errClass, errObject = sys.exc_info()[:2]
                errText = '%s.%s: %s' % (errClass.__module__,
                                         errClass.__name__,
                                         str(errObject))
                # NOTE(review): if the json.loads above failed, 'call' /
                # 'callId' may be unbound here -- confirm intended flow.
                self.rpcStream.send(json.dumps({
                    'result': None,
                    'error': errText,
                    'id': callId
                }))

    def handleDisconnectTimer(self):
        # Periodic sweep: drop modules whose heartbeat timeout passed.
        now = getTimestamp()
        disconnectModules = []
        for moduleName, entry in self.info.iteritems():
            timeout = entry.get('timeout', None)
            if timeout is not None and now > timeout:
                disconnectModules.append(moduleName)
        for moduleName in disconnectModules:
            self.announceDisconnect(moduleName)
            del self.info[moduleName]

    def readyLog(self, pathTemplate, timestamp):
        # Resolve a log file path.  Templates containing '%s' get the
        # timestamp substituted and a '<template % latest>' symlink.
        if '%s' in pathTemplate:
            timeText = timestamp.strftime('%Y-%m-%d-%H-%M-%S')
            logFile = pathTemplate % timeText
        else:
            logFile = pathTemplate
        if not os.path.exists(self.logDir):
            os.makedirs(self.logDir)
        logPath = os.path.join(self.logDir, logFile)
        if '%s' in pathTemplate:
            latestPath = os.path.join(self.logDir, pathTemplate % 'latest')
            if os.path.islink(latestPath):
                os.unlink(latestPath)
            os.symlink(logFile, latestPath)
        return logPath

    def start(self):
        # open log files
        now = datetime.datetime.utcnow()
        self.logDir = os.path.abspath(self.opts.logDir)
        self.messageLogPath = self.readyLog(self.opts.messageLog, now)
        self.messageLog = open(self.messageLogPath, 'a')
        self.consoleLogPath = self.readyLog(self.opts.consoleLog, now)
        rootLogger = logging.getLogger()
        rootLogger.setLevel(logging.DEBUG)
        fmt = logging.Formatter('%(asctime)s - %(levelname)-7s - %(message)s')
        fmt.converter = time.gmtime  # log timestamps in UTC
        fh = logging.FileHandler(self.consoleLogPath)
        fh.setFormatter(fmt)
        fh.setLevel(logging.DEBUG)
        rootLogger.addHandler(fh)
        if self.opts.foreground:
            ch = logging.StreamHandler()
            ch.setLevel(logging.DEBUG)
            ch.setFormatter(fmt)
            rootLogger.addHandler(ch)

        # daemonize
        if self.opts.foreground:
            logging.info('staying in foreground')
        else:
            logging.info('daemonizing')
            # double-fork, detach from tty, redirect stdout/stderr to
            # /dev/null
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.setsid()
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.chdir('/')
            os.close(1)
            os.close(2)
            nullFd = os.open('/dev/null', os.O_RDWR)
            os.dup2(nullFd, 1)
            os.dup2(nullFd, 2)

        try:
            # set up zmq
            self.context = zmq.Context.instance()
            self.rpcStream = ZMQStream(self.context.socket(zmq.REP))
            self.rpcStream.bind(self.opts.rpcEndpoint)
            self.rpcStream.on_recv(self.handleRpcCall)

            # FORWARDER device relays SUB -> PUB in its own thread.
            self.forwarder = ThreadDevice(zmq.FORWARDER, zmq.SUB, zmq.PUB)
            self.forwarder.setsockopt_in(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_out(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_in(zmq.SUBSCRIBE, '')
            self.forwarder.setsockopt_out(zmq.HWM, self.opts.highWaterMark)
            self.forwarder.bind_in(self.opts.subscribeEndpoint)
            self.forwarder.bind_in(INJECT_ENDPOINT)
            self.forwarder.bind_out(self.opts.publishEndpoint)
            self.forwarder.bind_out(MONITOR_ENDPOINT)
            for entry in self.opts.subscribeTo:
                try:
                    moduleName, endpoint = entry.split('@')
                    endpoint = parseEndpoint(endpoint)
                except ValueError:
                    raise ValueError(
                        '--subscribeTo argument "%s" is not in the format "<moduleName>@<endpoint>"'
                        % entry)
                self.forwarder.connect_in(endpoint)
                self.info[moduleName] = {'module': moduleName,
                                         'pub': endpoint}
            self.forwarder.start()
            time.sleep(0.1)  # wait for forwarder to bind sockets

            # Monitor everything the forwarder emits.
            self.monStream = ZMQStream(self.context.socket(zmq.SUB))
            self.monStream.setsockopt(zmq.SUBSCRIBE, '')
            self.monStream.connect(MONITOR_ENDPOINT)
            self.monStream.on_recv(self.handleMessages)

            # Inject central's own announcements into the forwarder.
            self.injectStream = ZMQStream(self.context.socket(zmq.PUB))
            self.injectStream.connect(INJECT_ENDPOINT)

            # Sweep for dead modules every 5 seconds.
            self.disconnectTimer = ioloop.PeriodicCallback(
                self.handleDisconnectTimer, 5000)
            self.disconnectTimer.start()
        except:  # pylint: disable=W0702
            errClass, errObject, errTB = sys.exc_info()[:3]
            errText = '%s.%s: %s' % (errClass.__module__,
                                     errClass.__name__, str(errObject))
            logging.error(''.join(traceback.format_tb(errTB)))
            logging.error(errText)
            logging.error('[error during startup -- exiting]')
            sys.exit(1)

    def shutdown(self):
        self.messageLog.flush()
class Subscriber(object):
    """Subscribes to zmqevt events and fans each valid event out to all
    registered callbacks."""

    def __init__(self, sub_uri=defaults.subscriber_sub_uri,
                 patterns=None, callbacks=None):
        """
        @param sub_uri - endpoint the SUB socket connects to
        @param patterns - optional list of subscription prefixes
        @param callbacks - optional list of (func, data) pairs
        """
        self.sub_uri = sub_uri
        self.patterns = patterns if patterns else []
        self.callbacks = callbacks if callbacks else []
        self.setup_logging()
        self.setup_zmq()
        self.setup_sockets()
        self.setup_subscriptions()
        self.setup_events()

    def setup_logging(self):
        self.log = logging.getLogger('zmqevt.subscriber')

    def setup_zmq(self):
        self.context = zmq.Context()

    def setup_sockets(self):
        self.sub = ZMQStream(self.context.socket(zmq.SUB))
        self.sub.connect(self.sub_uri)

    def setup_subscriptions(self):
        for p in self.patterns:
            self.subscribe(p)

    def subscribe(self, pattern):
        """Add a subscription prefix on the SUB socket."""
        self.log.debug('Subcribe to "%s".' % pattern)
        self.sub.setsockopt(zmq.SUBSCRIBE, pattern)

    def setup_events(self):
        self.sub.on_recv(self.on_recv)

    def register_callback(self, func, data=None):
        """Register func to be called as func(evt, data=data) for every
        event."""
        self.callbacks.append((func, data))

    def unregister_callback(self, func):
        """Remove every registration of func."""
        self.callbacks = [x for x in self.callbacks if x[0] is not func]

    def on_recv(self, msg):
        """Validate a two-part [topic, body] message and dispatch it."""
        self.log.debug('Receive: %s' % (str(msg)))
        assert len(msg) == 2, 'Received invalid message.'
        # This regenerates the event to ensure that we don't
        # pass on invalid data.
        try:
            evt = event.Event.load(msg)
        except Exception as detail:  # 'as' form works on py2.6+ and py3
            self.log.error('Error processing message: %s' % detail)
            return
        self.log.debug('Event: %s' % (str(evt.dump())))
        for func, data in self.callbacks:
            func(evt, data=data)
class ZmqSubscriber(object):
    """Subscribes to central's publish endpoint and dispatches incoming
    messages to handlers registered per topic prefix."""

    def __init__(self, moduleName, context=None,
                 centralPublishEndpoint=SUBSCRIBER_OPT_DEFAULTS['centralPublishEndpoint']):
        """
        @param moduleName - name for this module (used as identity)
        @param context - optional shared zmq Context
        @param centralPublishEndpoint - where central publishes messages
        """
        self.moduleName = moduleName
        if context is None:
            context = zmq.Context.instance()
        self.context = context
        self.centralPublishEndpoint = parseEndpoint(
            centralPublishEndpoint,
            defaultPort=DEFAULT_CENTRAL_PUBLISH_PORT)
        # handlers maps topic -> {counter: handler}.
        self.handlers = {}
        self.counter = 0
        self.deserializer = serializers.get_deserializer('json')

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        # Register command-line options, skipping any already present.
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName',
                              default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralPublishEndpoint'):
            parser.add_option('--centralPublishEndpoint',
                              default=SUBSCRIBER_OPT_DEFAULTS['centralPublishEndpoint'],
                              help='Endpoint where central publishes messages [%default]')

    @classmethod
    def getOptionValues(cls, opts):
        # Collect only the option values that were actually set.
        result = {}
        for key in SUBSCRIBER_OPT_DEFAULTS.iterkeys():
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def start(self):
        """Open the SUB stream and begin routing incoming messages."""
        sock = self.context.socket(zmq.SUB)
        self.stream = ZMQStream(sock)
        self.stream.setsockopt(zmq.IDENTITY, self.moduleName)
        self.stream.connect(self.centralPublishEndpoint)
        self.stream.on_recv(self.routeMessage)

    def routeMessage(self, messages):
        """Dispatch each '<topic>:<body>' message to the handlers of the
        first matching topic prefix."""
        for msg in messages:
            colonIndex = msg.find(':')
            topic = msg[:(colonIndex + 1)]
            body = msg[(colonIndex + 1):]
            for topicPrefix, registry in self.handlers.iteritems():
                if topic.startswith(topicPrefix):
                    # BUGFIX: dispatch inside the search loop.  The old
                    # code broke out and used topicRegistry afterwards,
                    # raising NameError when no prefix matched (and
                    # reusing a stale registry across messages).
                    for handler in registry.itervalues():
                        handler(topic[:-1], body)
                    break

    def subscribeRaw(self, topic, handler):
        """Register handler for topic; returns an id for unsubscribe()."""
        topicRegistry = self.handlers.setdefault(topic, {})
        if not topicRegistry:
            # First handler for this topic: subscribe at the socket level.
            self.stream.setsockopt(zmq.SUBSCRIBE, topic)
        handlerId = (topic, self.counter)
        topicRegistry[self.counter] = handler
        self.counter += 1
        return handlerId

    def subscribeJson(self, topic, handler):
        # Wrap handler so the body is parsed as JSON first.
        def jsonHandler(topic, body):
            return handler(topic, json.loads(body))
        return self.subscribeRaw(topic, jsonHandler)

    def subscribeDjango(self, topic, handler):
        # Wrap handler so the body is deserialized via Django's JSON
        # deserializer.
        def djangoHandler(topic, body):
            obj = json.loads(body)
            dataText = json.dumps([obj['data']])
            # BUGFIX: the deserializer yields an iterator, which is not
            # subscriptable; materialize it before indexing (matches the
            # sibling ZmqSubscriber implementations).
            modelInstance = list(self.deserializer(dataText))[0]
            return handler(topic, modelInstance)
        return self.subscribeRaw(topic, djangoHandler)

    def unsubscribe(self, handlerId):
        """Remove a handler; drops the socket subscription when the last
        handler for the topic is gone."""
        topic, index = handlerId
        topicRegistry = self.handlers[topic]
        del topicRegistry[index]
        if not topicRegistry:
            self.stream.setsockopt(zmq.UNSUBSCRIBE, topic)

    def connect(self, endpoint):
        self.stream.connect(endpoint)
def init_hub(self):
    """construct

    Builds all hub-side sockets (registration, heartbeat, notifier,
    monitor, resubmit), connects the DB backend, assembles the
    engine/client connection dicts, and creates the Hub instance.
    """
    # Interface templates; the trailing %i slot is filled with a port
    # number per socket below.
    client_iface = "%s://%s:" % (self.client_transport, self.client_ip) + "%i"
    engine_iface = "%s://%s:" % (self.engine_transport, self.engine_ip) + "%i"

    ctx = self.context
    loop = self.loop

    # Registrar socket
    q = ZMQStream(ctx.socket(zmq.ROUTER), loop)
    q.bind(client_iface % self.regport)
    self.log.info("Hub listening on %s for registration.",
                  client_iface % self.regport)
    if self.client_ip != self.engine_ip:
        # Engines use a different interface; bind registration there too.
        q.bind(engine_iface % self.regport)
        self.log.info("Hub listening on %s for registration.",
                      engine_iface % self.regport)

    ### Engine connections ###
    # heartbeat
    hpub = ctx.socket(zmq.PUB)
    hpub.bind(engine_iface % self.hb[0])
    hrep = ctx.socket(zmq.ROUTER)
    hrep.bind(engine_iface % self.hb[1])
    self.heartmonitor = HeartMonitor(loop=loop, config=self.config,
                                     log=self.log,
                                     pingstream=ZMQStream(hpub, loop),
                                     pongstream=ZMQStream(hrep, loop)
                                     )

    ### Client connections ###
    # Notifier socket
    n = ZMQStream(ctx.socket(zmq.PUB), loop)
    n.bind(client_iface % self.notifier_port)

    ### build and launch the queues ###
    # monitor socket: subscribes to everything, bound on both the
    # monitor URL and an inproc endpoint.
    sub = ctx.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, b"")
    sub.bind(self.monitor_url)
    sub.bind('inproc://monitor')
    sub = ZMQStream(sub, loop)

    # connect the db
    self.log.info('Hub using DB backend: %r' % (self.db_class.split()[-1]))
    # cdir = self.config.Global.cluster_dir
    self.db = import_item(str(self.db_class))(session=self.session.session,
                                              config=self.config,
                                              log=self.log)
    # NOTE(review): presumably gives the DB backend time to settle --
    # confirm why this fixed delay is needed.
    time.sleep(.25)
    try:
        scheme = self.config.TaskScheduler.scheme_name
    except AttributeError:
        # Fall back to the TaskScheduler default when not configured.
        from .scheduler import TaskScheduler
        scheme = TaskScheduler.scheme_name.get_default_value()

    # build connection dicts
    self.engine_info = {
        'control': engine_iface % self.control[1],
        'mux': engine_iface % self.mux[1],
        'heartbeat': (engine_iface % self.hb[0], engine_iface % self.hb[1]),
        'task': engine_iface % self.task[1],
        'iopub': engine_iface % self.iopub[1],
        # 'monitor' : engine_iface%self.mon_port,
    }

    self.client_info = {
        'control': client_iface % self.control[0],
        'mux': client_iface % self.mux[0],
        'task': (scheme, client_iface % self.task[0]),
        'iopub': client_iface % self.iopub[0],
        'notification': client_iface % self.notifier_port
    }

    self.log.debug("Hub engine addrs: %s", self.engine_info)
    self.log.debug("Hub client addrs: %s", self.client_info)

    # resubmit stream
    r = ZMQStream(ctx.socket(zmq.DEALER), loop)
    # client task info is (scheme, url); take the url part.
    url = util.disambiguate_url(self.client_info['task'][-1])
    r.setsockopt(zmq.IDENTITY, self.session.bsession)
    r.connect(url)

    self.hub = Hub(loop=loop, session=self.session, monitor=sub,
                   heartmonitor=self.heartmonitor, query=q, notifier=n,
                   resubmit=r, db=self.db,
                   engine_info=self.engine_info,
                   client_info=self.client_info, log=self.log)
class ZMQNodePubSub(object):
    """Client-side DEALER/SUB pair for a node: requests go over the DEALER
    stream to the node's router, events arrive on the SUB stream from the
    node's publisher."""

    def __init__(self, node, request_callback, subscribe_callback):
        self.callback = request_callback
        self.subscribe_callback = subscribe_callback
        self.node = node

    def connect(self):
        """Open both streams and attach their receive callbacks."""
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.DEALER)
        self.socket.connect(self.node.router_address)
        self.stream = ZMQStream(self.socket)
        self.stream.setsockopt(zmq.LINGER, 0)
        self.stream.on_recv(self.callback)

        sub_socket = self.context.socket(zmq.SUB)
        sub_socket.connect(self.node.publisher_address)
        self.subscriber = ZMQStream(sub_socket)
        self.subscriber.setsockopt(zmq.LINGER, 0)
        self.subscriber.on_recv(self.subscribe_callback)

    def subscribe(self, to, topic=''):
        """Subscribe to ``<node identity>.<to>[.<topic>]`` on the SUB stream."""
        subscribeto = f"{to}.{topic}" if len(topic) > 0 else to
        prefixed = self.node.identity + b'.' + subscribeto.encode('ascii')
        self.subscriber.setsockopt(zmq.SUBSCRIBE, prefixed)

    def unsubscribe(self, to, topic=''):
        """Drop the subscription for ``<to>[.<topic>]`` (no identity prefix,
        mirroring the original behaviour — it differs from subscribe())."""
        subscribetopic = f"{to}.{topic}" if len(topic) > 0 else to
        self.subscriber.setsockopt(zmq.UNSUBSCRIBE, subscribetopic.encode('ascii'))

    def close(self):
        """Detach callbacks and close both streams; idempotent."""
        if self.subscriber:
            self.subscriber.stop_on_recv()
            self.subscriber.close()
            self.subscriber = None
        if self.stream:
            self.stream.stop_on_recv()
            self.stream.close()
            self.stream = None

    def make_request(self, target, action, msg=None):
        """Run ``action`` on the node. An action of the form
        '<kind>.<cmd>[...]' dispatches '<cmd>'; otherwise the whole action
        name is used. The payload is wrapped as {"data": msg}."""
        _kind, *cmds = action.split(".")
        method = cmds[0] if cmds else action
        wrapped = {"data": msg}
        return self.node.run_action(method, wrapped, target)
class ZmqPublisher(object):
    """Publishes messages toward the central forwarder and on its own PUB
    endpoint, and sends periodic heartbeats so central can track liveness."""

    def __init__(self, moduleName, context=None,
                 centralSubscribeEndpoint=PUBLISHER_OPT_DEFAULTS['centralSubscribeEndpoint'],
                 publishEndpoint=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                 heartbeatPeriodMsecs=PUBLISHER_OPT_DEFAULTS['heartbeatPeriodMsecs'],
                 highWaterMark=PUBLISHER_OPT_DEFAULTS['highWaterMark']):
        self.moduleName = moduleName
        if context is None:
            context = zmq.Context.instance()
        self.context = context
        self.centralSubscribeEndpoint = parseEndpoint(centralSubscribeEndpoint,
                                                      defaultPort=DEFAULT_CENTRAL_SUBSCRIBE_PORT)
        # 'random' port is resolved at start() via bind_to_random_port()
        self.publishEndpoint = parseEndpoint(publishEndpoint, defaultPort='random')
        self.heartbeatPeriodMsecs = heartbeatPeriodMsecs
        self.highWaterMark = highWaterMark
        self.pubStream = None
        self.heartbeatTimer = None
        self.serializer = serializers.get_serializer('json')()

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        """Register this publisher's command-line options on *parser*,
        skipping any option the parser already has."""
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName', default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralSubscribeEndpoint'):
            # BUG FIX: this option was registered under the misspelled name
            # '--centralSubcribeEndpoint', so the has_option() guard above
            # never matched and getOptionValues() (which looks up the
            # 'centralSubscribeEndpoint' key) never saw the CLI value.
            parser.add_option('--centralSubscribeEndpoint',
                              default=PUBLISHER_OPT_DEFAULTS['centralSubscribeEndpoint'],
                              help='Endpoint where central listens for messages [%default]')
        if not parser.has_option('--publishEndpoint'):
            parser.add_option('--publishEndpoint',
                              default=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                              help='Endpoint to publish messages on [%default]')
        if not parser.has_option('--heartbeatPeriodMsecs'):
            parser.add_option('--heartbeatPeriodMsecs',
                              default=PUBLISHER_OPT_DEFAULTS['heartbeatPeriodMsecs'],
                              type='int',
                              help='Period for sending heartbeats to central [%default]')
        if not parser.has_option('--highWaterMark'):
            parser.add_option('--highWaterMark',
                              default=PUBLISHER_OPT_DEFAULTS['highWaterMark'],
                              type='int',
                              help='High-water mark for publish socket (see 0MQ docs) [%default]')

    @classmethod
    def getOptionValues(cls, opts):
        """Collect the publisher-related values present on parsed *opts*."""
        result = {}
        for key in PUBLISHER_OPT_DEFAULTS.iterkeys():
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def heartbeat(self):
        """Announce liveness (and our pub endpoint) to central."""
        logging.debug('ZmqPublisher: heartbeat')
        self.sendJson('central.heartbeat.%s' % self.moduleName,
                      {'host': getShortHostName(),
                       'pub': self.publishEndpoint})

    def sendRaw(self, topic, body):
        """Send one 'topic:body' frame on the pub stream."""
        self.pubStream.send('%s:%s' % (topic, body))

    def sendJson(self, topic, obj):
        """Send *obj* as JSON; dicts get default 'module'/'timestamp' fields."""
        if isinstance(obj, dict):
            obj.setdefault('module', self.moduleName)
            obj.setdefault('timestamp', str(getTimestamp()))
        self.sendRaw(topic, json.dumps(obj))

    def sendDjango(self, modelInstance, topic=None, topicSuffix=None):
        """Serialize a Django model instance and publish it; the topic
        defaults to the instance's model name."""
        dataText = self.serializer.serialize([modelInstance])
        data = json.loads(dataText)[0]
        if topic is None:
            topic = data['model'].encode('utf-8')
        if topicSuffix is not None:
            topic += topicSuffix
        self.sendJson(topic, {'data': data})

    def start(self):
        """Open the PUB stream (connect to central, bind our endpoint,
        resolving a ':random' port) and start the heartbeat timer."""
        pubSocket = self.context.socket(zmq.PUB)
        self.pubStream = ZMQStream(pubSocket)
        # self.pubStream.setsockopt(zmq.IDENTITY, self.moduleName)
        self.pubStream.setsockopt(zmq.HWM, self.highWaterMark)
        self.pubStream.connect(self.centralSubscribeEndpoint)
        if self.publishEndpoint.endswith(':random'):
            endpointWithoutPort = re.sub(r':random$', '', self.publishEndpoint)
            port = self.pubStream.bind_to_random_port(endpointWithoutPort)
            self.publishEndpoint = '%s:%d' % (endpointWithoutPort, port)
        else:
            self.pubStream.bind(self.publishEndpoint)
        self.heartbeatTimer = ioloop.PeriodicCallback(self.heartbeat,
                                                      self.heartbeatPeriodMsecs)
        self.heartbeatTimer.start()
        self.heartbeat()
class ZmqPublisher(object):
    """Publishes messages toward the central forwarder and on its own PUB
    endpoint, and sends periodic heartbeats so central can track liveness."""

    def __init__(self, moduleName, context=None,
                 centralSubscribeEndpoint=PUBLISHER_OPT_DEFAULTS[
                     'centralSubscribeEndpoint'],
                 publishEndpoint=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                 heartbeatPeriodMsecs=PUBLISHER_OPT_DEFAULTS[
                     'heartbeatPeriodMsecs'],
                 highWaterMark=PUBLISHER_OPT_DEFAULTS['highWaterMark']):
        self.moduleName = moduleName
        if context is None:
            context = zmq.Context.instance()
        self.context = context
        self.centralSubscribeEndpoint = parseEndpoint(
            centralSubscribeEndpoint,
            defaultPort=DEFAULT_CENTRAL_SUBSCRIBE_PORT)
        # 'random' port is resolved at start() via bind_to_random_port()
        self.publishEndpoint = parseEndpoint(publishEndpoint,
                                             defaultPort='random')
        self.heartbeatPeriodMsecs = heartbeatPeriodMsecs
        self.highWaterMark = highWaterMark
        self.pubStream = None
        self.heartbeatTimer = None
        self.serializer = serializers.get_serializer('json')()

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        """Register this publisher's command-line options on *parser*,
        skipping any option the parser already has."""
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName', default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralSubscribeEndpoint'):
            # BUG FIX: this option was registered under the misspelled name
            # '--centralSubcribeEndpoint', so the has_option() guard above
            # never matched and getOptionValues() (which looks up the
            # 'centralSubscribeEndpoint' key) never saw the CLI value.
            parser.add_option(
                '--centralSubscribeEndpoint',
                default=PUBLISHER_OPT_DEFAULTS['centralSubscribeEndpoint'],
                help='Endpoint where central listens for messages [%default]')
        if not parser.has_option('--publishEndpoint'):
            parser.add_option(
                '--publishEndpoint',
                default=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                help='Endpoint to publish messages on [%default]')
        if not parser.has_option('--heartbeatPeriodMsecs'):
            parser.add_option(
                '--heartbeatPeriodMsecs',
                default=PUBLISHER_OPT_DEFAULTS['heartbeatPeriodMsecs'],
                type='int',
                help='Period for sending heartbeats to central [%default]')
        if not parser.has_option('--highWaterMark'):
            parser.add_option(
                '--highWaterMark',
                default=PUBLISHER_OPT_DEFAULTS['highWaterMark'],
                type='int',
                help=
                'High-water mark for publish socket (see 0MQ docs) [%default]')

    @classmethod
    def getOptionValues(cls, opts):
        """Collect the publisher-related values present on parsed *opts*."""
        result = {}
        for key in PUBLISHER_OPT_DEFAULTS.iterkeys():
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def heartbeat(self):
        """Announce liveness (and our pub endpoint) to central."""
        logging.debug('ZmqPublisher: heartbeat')
        self.sendJson('central.heartbeat.%s' % self.moduleName, {
            'host': getShortHostName(),
            'pub': self.publishEndpoint
        })

    def sendRaw(self, topic, body):
        """Send one 'topic:body' frame on the pub stream."""
        self.pubStream.send('%s:%s' % (topic, body))

    def sendJson(self, topic, obj):
        """Send *obj* as JSON; dicts get default 'module'/'timestamp' fields."""
        if isinstance(obj, dict):
            obj.setdefault('module', self.moduleName)
            obj.setdefault('timestamp', str(getTimestamp()))
        self.sendRaw(topic, json.dumps(obj))

    def sendDjango(self, modelInstance, topic=None, topicSuffix=None):
        """Serialize a Django model instance and publish it; the topic
        defaults to the instance's model name."""
        dataText = self.serializer.serialize([modelInstance])
        data = json.loads(dataText)[0]
        if topic is None:
            topic = data['model'].encode('utf-8')
        if topicSuffix is not None:
            topic += topicSuffix
        self.sendJson(topic, {'data': data})

    def start(self):
        """Open the PUB stream (connect to central, bind our endpoint,
        resolving a ':random' port) and start the heartbeat timer."""
        pubSocket = self.context.socket(zmq.PUB)
        self.pubStream = ZMQStream(pubSocket)
        # self.pubStream.setsockopt(zmq.IDENTITY, self.moduleName)
        self.pubStream.setsockopt(zmq.HWM, self.highWaterMark)
        self.pubStream.connect(self.centralSubscribeEndpoint)
        if self.publishEndpoint.endswith(':random'):
            endpointWithoutPort = re.sub(r':random$', '', self.publishEndpoint)
            port = self.pubStream.bind_to_random_port(endpointWithoutPort)
            self.publishEndpoint = '%s:%d' % (endpointWithoutPort, port)
        else:
            self.pubStream.bind(self.publishEndpoint)
        self.heartbeatTimer = ioloop.PeriodicCallback(
            self.heartbeat, self.heartbeatPeriodMsecs)
        self.heartbeatTimer.start()
        self.heartbeat()
class BaseService(App):
    """Base application service: wires up config/settings/state overlays, a
    ZMQ PUSH pipe to the collector, and an optional SUB stream for events.

    NOTE(review): depends on project types (App, ConfigDict, Queue, EventPack,
    EventService, ServiceJsonEncoder, Event) whose contracts are assumed from
    usage here — verify against their definitions.
    """

    # actions every service exposes; subclasses extend this list
    __actions__ = ["get_state", "log_stuff"]

    def __init__(self, model, **kwargs):
        super().__init__()
        self.connector = None
        self.model = model

        # Ensure the model has a logging directory and default log settings;
        # persist the model only if something actually changed.
        model_changed = False
        logging_config = self.model.configuration.get("logging", {})
        if not logging_config.get("directory"):
            logging_config["directory"] = "/".join(
                [self.model.directory, 'logs'])
            self.model.configuration["logging"] = logging_config
            model_changed = True

        logsettings = self.model.settings.get("logging")
        if not logsettings:
            default_log_settings = {
                "stdout": {
                    "on": False,
                    "level": "WARNING"
                },
                "file": {
                    "on": True,
                    "level": "INFO",
                    "maxBytes": 16_000_000,
                    "backupCount": 5
                }
            }
            self.model.settings["logging"] = default_log_settings
            model_changed = True

        if model_changed:
            self.model.save()

        self.load_config(self.model.configuration)
        self.config._add_change_listener(
            functools.partial(self.config_changed, 'config'))

        self.name = model.name
        self.encoded_name = self.name.encode('ascii')
        self.identity = f"service{self.model.id}".encode('ascii')
        self.ctx = zmq.Context.instance()
        self.data_handlers = []
        self.task_queue = Queue()
        self.current_task = {}
        self.settings = self.config._make_overlay()
        self.events = []

        # PUSH pipe to the central collector
        self.pusher = self.ctx.socket(zmq.PUSH)
        self.pusher.connect("ipc://collector.ipc")

        # Optional SUB stream for incoming events — only created when a
        # publisher address is supplied (cleanup() guards for its absence).
        publisher_address = kwargs.get("publisher_address")
        if publisher_address:
            event_stream = self.ctx.socket(zmq.SUB)
            event_stream.connect(publisher_address)
            self.event_stream = ZMQStream(event_stream)
            self.event_stream.linger = 0
            self.event_stream.on_recv(self.on_message)

        self.settings._add_change_listener(
            functools.partial(self.settings_changed, 'settings'))
        self.settings.load_dict(model.settings)

        # event_handlers drives the SUB subscriptions (see events_changed)
        self.event_handlers = ConfigDict()._make_overlay()
        self.event_handlers._add_change_listener(
            functools.partial(self.events_changed, 'event'))
        self.event_handlers.update(model.event_listeners)

        self.state = ConfigDict()._make_overlay()
        self.state._add_change_listener(
            functools.partial(self.state_changed, 'state'))
        self.setup_routes()
        self.event_class = EventService

    def cleanup(self):
        """Close the ZMQ sockets owned by this service."""
        print("cleanup service", flush=True)
        self.pusher.close()
        # BUG FIX: event_stream only exists when publisher_address was passed
        # to __init__; without this guard __del__ raised AttributeError.
        if getattr(self, 'event_stream', None):
            self.event_stream.close()

    def __del__(self):
        self.cleanup()

    def update_model(self, service_model):
        """Swap in a fresh model row and propagate the (possibly new) name."""
        self.model = service_model
        self.name = service_model.name
        self.encoded_name = self.name.encode('ascii')
        if self.connector:
            self.connector.update_model()

    def load_config(self, dic):
        self.config.load_dict(dic)

    def events_changed(self, event, oldval, key, newval):
        """Mirror event_handlers changes onto the SUB socket subscriptions."""
        if not newval:
            self.event_stream.setsockopt(zmq.UNSUBSCRIBE, key.encode('ascii'))
        else:
            self.event_stream.setsockopt(zmq.SUBSCRIBE, key.encode('ascii'))

    def config_changed(self, event, oldval, key, newval):
        # hook for subclasses; intentionally a no-op here
        pass

    def settings_changed(self, event, oldval, key, newval):
        # hook for subclasses; intentionally a no-op here
        pass

    def setup_routes(self):
        """Register this service's HTTP routes."""
        self.route('/state', 'GET', self.get_state)

    def state_changed(self, event, oldval, key, newval):
        """Broadcast every state mutation as a state.changed event."""
        self.fire_event(self.event_class.state.changed, {f"{key}": newval})

    def get_state(self, *args):
        return self.state

    def log_stuff(self, epack, *args):
        # placeholder action referenced from __actions__
        pass

    def list_actions(self, *args):
        """Collect actions from base classes plus this class's __actions__,
        de-duplicated while preserving order."""
        actions = []
        for base in self.__class__.__bases__:
            if hasattr(base, 'list_actions'):
                res = base.list_actions(base)
                if res:
                    actions = actions + res
        actions.extend(x for x in self.__actions__ if x not in actions)
        return actions

    def register_data_handlers(self, obj):
        self.data_handlers.append(obj)

    def on_message(self, msg):
        """Dispatch an incoming [topic, ident, json-payload, ...] frame to
        every registered handler whose key is a prefix of the topic."""
        if not msg or len(msg) < 3:
            return
        topic, ident, pstring, *other = msg
        topic = topic.decode('utf-8')
        ident = ident.decode('utf-8')
        data = pstring.decode('utf-8')
        try:
            data = json.loads(data)
        except Exception as e:
            # best-effort: continue with the raw string payload
            print(f"Received Invalid JSON Message - {str(e)}")
        epack = EventPack(topic, ident, data)
        el = self.event_handlers or {}
        for ekey in self.event_handlers.keys():
            if topic.startswith(ekey):
                for action_item in el.get(ekey) or []:
                    action = action_item.get("action")
                    if hasattr(self, action):
                        method = getattr(self, action)
                        if method:
                            res = b''
                            try:
                                # wrap the bound method with any plugins first
                                if len(self.plugins):
                                    for plugin in self.plugins:
                                        method = plugin.apply(method, self)
                                res = method({"data": data})
                            except AncillaResponse as ar:
                                res = ar
                            except Exception as e:
                                print(f"Handle Event Error {str(e)}")
                                continue
                            if yields(res):
                                # coroutine result: schedule it and log failures
                                future = asyncio.run_coroutine_threadsafe(
                                    res, asyncio.get_running_loop())

                                def onfinish(fut):
                                    try:
                                        newres = fut.result(1)
                                    except Exception as exc:
                                        # BUG FIX: previously logged 'e', which
                                        # is unbound here in Python 3 (except
                                        # names are deleted at block exit), so
                                        # a failed future raised NameError.
                                        print(f'Event Handle Error {str(exc)}')

                                future.add_done_callback(onfinish)

    def on_data(self, data):
        """Run raw data through the handler chain, then push it downstream."""
        for d in self.data_handlers:
            data = d.handle(data)
        self.pusher.send_multipart(data)

    def stop(self):
        pass

    def start(self):
        pass

    async def _process_tasks(self):
        """Consume queued tasks one at a time, run them against this service,
        and report completion on the push pipe."""
        async for dtask in self.task_queue:
            self.current_task[dtask.name] = dtask
            res = await dtask.run(self)
            rj = json.dumps(res, cls=ServiceJsonEncoder).encode('ascii')
            self.pusher.send_multipart(
                [self.identity + b'.task', b'finished', rj])
            del self.current_task[dtask.name]
            print(f"PROCESSED TASK = {res}", flush=True)

    async def _add_task(self, msg):
        await self.task_queue.put(msg)

    def fire_event(self, evtname, payload):
        """Serialize *payload* and publish it as 'events.<evtname>'."""
        if isinstance(evtname, Event):
            evtname = evtname.value()
        evtname = evtname.encode('utf-8')
        pstring = json.dumps(payload, cls=ServiceJsonEncoder)
        pstring = pstring.encode('utf-8')
        self.pusher.send_multipart(
            [b'events.' + evtname, self.identity, pstring])
def init_hub(self):
    """Construct the Hub: bind all client/engine sockets, connect the DB
    backend, build the connection-info dicts, and instantiate the Hub object.
    """
    # "%i" is left as a placeholder and filled with a concrete port per bind.
    client_iface = "%s://%s:" % (self.client_transport, self.client_ip) + "%i"
    engine_iface = "%s://%s:" % (self.engine_transport, self.engine_ip) + "%i"

    ctx = self.context
    loop = self.loop

    # Registrar socket: single ROUTER that both clients and engines register on.
    q = ZMQStream(ctx.socket(zmq.ROUTER), loop)
    q.bind(client_iface % self.regport)
    self.log.info("Hub listening on %s for registration.", client_iface % self.regport)
    if self.client_ip != self.engine_ip:
        # engines reach the hub on a different interface; bind there too
        q.bind(engine_iface % self.regport)
        self.log.info("Hub listening on %s for registration.", engine_iface % self.regport)

    ### Engine connections ###

    # heartbeat: PUB pings all engines, ROUTER collects their pongs
    hpub = ctx.socket(zmq.PUB)
    hpub.bind(engine_iface % self.hb[0])
    hrep = ctx.socket(zmq.ROUTER)
    hrep.bind(engine_iface % self.hb[1])
    self.heartmonitor = HeartMonitor(loop=loop, config=self.config, log=self.log,
                                     pingstream=ZMQStream(hpub, loop),
                                     pongstream=ZMQStream(hrep, loop)
                                     )

    ### Client connections ###

    # Notifier socket: broadcasts engine (un)registration events to clients.
    n = ZMQStream(ctx.socket(zmq.PUB), loop)
    n.bind(client_iface % self.notifier_port)

    ### build and launch the queues ###

    # monitor socket: SUB with an empty prefix, i.e. receives everything
    # published to monitor_url or the in-process monitor channel.
    sub = ctx.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, b"")
    sub.bind(self.monitor_url)
    sub.bind('inproc://monitor')
    sub = ZMQStream(sub, loop)

    # connect the db
    self.log.info('Hub using DB backend: %r' % (self.db_class.split()[-1]))
    # cdir = self.config.Global.cluster_dir
    self.db = import_item(str(self.db_class))(session=self.session.session,
                                              config=self.config, log=self.log)
    # brief pause, presumably to let the DB backend finish connecting
    # — NOTE(review): confirm this sleep is still required
    time.sleep(.25)
    try:
        scheme = self.config.TaskScheduler.scheme_name
    except AttributeError:
        # no scheme configured: fall back to the TaskScheduler default
        from .scheduler import TaskScheduler
        scheme = TaskScheduler.scheme_name.get_default_value()

    # build connection dicts
    # index [1] of each port pair is the engine-facing side
    self.engine_info = {
        'control': engine_iface % self.control[1],
        'mux': engine_iface % self.mux[1],
        'heartbeat': (engine_iface % self.hb[0], engine_iface % self.hb[1]),
        'task': engine_iface % self.task[1],
        'iopub': engine_iface % self.iopub[1],
        # 'monitor' : engine_iface%self.mon_port,
    }

    # index [0] is the client-facing side; 'task' also carries the scheduler scheme
    self.client_info = {
        'control': client_iface % self.control[0],
        'mux': client_iface % self.mux[0],
        'task': (scheme, client_iface % self.task[0]),
        'iopub': client_iface % self.iopub[0],
        'notification': client_iface % self.notifier_port
    }

    self.log.debug("Hub engine addrs: %s", self.engine_info)
    self.log.debug("Hub client addrs: %s", self.client_info)

    # resubmit stream: DEALER into the client-side task endpoint, identified
    # by this session so resubmissions look like ordinary client requests
    r = ZMQStream(ctx.socket(zmq.DEALER), loop)
    url = util.disambiguate_url(self.client_info['task'][-1])
    r.setsockopt(zmq.IDENTITY, self.session.bsession)
    r.connect(url)

    self.hub = Hub(loop=loop, session=self.session, monitor=sub,
                   heartmonitor=self.heartmonitor,
                   query=q, notifier=n, resubmit=r, db=self.db,
                   engine_info=self.engine_info, client_info=self.client_info,
                   log=self.log)
class ZmqCentral(object):
    """Central zmq hub: forwards module pub/sub traffic through a FORWARDER
    device, logs every message (with attachments), tracks module liveness via
    heartbeats, and answers 'info' RPC queries.

    NOTE(review): Python-2-era code (iteritems, str topics) — idioms kept.
    """

    def __init__(self, opts):
        self.opts = opts
        # moduleName -> last heartbeat params (augmented with 'timeout')
        self.info = {}

    def announceConnect(self, moduleName, params):
        """Inject a central.connect event for *moduleName*."""
        logging.info('module %s connected', moduleName)
        self.injectStream.send('central.connect.%s:%s'
                               % (moduleName, json.dumps(params)))

    def announceDisconnect(self, moduleName):
        """Inject a central.disconnect event for *moduleName*."""
        logging.info('module %s disconnected', moduleName)
        self.injectStream.send('central.disconnect.%s:%s'
                               % (moduleName,
                                  json.dumps({'timestamp': str(getTimestamp())})))

    def logMessage(self, msg, posixTime=None, attachmentDir='-'):
        """Append one '@@@ <ts> <len> <attachmentDir> <msg>' record to the
        message log; '-' marks a message without attachments."""
        mlog = self.messageLog
        mlog.write('@@@ %d %d %s ' % (getTimestamp(posixTime), len(msg), attachmentDir))
        mlog.write(msg)
        mlog.write('\n')

    def logMessageWithAttachments0(self, msg):
        parsed = parseMessage(msg)
        posixTime = time.time()
        # construct a unique per-message attachment directory
        dt = datetime.datetime.utcfromtimestamp(posixTime)
        dateText = dt.strftime('%Y-%m-%d')
        timeText = dt.strftime('%H-%M-%S') + '.%06d' % dt.microsecond
        uniq = '%08x' % random.getrandbits(32)
        attachmentSuffix = os.path.join('attachments', dateText, timeText,
                                        parsed['topic'], uniq)
        attachmentPath = os.path.join(self.logDir, attachmentSuffix)
        os.makedirs(attachmentPath)
        # write attachments to attachment directory
        for attachment in parsed['attachments']:
            fullName = os.path.join(attachmentPath, attachment.get_filename())
            open(fullName, 'wb').write(attachment.get_payload())
        # log message with a pointer to the attachment directory
        self.logMessage(':'.join((parsed['topic'], parsed['json'])),
                        posixTime, attachmentSuffix)

    def logMessageWithAttachments(self, msg):
        """Best-effort wrapper: never let attachment logging kill the loop."""
        try:
            return self.logMessageWithAttachments0(msg)
        except:  # pylint: disable=W0702
            self.logException('logging message with attachments')

    def handleHeartbeat(self, params):
        """Record a module heartbeat; announce (re)connects and refresh the
        module's disconnect timeout."""
        moduleName = params['module'].encode('utf-8')
        now = getTimestamp()
        oldInfo = self.info.get(moduleName, None)
        if oldInfo:
            # a changed pub endpoint means the module restarted
            if oldInfo.get('pub', None) != params.get('pub', None):
                self.announceDisconnect(moduleName)
                self.announceConnect(moduleName, params)
        else:
            self.announceConnect(moduleName, params)
        self.info[moduleName] = params
        keepalive = params.get('keepalive', DEFAULT_KEEPALIVE_US)
        params['timeout'] = now + keepalive
        return 'ok'

    def handleInfo(self):
        """RPC: return the module liveness table."""
        return self.info

    def logException(self, whileClause):
        """Log the active exception with traceback and context."""
        errClass, errObject, errTB = sys.exc_info()[:3]
        errText = '%s.%s: %s' % (errClass.__module__,
                                 errClass.__name__,
                                 str(errObject))
        logging.warning(''.join(traceback.format_tb(errTB)))
        logging.warning(errText)
        logging.warning('[error while %s at time %s]', whileClause, getTimestamp())

    def handleMessages(self, messages):
        """Log each monitored message; heartbeat topics also refresh liveness."""
        for msg in messages:
            if hasAttachments(msg):
                self.logMessageWithAttachments(msg)
            else:
                self.logMessage(msg)
            if msg.startswith('central.heartbeat.'):
                try:
                    _topic, body = msg.split(':', 1)
                    self.handleHeartbeat(json.loads(body))
                except:  # pylint: disable=W0702
                    self.logException('handling heartbeat')

    def handleRpcCall(self, messages):
        """Answer JSON-RPC-style requests on the REP stream."""
        for msg in messages:
            try:
                call = json.loads(msg)
                callId = call['id']
            except:  # pylint: disable=W0702
                self.rpcStream.send(json.dumps({'result': None,
                                                'error': 'malformed request'}))
                # BUG FIX: without this continue, execution fell through with
                # 'call'/'callId' unbound and the handler below raised
                # NameError instead of answering the next message.
                continue
            try:
                method = call['method']
                _params = call['params']
                if method == 'info':
                    result = self.handleInfo()
                else:
                    raise ValueError('unknown method %s' % method)
                self.rpcStream.send(json.dumps({'result': result,
                                                'error': None,
                                                'id': callId}))
            except:  # pylint: disable=W0702
                self.logException('handling rpc message')
                errClass, errObject = sys.exc_info()[:2]
                errText = '%s.%s: %s' % (errClass.__module__,
                                         errClass.__name__,
                                         str(errObject))
                self.rpcStream.send(json.dumps({'result': None,
                                                'error': errText,
                                                'id': callId}))

    def handleDisconnectTimer(self):
        """Evict and announce modules whose heartbeat timeout has passed."""
        now = getTimestamp()
        disconnectModules = []
        for moduleName, entry in self.info.iteritems():
            timeout = entry.get('timeout', None)
            if timeout is not None and now > timeout:
                disconnectModules.append(moduleName)
        for moduleName in disconnectModules:
            self.announceDisconnect(moduleName)
            del self.info[moduleName]

    def readyLog(self, pathTemplate, timestamp):
        """Resolve a log path (filling a '%s' template with the timestamp),
        ensure the log directory exists, and maintain the '-latest' symlink."""
        if '%s' in pathTemplate:
            timeText = timestamp.strftime('%Y-%m-%d-%H-%M-%S')
            logFile = pathTemplate % timeText
        else:
            logFile = pathTemplate
        if not os.path.exists(self.logDir):
            os.makedirs(self.logDir)
        logPath = os.path.join(self.logDir, logFile)
        if '%s' in pathTemplate:
            latestPath = os.path.join(self.logDir, pathTemplate % 'latest')
            if os.path.islink(latestPath):
                os.unlink(latestPath)
            os.symlink(logFile, latestPath)
        return logPath

    def start(self):
        """Open logs, optionally daemonize, then wire up all zmq streams and
        the forwarder device. Exits the process on any startup error."""
        # open log files
        now = datetime.datetime.utcnow()
        self.logDir = os.path.abspath(self.opts.logDir)
        self.messageLogPath = self.readyLog(self.opts.messageLog, now)
        self.messageLog = open(self.messageLogPath, 'a')
        self.consoleLogPath = self.readyLog(self.opts.consoleLog, now)
        rootLogger = logging.getLogger()
        rootLogger.setLevel(logging.DEBUG)
        fmt = logging.Formatter('%(asctime)s - %(levelname)-7s - %(message)s')
        fmt.converter = time.gmtime  # log timestamps in UTC
        fh = logging.FileHandler(self.consoleLogPath)
        fh.setFormatter(fmt)
        fh.setLevel(logging.DEBUG)
        rootLogger.addHandler(fh)
        if self.opts.foreground:
            ch = logging.StreamHandler()
            ch.setLevel(logging.DEBUG)
            ch.setFormatter(fmt)
            rootLogger.addHandler(ch)

        # daemonize via the classic double-fork unless running in foreground
        if self.opts.foreground:
            logging.info('staying in foreground')
        else:
            logging.info('daemonizing')
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.setsid()
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.chdir('/')
            os.close(1)
            os.close(2)
            nullFd = os.open('/dev/null', os.O_RDWR)
            os.dup2(nullFd, 1)
            os.dup2(nullFd, 2)

        try:
            # set up zmq
            self.context = zmq.Context.instance()
            self.rpcStream = ZMQStream(self.context.socket(zmq.REP))
            self.rpcStream.bind(self.opts.rpcEndpoint)
            self.rpcStream.on_recv(self.handleRpcCall)

            self.forwarder = ThreadDevice(zmq.FORWARDER, zmq.SUB, zmq.PUB)
            self.forwarder.setsockopt_in(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_out(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_in(zmq.SUBSCRIBE, '')
            self.forwarder.setsockopt_out(zmq.HWM, self.opts.highWaterMark)
            self.forwarder.bind_in(self.opts.subscribeEndpoint)
            self.forwarder.bind_in(INJECT_ENDPOINT)
            self.forwarder.bind_out(self.opts.publishEndpoint)
            self.forwarder.bind_out(MONITOR_ENDPOINT)
            for entry in self.opts.subscribeTo:
                try:
                    moduleName, endpoint = entry.split('@')
                    endpoint = parseEndpoint(endpoint)
                except ValueError:
                    raise ValueError('--subscribeTo argument "%s" is not in the format "<moduleName>@<endpoint>"' % entry)
                self.forwarder.connect_in(endpoint)
                self.info[moduleName] = {'module': moduleName, 'pub': endpoint}
            self.forwarder.start()
            time.sleep(0.1)  # wait for forwarder to bind sockets

            # tap the forwarder output so every message can be logged
            self.monStream = ZMQStream(self.context.socket(zmq.SUB))
            self.monStream.setsockopt(zmq.SUBSCRIBE, '')
            self.monStream.connect(MONITOR_ENDPOINT)
            self.monStream.on_recv(self.handleMessages)

            # injection path for central's own connect/disconnect events
            self.injectStream = ZMQStream(self.context.socket(zmq.PUB))
            self.injectStream.connect(INJECT_ENDPOINT)

            self.disconnectTimer = ioloop.PeriodicCallback(self.handleDisconnectTimer, 5000)
            self.disconnectTimer.start()
        except:  # pylint: disable=W0702
            errClass, errObject, errTB = sys.exc_info()[:3]
            errText = '%s.%s: %s' % (errClass.__module__,
                                     errClass.__name__,
                                     str(errObject))
            logging.error(''.join(traceback.format_tb(errTB)))
            logging.error(errText)
            logging.error('[error during startup -- exiting]')
            sys.exit(1)

    def shutdown(self):
        self.messageLog.flush()