class Publisher(object):
    """Publishes multipart messages on a bound PUB socket via a ZMQStream."""

    def __init__(self, context, pub_endpoint):
        """Create the PUB stream on *context* and bind it to *pub_endpoint*."""
        self.context = context
        self.pub_endpoint = pub_endpoint
        pub_sock = self.context.socket(zmq.PUB)
        loop = IOLoop.instance()
        self.publisher = ZMQStream(pub_sock, loop)
        # LINGER 0: discard any unsent messages immediately on close.
        self.publisher.socket.setsockopt(zmq.LINGER, 0)
        self.publisher.bind(self.pub_endpoint)
        return

    def shutdown(self):
        """Unbind and close the PUB stream, releasing the socket."""
        self.publisher.socket.unbind(self.pub_endpoint)
        self.publisher.socket.close()
        self.publisher.close()
        self.publisher = None
        return

    def send(self, msg):
        """Send *msg* (a list of frames) as one multipart message."""
        logger.debug("Publisher sending: {0}".format(msg))
        self.publisher.send_multipart(msg)
        return
class DeviceServicePublisher(object):
    """PUB-socket publisher that prefixes each message with a topic frame."""

    def __init__(self, context, publisher_endpoint):
        """Create and bind the PUB stream on *publisher_endpoint*."""
        self.context = context
        self.publisher_endpoint = publisher_endpoint
        pub_sock = self.context.socket(zmq.PUB)
        self.publisher = ZMQStream(pub_sock, IOLoop.instance())
        # Do not block on close waiting for undelivered messages.
        self.publisher.socket.setsockopt(zmq.LINGER, 0)
        self.publisher.bind(self.publisher_endpoint)
        return

    def shutdown(self):
        """Close the PUB stream and drop the reference."""
        self.publisher.socket.close()
        self.publisher.close()
        self.publisher = None
        return

    def send(self, msg, topic='all'):
        """Publish *msg* under *topic*; the topic travels as the first frame."""
        self.publisher.send_multipart([topic, msg])
        return
class Broker (object):
    """Event broker: receives events on a bound SUB socket, validates them by
    round-tripping through event.Event, and republishes them on a PUB socket.
    """

    def __init__(self, pub_uri=defaults.broker_pub_uri,
                 sub_uri=defaults.broker_sub_uri,
                 patterns=None,
                 ):
        """Set up logging, zmq context, sockets, subscriptions and handlers.

        @param pub_uri - endpoint the outgoing PUB socket binds to
        @param sub_uri - endpoint the incoming SUB socket binds to
        @param patterns - optional list of subscription prefixes; empty
                          list means no subscriptions are installed here
        """
        self.pub_uri = pub_uri
        self.sub_uri = sub_uri
        if patterns:
            self.patterns = patterns
        else:
            self.patterns = []
        self.setup_logging()
        self.setup_zmq()
        self.setup_sockets()
        self.setup_subscriptions()
        self.setup_events()

    def setup_logging(self):
        self.log = logging.getLogger('zmqevt.broker')

    def setup_zmq(self):
        self.context = zmq.Context()

    def setup_sockets(self):
        # Both sockets bind: publishers connect to sub_uri, consumers to pub_uri.
        self.sub = ZMQStream(self.context.socket(zmq.SUB))
        self.sub.bind(self.sub_uri)
        self.pub = ZMQStream(self.context.socket(zmq.PUB))
        self.pub.bind(self.pub_uri)

    def setup_subscriptions(self):
        if self.patterns:
            for p in self.patterns:
                self.subscribe(p)

    def subscribe(self, pattern):
        # Fixed typo in log message ("Subcribe" -> "Subscribe").
        self.log.debug('Subscribe to "%s".' % pattern)
        self.sub.setsockopt(zmq.SUBSCRIBE, pattern)

    def setup_events(self):
        self.sub.on_recv(self.publish)

    def publish(self, msg):
        """Validate a received 2-frame event and republish it."""
        assert len(msg) == 2, 'Received invalid message.'
        # This regenerates the event to ensure that we don't
        # pass on invalid data.
        try:
            evt = event.Event.load(msg)
        # Fixed Python-2-only "except Exception, detail" syntax; the
        # "as" form works on Python 2.6+ and Python 3.
        except Exception as detail:
            self.log.error('Error processing message: %s' % detail)
            return
        self.log.debug('Event: %s' % (str(evt.dump())))
        self.pub.send_multipart(evt.dump())
def launch_scheduler(in_addr, out_addr, mon_addr, not_addr, config=None,
                     logname='ZMQ', log_addr=None, loglevel=logging.DEBUG,
                     scheme='lru', identity=b'task'):
    """Wire up a TaskScheduler's four streams and run its event loop.

    @param in_addr - endpoint for the client-facing XREP stream (bound)
    @param out_addr - endpoint for the engine-facing XREP stream (bound)
    @param mon_addr - endpoint the PUB monitor stream connects to
    @param not_addr - endpoint the notification SUB stream connects to
    @param config - optional dict, rewrapped into a Config object
    @param scheme - name of a scheduling function resolved via globals()
    @param identity - bytes identity set on both XREP streams
    """
    from zmq.eventloop import ioloop
    from zmq.eventloop.zmqstream import ZMQStream

    if config:
        # unwrap dict back into Config
        config = Config(config)

    ctx = zmq.Context()
    loop = ioloop.IOLoop()
    ins = ZMQStream(ctx.socket(zmq.XREP), loop)
    ins.setsockopt(zmq.IDENTITY, identity)
    ins.bind(in_addr)

    outs = ZMQStream(ctx.socket(zmq.XREP), loop)
    outs.setsockopt(zmq.IDENTITY, identity)
    outs.bind(out_addr)
    mons = ZMQStream(ctx.socket(zmq.PUB), loop)
    mons.connect(mon_addr)
    nots = ZMQStream(ctx.socket(zmq.SUB), loop)
    # BUGFIX: socket option values must be bytes (consistent with the
    # bytes identity above); a str '' raises TypeError under Python 3
    # pyzmq. b'' is identical to '' on Python 2, so this is safe there.
    nots.setsockopt(zmq.SUBSCRIBE, b'')
    nots.connect(not_addr)

    # Resolve the scheme name to the scheduling function of that name.
    scheme = globals().get(scheme, None)
    # setup logging
    if log_addr:
        connect_logger(logname, ctx, log_addr, root="scheduler",
                       loglevel=loglevel)
    else:
        local_logger(logname, loglevel)

    scheduler = TaskScheduler(client_stream=ins, engine_stream=outs,
                              mon_stream=mons, notifier_stream=nots,
                              scheme=scheme, loop=loop, logname=logname,
                              config=config)
    scheduler.start()
    try:
        loop.start()
    except KeyboardInterrupt:
        print("interrupted, exiting...", file=sys.__stderr__)
def launch_scheduler(in_addr, out_addr, mon_addr, not_addr, config=None,
                     logname='ZMQ', log_addr=None, loglevel=logging.DEBUG,
                     scheme='lru', identity=b'task'):
    """Wire up a TaskScheduler's four streams and run its event loop.

    @param in_addr - endpoint for the client-facing XREP stream (bound)
    @param out_addr - endpoint for the engine-facing XREP stream (bound)
    @param mon_addr - endpoint the PUB monitor stream connects to
    @param not_addr - endpoint the notification SUB stream connects to
    @param config - optional dict, rewrapped into a Config object
    @param scheme - name of a scheduling function resolved via globals()
    @param identity - bytes identity set on both XREP streams
    """
    from zmq.eventloop import ioloop
    from zmq.eventloop.zmqstream import ZMQStream

    if config:
        # unwrap dict back into Config
        config = Config(config)

    ctx = zmq.Context()
    loop = ioloop.IOLoop()
    ins = ZMQStream(ctx.socket(zmq.XREP), loop)
    ins.setsockopt(zmq.IDENTITY, identity)
    ins.bind(in_addr)

    outs = ZMQStream(ctx.socket(zmq.XREP), loop)
    outs.setsockopt(zmq.IDENTITY, identity)
    outs.bind(out_addr)
    mons = ZMQStream(ctx.socket(zmq.PUB), loop)
    mons.connect(mon_addr)
    nots = ZMQStream(ctx.socket(zmq.SUB), loop)
    # BUGFIX: socket option values must be bytes (consistent with the
    # bytes identity above); a str '' raises TypeError under Python 3
    # pyzmq. b'' is identical to '' on Python 2, so this is safe there.
    nots.setsockopt(zmq.SUBSCRIBE, b'')
    nots.connect(not_addr)

    # Resolve the scheme name to the scheduling function of that name.
    scheme = globals().get(scheme, None)
    # setup logging
    if log_addr:
        connect_logger(logname, ctx, log_addr, root="scheduler",
                       loglevel=loglevel)
    else:
        local_logger(logname, loglevel)

    scheduler = TaskScheduler(client_stream=ins, engine_stream=outs,
                              mon_stream=mons, notifier_stream=nots,
                              scheme=scheme, loop=loop, logname=logname,
                              config=config)
    scheduler.start()
    try:
        loop.start()
    except KeyboardInterrupt:
        print ("interrupted, exiting...", file=sys.__stderr__)
class LocalRequestProxy:
    '''
    Routes client requests coming from a particular server to the
    RouterPubSubProxy, which will route them to the workers.
    '''

    def __init__(self, front_end_name, back_end_name, loop):
        '''
        Initializes an instance of LocalRequestProxy

        @param front_end_name - name of the front end socket. It will be
                                initialized with the Router socket.
        @param back_end_name - name of the back end socket. It will be
                               initialized with the Dealer socket.
        @param loop - zmq IOLoop
        '''
        self._loop = loop
        context = zmq.Context.instance()

        # Front end: ROUTER bound over the inproc transport.
        front_addr = ZmqAddress(chan_name=front_end_name, transport=INPROC)
        self._front_end = ZMQStream(context.socket(zmq.ROUTER), io_loop=loop)
        self._front_end.setsockopt(zmq.ROUTER_MANDATORY, 1)
        self._front_end.bind(front_addr.zmq_url())

        # Back end: DEALER connected to the named channel.
        back_addr = ZmqAddress(chan_name=back_end_name)
        self._back_end = ZMQStream(context.socket(zmq.DEALER), io_loop=loop)
        self._back_end.connect(back_addr.zmq_url())

        def forward(from_name, to_name, zmq_stream, msgs):
            # Pass the frames through unchanged, flushing immediately.
            log.debug("Routing from {0} to {1} messages {2}"
                      .format(from_name, to_name, msgs))
            zmq_stream.send_multipart(msgs)
            zmq_stream.flush()

        # Cross-wire the two streams: whatever arrives on one side is
        # forwarded verbatim to the other.
        self._front_end.on_recv(
            lambda msgs: forward(front_end_name, back_end_name,
                                 self._back_end, msgs))
        self._back_end.on_recv(
            lambda msgs: forward(back_end_name, front_end_name,
                                 self._front_end, msgs))
class RouterPubSubProxy:
    '''
    A proxy with one front end socket and two backend sockets. The front
    end is a ROUTER that passes messages to a backend PUB, which
    broadcasts them to all subscribers; results come back on a backend
    SUB. All communications on this proxy are done through IPC.
    '''

    def __init__(self, front, back_out, back_in, loop):
        '''
        Initializes the instance of RouterPubSubProxy.

        @param front - channel name to be the routing stream
        @param back_out - channel name of the publishing stream
        @param back_in - channel name of result receiving stream
        @param loop - IOLoop
        '''
        self._loop = loop
        context = zmq.Context.instance()

        # Front end: bound ROUTER that accepts client traffic.
        front_addr = ZmqAddress(chan_name=front)
        self._front_stream = ZMQStream(context.socket(zmq.ROUTER),
                                       io_loop=loop)
        self._front_stream.setsockopt(zmq.ROUTER_MANDATORY, 1)
        self._front_stream.bind(front_addr.zmq_url())

        # Back end out: PUB that broadcasts to all workers.
        out_addr = ZmqAddress(chan_name=back_out)
        self._back_out_stream = ZMQStream(context.socket(zmq.PUB),
                                          io_loop=loop)
        self._back_out_stream.bind(out_addr.zmq_url())

        # Back end in: SUB (subscribed to everything) collecting results.
        in_addr = ZmqAddress(chan_name=back_in)
        self._back_in_stream = ZMQStream(context.socket(zmq.SUB),
                                         io_loop=loop)
        self._back_in_stream.setsockopt(zmq.SUBSCRIBE, b'')
        self._back_in_stream.bind(in_addr.zmq_url())

        def forward(from_name, to_name, zmq_stream, msgs):
            # Pass the frames through unchanged, flushing immediately.
            log.info("Routing from {0} to {1} messages {2}"
                     .format(from_name, to_name, msgs))
            zmq_stream.send_multipart(msgs)
            zmq_stream.flush()

        self._front_stream.on_recv(
            lambda msgs: forward(front, back_out,
                                 self._back_out_stream, msgs))
        self._back_in_stream.on_recv(
            lambda msgs: forward(back_in, front,
                                 self._front_stream, msgs))

    def start(self):
        '''
        Start this proxy.
        '''
        self._loop.start()
def stream(self, sock_type, sock_addr, sock_bind, callback=None, subscribe=''):
    """Create a ZMQStream of *sock_type* on *sock_addr* and return
    (stream, resolved_address).

    @param sock_type - zmq socket type (e.g. zmq.SUB, zmq.PUB)
    @param sock_addr - address template; a %(port)s placeholder, if
                       present, is filled with a random port
    @param sock_bind - bind when true, connect when false
    @param callback - optional on_recv handler
    @param subscribe - subscription prefix, applied only to SUB sockets
    """
    assert self.ctx is not None
    # Substitute a random port into the address template.
    resolved_addr = sock_addr % {
        'port': random.randint(1024, 65535),
    }
    zstream = ZMQStream(self.ctx.socket(sock_type))
    if sock_type == zmq.SUB:
        zstream.setsockopt(zmq.SUBSCRIBE, subscribe)
    if sock_bind:
        zstream.bind(resolved_addr)
    else:
        zstream.connect(resolved_addr)
    if callback:
        zstream.on_recv(callback)
    return (zstream, resolved_addr)
class ZmqCentral(object):
    """Central zmq hub: forwards pub/sub traffic between modules via a
    SUB->PUB forwarder device, logs every message, tracks module liveness
    through heartbeats, and answers info requests over a REP RPC socket."""

    def __init__(self, opts):
        self.opts = opts  # parsed command-line options
        self.info = {}    # moduleName -> latest heartbeat params (incl. 'timeout')

    def announceConnect(self, moduleName, params):
        # Broadcast a connect event on the bus via the inject stream.
        logging.info('module %s connected', moduleName)
        self.injectStream.send('central.connect.%s:%s'
                               % (moduleName, json.dumps(params)))

    def announceDisconnect(self, moduleName):
        # Broadcast a disconnect event carrying a timestamp payload.
        logging.info('module %s disconnected', moduleName)
        self.injectStream.send('central.disconnect.%s:%s'
                               % (moduleName,
                                  json.dumps({'timestamp': str(getTimestamp())})))

    def logMessage(self, msg, posixTime=None, attachmentDir='-'):
        # Append one record: "@@@ <timestamp> <length> <attachmentDir> <msg>\n".
        mlog = self.messageLog
        mlog.write('@@@ %d %d %s ' % (getTimestamp(posixTime), len(msg), attachmentDir))
        mlog.write(msg)
        mlog.write('\n')

    def logMessageWithAttachments0(self, msg):
        """Write the message's attachments to a unique directory, then log
        the message body with a pointer to that directory."""
        parsed = parseMessage(msg)
        posixTime = time.time()
        # construct attachment directory
        dt = datetime.datetime.utcfromtimestamp(posixTime)
        dateText = dt.strftime('%Y-%m-%d')
        timeText = dt.strftime('%H-%M-%S') + '.%06d' % dt.microsecond
        uniq = '%08x' % random.getrandbits(32)  # random suffix avoids collisions
        attachmentSuffix = os.path.join('attachments', dateText, timeText,
                                        parsed['topic'], uniq)
        attachmentPath = os.path.join(self.logDir, attachmentSuffix)
        os.makedirs(attachmentPath)
        # write attachments to attachment directory
        for attachment in parsed['attachments']:
            fullName = os.path.join(attachmentPath, attachment.get_filename())
            open(fullName, 'wb').write(attachment.get_payload())
        # log message with a pointer to the attachment directory
        self.logMessage(':'.join((parsed['topic'], parsed['json'])),
                        posixTime,
                        attachmentSuffix)

    def logMessageWithAttachments(self, msg):
        # Best-effort wrapper: a logging failure must not kill the hub.
        try:
            return self.logMessageWithAttachments0(msg)
        except:  # pylint: disable=W0702
            self.logException('logging message with attachments')

    def handleHeartbeat(self, params):
        """Record a module heartbeat; announce (re)connects when the module
        is new or its pub endpoint changed. Returns 'ok'."""
        moduleName = params['module'].encode('utf-8')
        now = getTimestamp()
        oldInfo = self.info.get(moduleName, None)
        if oldInfo:
            if oldInfo.get('pub', None) != params.get('pub', None):
                # pub endpoint changed -> treat as a reconnect
                self.announceDisconnect(moduleName)
                self.announceConnect(moduleName, params)
        else:
            self.announceConnect(moduleName, params)
        self.info[moduleName] = params
        keepalive = params.get('keepalive', DEFAULT_KEEPALIVE_US)
        params['timeout'] = now + keepalive  # liveness deadline
        return 'ok'

    def handleInfo(self):
        # RPC method: return the current module table.
        return self.info

    def logException(self, whileClause):
        # Log the active exception with its traceback and context string.
        errClass, errObject, errTB = sys.exc_info()[:3]
        errText = '%s.%s: %s' % (errClass.__module__, errClass.__name__, str(errObject))
        logging.warning(''.join(traceback.format_tb(errTB)))
        logging.warning(errText)
        logging.warning('[error while %s at time %s]', whileClause, getTimestamp())

    def handleMessages(self, messages):
        """Log every message from the monitor stream; heartbeat messages are
        additionally parsed and fed to handleHeartbeat."""
        for msg in messages:
            if hasAttachments(msg):
                self.logMessageWithAttachments(msg)
            else:
                self.logMessage(msg)
            if msg.startswith('central.heartbeat.'):
                try:
                    _topic, body = msg.split(':', 1)
                    self.handleHeartbeat(json.loads(body))
                except:  # pylint: disable=W0702
                    self.logException('handling heartbeat')

    def handleRpcCall(self, messages):
        """Serve JSON-RPC-style requests arriving on the REP stream."""
        for msg in messages:
            try:
                call = json.loads(msg)
                callId = call['id']
            except:  # pylint: disable=W0702
                # NOTE(review): execution falls through after this reply, so
                # the try below can reference an unbound callId -- confirm
                # whether a `continue` was intended here.
                self.rpcStream.send(json.dumps({'result': None,
                                                'error': 'malformed request'}))
            try:
                method = call['method']
                _params = call['params']
                if method == 'info':
                    result = self.handleInfo()
                else:
                    raise ValueError('unknown method %s' % method)
                self.rpcStream.send(json.dumps({'result': result,
                                                'error': None,
                                                'id': callId}))
            except:  # pylint: disable=W0702
                self.logException('handling rpc message')
                errClass, errObject = sys.exc_info()[:2]
                errText = '%s.%s: %s' % (errClass.__module__, errClass.__name__, str(errObject))
                self.rpcStream.send(json.dumps({'result': None,
                                                'error': errText,
                                                'id': callId}))

    def handleDisconnectTimer(self):
        # Periodic sweep: drop modules whose keepalive deadline has passed.
        now = getTimestamp()
        disconnectModules = []
        for moduleName, entry in self.info.iteritems():  # Python 2 dict API
            timeout = entry.get('timeout', None)
            if timeout is not None and now > timeout:
                disconnectModules.append(moduleName)
        for moduleName in disconnectModules:
            self.announceDisconnect(moduleName)
            del self.info[moduleName]

    def readyLog(self, pathTemplate, timestamp):
        """Resolve a (possibly timestamped) log file path under self.logDir,
        maintaining a '...latest' symlink for templated names; returns the
        full path."""
        if '%s' in pathTemplate:
            timeText = timestamp.strftime('%Y-%m-%d-%H-%M-%S')
            logFile = pathTemplate % timeText
        else:
            logFile = pathTemplate
        if not os.path.exists(self.logDir):
            os.makedirs(self.logDir)
        logPath = os.path.join(self.logDir, logFile)
        if '%s' in pathTemplate:
            latestPath = os.path.join(self.logDir, pathTemplate % 'latest')
            if os.path.islink(latestPath):
                os.unlink(latestPath)
            os.symlink(logFile, latestPath)
        return logPath

    def start(self):
        """Open log files, configure logging, optionally daemonize, then
        bring up all zmq plumbing (RPC, forwarder, monitor, inject streams
        and the disconnect timer)."""
        # open log files
        now = datetime.datetime.utcnow()
        self.logDir = os.path.abspath(self.opts.logDir)
        self.messageLogPath = self.readyLog(self.opts.messageLog, now)
        self.messageLog = open(self.messageLogPath, 'a')
        self.consoleLogPath = self.readyLog(self.opts.consoleLog, now)
        rootLogger = logging.getLogger()
        rootLogger.setLevel(logging.DEBUG)
        fmt = logging.Formatter('%(asctime)s - %(levelname)-7s - %(message)s')
        fmt.converter = time.gmtime  # log timestamps in UTC
        fh = logging.FileHandler(self.consoleLogPath)
        fh.setFormatter(fmt)
        fh.setLevel(logging.DEBUG)
        rootLogger.addHandler(fh)
        if self.opts.foreground:
            ch = logging.StreamHandler()
            ch.setLevel(logging.DEBUG)
            ch.setFormatter(fmt)
            rootLogger.addHandler(ch)
        # daemonize
        if self.opts.foreground:
            logging.info('staying in foreground')
        else:
            logging.info('daemonizing')
            # classic double fork: detach from the controlling terminal and
            # redirect stdout/stderr to /dev/null
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.setsid()
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.chdir('/')
            os.close(1)
            os.close(2)
            nullFd = os.open('/dev/null', os.O_RDWR)
            os.dup2(nullFd, 1)
            os.dup2(nullFd, 2)
        try:
            # set up zmq
            self.context = zmq.Context.instance()
            self.rpcStream = ZMQStream(self.context.socket(zmq.REP))
            self.rpcStream.bind(self.opts.rpcEndpoint)
            self.rpcStream.on_recv(self.handleRpcCall)
            # SUB->PUB forwarder runs in its own thread
            self.forwarder = ThreadDevice(zmq.FORWARDER, zmq.SUB, zmq.PUB)
            self.forwarder.setsockopt_in(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_out(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_in(zmq.SUBSCRIBE, '')
            self.forwarder.setsockopt_out(zmq.HWM, self.opts.highWaterMark)
            self.forwarder.bind_in(self.opts.subscribeEndpoint)
            self.forwarder.bind_in(INJECT_ENDPOINT)
            self.forwarder.bind_out(self.opts.publishEndpoint)
            self.forwarder.bind_out(MONITOR_ENDPOINT)
            for entry in self.opts.subscribeTo:
                try:
                    moduleName, endpoint = entry.split('@')
                    endpoint = parseEndpoint(endpoint)
                except ValueError:
                    raise ValueError('--subscribeTo argument "%s" is not in the format "<moduleName>@<endpoint>"' % entry)
                self.forwarder.connect_in(endpoint)
                self.info[moduleName] = {'module': moduleName, 'pub': endpoint}
            self.forwarder.start()
            time.sleep(0.1)  # wait for forwarder to bind sockets
            # monitor stream taps everything the forwarder republishes
            self.monStream = ZMQStream(self.context.socket(zmq.SUB))
            self.monStream.setsockopt(zmq.SUBSCRIBE, '')
            self.monStream.connect(MONITOR_ENDPOINT)
            self.monStream.on_recv(self.handleMessages)
            # inject stream lets the hub publish its own announcements
            self.injectStream = ZMQStream(self.context.socket(zmq.PUB))
            self.injectStream.connect(INJECT_ENDPOINT)
            self.disconnectTimer = ioloop.PeriodicCallback(self.handleDisconnectTimer, 5000)
            self.disconnectTimer.start()
        except:  # pylint: disable=W0702
            errClass, errObject, errTB = sys.exc_info()[:3]
            errText = '%s.%s: %s' % (errClass.__module__, errClass.__name__, str(errObject))
            logging.error(''.join(traceback.format_tb(errTB)))
            logging.error(errText)
            logging.error('[error during startup -- exiting]')
            sys.exit(1)

    def shutdown(self):
        # Flush any buffered message-log records to disk.
        self.messageLog.flush()
class TestMNClient(TestCase):
    """Tests for MNClient, run against a fake in-process ROUTER broker."""

    # endpoint / service used by both the fake broker and the clients
    endpoint = b'tcp://127.0.0.1:5555'
    service = b'test'

    def setUp(self):
        if _do_print:
            print('Setting up...')
        self.context = zmq.Context()
        self.broker = None
        self._msgs = []  # messages the fake broker has received
        return

    def tearDown(self):
        if _do_print:
            print('Tearing down...')
        if self.broker:
            self._stop_broker()
        self.broker = None
        self._msgs = []
        self.context.term()
        self.context = None
        return

    def _on_msg(self, msg):
        # Fake-broker receive handler: record the message, then either echo
        # a reply or stop the loop so the test body can assert.
        self._msgs.append(msg)
        if _do_print:
            print('broker received:', msg)
        if self.broker.do_reply:
            new_msg = msg[:4]  # keep identity/delimiter/proto/service frames
            new_msg.append(b'REPLY')
            self.broker.send_multipart(new_msg)
        else:
            IOLoop.instance().stop()
        return

    def _start_broker(self, do_reply=False):
        """Helper activating a fake broker in the ioloop.
        """
        if _do_print:
            print('Starting broker at', self.endpoint)
        socket = self.context.socket(zmq.ROUTER)
        self.broker = ZMQStream(socket)
        self.broker.socket.setsockopt(zmq.LINGER, 0)
        self.broker.bind(self.endpoint)
        self.broker.on_recv(self._on_msg)
        self.broker.do_reply = do_reply
        return

    def _stop_broker(self):
        if _do_print:
            print('Stopping broker')
        if self.broker:
            self.broker.socket.close()
            self.broker.close()
            self.broker = None
        return

    # Tests from here

    def test_01_create_01(self):
        """Test MNClient simple create.
        """
        client = MNClient(self.context, self.endpoint, self.service)
        self.assertEqual(self.endpoint, client.endpoint)
        self.assertEqual(self.service, client.service)
        client.shutdown()
        return

    def test_02_send_01(self):
        """Test MNClient simple request.
        """
        self._start_broker()
        client = MNClient(self.context, self.endpoint, self.service)
        client.request(b'XXX')
        IOLoop.instance().start()
        client.shutdown()
        self.assertEqual(len(self._msgs), 1)
        rmsg = self._msgs[0]
        # msg[0] is identity of sender
        self.assertEqual(rmsg[1], b'')  # routing delimiter
        self.assertEqual(rmsg[2], client._proto_version)
        self.assertEqual(rmsg[3], self.service)
        self.assertEqual(rmsg[4], b'XXX')
        self._stop_broker()
        return

    def test_02_send_02(self):
        """Test MNClient multipart request.
        """
        mydata = [b'AAA', b'bbb']
        self._start_broker()
        client = MNClient(self.context, self.endpoint, self.service)
        client.request(mydata)
        IOLoop.instance().start()
        client.shutdown()
        self.assertEqual(len(self._msgs), 1)
        rmsg = self._msgs[0]
        # msg[0] is identity of sender
        self.assertEqual(rmsg[1], b'')  # routing delimiter
        self.assertEqual(rmsg[2], client._proto_version)
        self.assertEqual(rmsg[3], self.service)
        self.assertEqual(rmsg[4:], mydata)
        self._stop_broker()
        return

    def test_02_send_03(self):
        """Test MNClient request in invalid state.
        """
        client = MNClient(self.context, self.endpoint, self.service)
        client.request(b'XXX')  # ok
        self.assertRaises(InvalidStateError, client.request, b'AAA')
        client.shutdown()
        return

    def test_03_timeout_01(self):
        """Test MNClient request w/ timeout.
        """
        client = MyClient(self.context, self.endpoint, self.service)
        client.request(b'XXX', 20)  # 20 millisecs timeout
        IOLoop.instance().start()
        client.shutdown()
        self.assertEqual(client.timed_out, True)
        return

    def test_04_receive_01(self):
        """Test MNClient message receive.
        """
        self._start_broker(do_reply=True)
        client = MyClient(self.context, self.endpoint, self.service)
        client.request(b'XXX')
        IOLoop.instance().start()
        client.shutdown()
        self._stop_broker()
        self.assertEqual(True, hasattr(client, 'last_msg'))
        self.assertEqual(3, len(client.last_msg))
        self.assertEqual(b'REPLY', client.last_msg[-1])
        self.assertEqual(self.service, client.last_msg[-2])
        return
def construct_hub(self):
    """Construct the Hub: bind registration, heartbeat, notification and
    monitor sockets, connect the DB backend, and build the engine/client
    connection-info dicts."""
    # "%i" placeholder is filled with a port number per socket below.
    client_iface = "%s://%s:"%(self.client_transport, self.client_ip) + "%i"
    engine_iface = "%s://%s:"%(self.engine_transport, self.engine_ip) + "%i"
    ctx = self.context
    loop = self.loop
    # Registrar socket
    q = ZMQStream(ctx.socket(zmq.XREP), loop)
    q.bind(client_iface % self.regport)
    self.log.info("Hub listening on %s for registration."%(client_iface%self.regport))
    if self.client_ip != self.engine_ip:
        # separate engine-side registration endpoint when IPs differ
        q.bind(engine_iface % self.regport)
        self.log.info("Hub listening on %s for registration."%(engine_iface%self.regport))
    ### Engine connections ###
    # heartbeat: PUB pings out, XREP collects pongs
    hpub = ctx.socket(zmq.PUB)
    hpub.bind(engine_iface % self.hb[0])
    hrep = ctx.socket(zmq.XREP)
    hrep.bind(engine_iface % self.hb[1])
    self.heartmonitor = HeartMonitor(loop=loop,
                                     pingstream=ZMQStream(hpub,loop),
                                     pongstream=ZMQStream(hrep,loop),
                                     period=self.ping, logname=self.log.name)
    ### Client connections ###
    # Notifier socket
    n = ZMQStream(ctx.socket(zmq.PUB), loop)
    n.bind(client_iface%self.notifier_port)
    ### build and launch the queues ###
    # monitor socket: subscribes to everything, bound on two endpoints
    sub = ctx.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, "")
    sub.bind(self.monitor_url)
    sub.bind('inproc://monitor')
    sub = ZMQStream(sub, loop)  # rewrap the raw socket as a stream
    # connect the db
    self.log.info('Hub using DB backend: %r'%(self.db_class.split()[-1]))
    # cdir = self.config.Global.cluster_dir
    self.db = import_item(self.db_class)(session=self.session.session,
                                         config=self.config)
    time.sleep(.25)  # brief pause before continuing startup
    # build connection dicts
    self.engine_info = {
        'control' : engine_iface%self.control[1],
        'mux': engine_iface%self.mux[1],
        'heartbeat': (engine_iface%self.hb[0], engine_iface%self.hb[1]),
        'task' : engine_iface%self.task[1],
        'iopub' : engine_iface%self.iopub[1],
        # 'monitor' : engine_iface%self.mon_port,
    }
    self.client_info = {
        'control' : client_iface%self.control[0],
        'mux': client_iface%self.mux[0],
        'task' : (self.scheme, client_iface%self.task[0]),
        'iopub' : client_iface%self.iopub[0],
        'notification': client_iface%self.notifier_port
    }
    self.log.debug("Hub engine addrs: %s"%self.engine_info)
    self.log.debug("Hub client addrs: %s"%self.client_info)
    self.hub = Hub(loop=loop, session=self.session, monitor=sub,
                   heartmonitor=self.heartmonitor,
                   query=q, notifier=n, db=self.db,
                   engine_info=self.engine_info,
                   client_info=self.client_info,
                   logname=self.log.name)
class Test_MDPWorker(unittest.TestCase):
    """Tests for MDPWorker, run against a fake XREP broker in the ioloop.
    (Python 2 code: uses print statements.)"""

    # endpoint / service used by the fake broker and the worker
    endpoint = b'tcp://127.0.0.1:7777'
    service = b'test'

    def setUp(self):
        print 'set up'
        sys.stdout.flush()
        self.context = zmq.Context()
        self.broker = None
        self._msgs = []
        return

    def tearDown(self):
        print 'tear down'
        sys.stdout.flush()
        if self.broker:
            self._stop_broker()
        self.broker = None
##        self.context.term()
        self.context = None
        return

    def _on_msg(self, msg):
        # Fake-broker receive handler: dispatch on the MDP command byte.
        if _do_print:
            print 'broker received:', pprint(msg)
        self.target = msg.pop(0)
        if msg[1] == chr(1):  # ready
            print 'READY'
            self.target = msg[0]
            return
        if msg[1] == chr(4):  # heartbeat (comment fixed; was "ready")
            print 'HB'
            return
        if msg[1] == chr(3):  # reply -- end of the round trip
            IOLoop.instance().stop()
            return
        return

    def _start_broker(self, do_reply=False):
        """Helper activating a fake broker in the ioloop.
        """
        socket = self.context.socket(zmq.XREP)
        self.broker = ZMQStream(socket)
        self.broker.socket.setsockopt(zmq.LINGER, 0)
        self.broker.bind(self.endpoint)
        self.broker.on_recv(self._on_msg)
        self.broker.do_reply = do_reply
        # periodic heartbeat towards the worker
        self.broker.ticker = PeriodicCallback(self._tick, MyWorker.HB_INTERVAL)
        self.broker.ticker.start()
        self.target = None
        return

    def _stop_broker(self):
        if self.broker:
            self.broker.ticker.stop()
            self.broker.ticker = None
            self.broker.socket.close()
            self.broker.close()
            self.broker = None
        return

    def _tick(self):
        # Send a heartbeat frame to the last known worker identity.
        if self.broker and self.target:
            msg = [self.target, b'MPDW01', chr(4)]
            self.broker.send_multipart(msg)
        return

    def send_req(self):
        # Push a request through the fake broker to the worker.
        data = ['AA', 'bb']
        msg = [self.target, b'MPDW01', chr(2), self.target, b''] + data
        print 'broker sending:', pprint(msg)
        self.broker.send_multipart(msg)
        return

    def stop_test(self):
        IOLoop.instance().stop()
        return

    # tests follow

    def test_01_simple_01(self):
        """Test MDPWorker simple req/reply.
        """
        self._start_broker()
        time.sleep(0.2)  # let the broker bind before the worker connects
        worker = MyWorker(self.context, self.endpoint, self.service)
        sender = DelayedCallback(self.send_req, 500)
        stopper = DelayedCallback(self.stop_test, 2500)
        sender.start()
        stopper.start()
        IOLoop.instance().start()
        worker.shutdown()
        self._stop_broker()
        return
def init_hub(self):
    """Construct the Hub: bind registration, heartbeat, notification,
    monitor and resubmit sockets, connect the DB backend, and build the
    engine/client connection-info dicts."""
    # "%i" placeholder is filled with a port number per socket below.
    client_iface = "%s://%s:" % (self.client_transport, self.client_ip) + "%i"
    engine_iface = "%s://%s:" % (self.engine_transport, self.engine_ip) + "%i"
    ctx = self.context
    loop = self.loop
    # Registrar socket
    q = ZMQStream(ctx.socket(zmq.ROUTER), loop)
    q.bind(client_iface % self.regport)
    self.log.info("Hub listening on %s for registration.", client_iface % self.regport)
    if self.client_ip != self.engine_ip:
        # separate engine-side registration endpoint when IPs differ
        q.bind(engine_iface % self.regport)
        self.log.info("Hub listening on %s for registration.", engine_iface % self.regport)
    ### Engine connections ###
    # heartbeat: PUB pings out, ROUTER collects pongs
    hpub = ctx.socket(zmq.PUB)
    hpub.bind(engine_iface % self.hb[0])
    hrep = ctx.socket(zmq.ROUTER)
    hrep.bind(engine_iface % self.hb[1])
    self.heartmonitor = HeartMonitor(loop=loop, config=self.config, log=self.log,
                                     pingstream=ZMQStream(hpub,loop),
                                     pongstream=ZMQStream(hrep,loop)
                                     )
    ### Client connections ###
    # Notifier socket
    n = ZMQStream(ctx.socket(zmq.PUB), loop)
    n.bind(client_iface%self.notifier_port)
    ### build and launch the queues ###
    # monitor socket: subscribes to everything, bound on two endpoints
    sub = ctx.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, b"")
    sub.bind(self.monitor_url)
    sub.bind('inproc://monitor')
    sub = ZMQStream(sub, loop)  # rewrap the raw socket as a stream
    # connect the db
    self.log.info('Hub using DB backend: %r'%(self.db_class.split()[-1]))
    # cdir = self.config.Global.cluster_dir
    self.db = import_item(str(self.db_class))(session=self.session.session,
                                              config=self.config, log=self.log)
    time.sleep(.25)  # brief pause before continuing startup
    try:
        scheme = self.config.TaskScheduler.scheme_name
    except AttributeError:
        # fall back to the TaskScheduler trait's default
        from .scheduler import TaskScheduler
        scheme = TaskScheduler.scheme_name.get_default_value()
    # build connection dicts
    self.engine_info = {
        'control' : engine_iface%self.control[1],
        'mux': engine_iface%self.mux[1],
        'heartbeat': (engine_iface%self.hb[0], engine_iface%self.hb[1]),
        'task' : engine_iface%self.task[1],
        'iopub' : engine_iface%self.iopub[1],
        # 'monitor' : engine_iface%self.mon_port,
    }
    self.client_info = {
        'control' : client_iface%self.control[0],
        'mux': client_iface%self.mux[0],
        'task' : (scheme, client_iface%self.task[0]),
        'iopub' : client_iface%self.iopub[0],
        'notification': client_iface%self.notifier_port
    }
    self.log.debug("Hub engine addrs: %s", self.engine_info)
    self.log.debug("Hub client addrs: %s", self.client_info)
    # resubmit stream
    r = ZMQStream(ctx.socket(zmq.DEALER), loop)
    url = util.disambiguate_url(self.client_info['task'][-1])
    r.setsockopt(zmq.IDENTITY, self.session.bsession)
    r.connect(url)
    self.hub = Hub(loop=loop, session=self.session, monitor=sub,
                   heartmonitor=self.heartmonitor,
                   query=q, notifier=n, resubmit=r, db=self.db,
                   engine_info=self.engine_info,
                   client_info=self.client_info,
                   log=self.log)
class ZmqCentral(object):
    """Central zmq hub: forwards pub/sub traffic between modules via a
    SUB->PUB forwarder device, logs every message, tracks module liveness
    through heartbeats, and answers info requests over a REP RPC socket."""

    def __init__(self, opts):
        self.opts = opts  # parsed command-line options
        self.info = {}    # moduleName -> latest heartbeat params (incl. 'timeout')

    def announceConnect(self, moduleName, params):
        # Broadcast a connect event on the bus via the inject stream.
        logging.info('module %s connected', moduleName)
        self.injectStream.send('central.connect.%s:%s'
                               % (moduleName, json.dumps(params)))

    def announceDisconnect(self, moduleName):
        # Broadcast a disconnect event carrying a timestamp payload.
        logging.info('module %s disconnected', moduleName)
        self.injectStream.send(
            'central.disconnect.%s:%s'
            % (moduleName, json.dumps({'timestamp': str(getTimestamp())})))

    def logMessage(self, msg, posixTime=None, attachmentDir='-'):
        # Append one record: "@@@ <timestamp> <length> <attachmentDir> <msg>\n".
        mlog = self.messageLog
        mlog.write('@@@ %d %d %s ' % (getTimestamp(posixTime), len(msg), attachmentDir))
        mlog.write(msg)
        mlog.write('\n')

    def logMessageWithAttachments0(self, msg):
        """Write the message's attachments to a unique directory, then log
        the message body with a pointer to that directory."""
        parsed = parseMessage(msg)
        posixTime = time.time()
        # construct attachment directory
        dt = datetime.datetime.utcfromtimestamp(posixTime)
        dateText = dt.strftime('%Y-%m-%d')
        timeText = dt.strftime('%H-%M-%S') + '.%06d' % dt.microsecond
        uniq = '%08x' % random.getrandbits(32)  # random suffix avoids collisions
        attachmentSuffix = os.path.join('attachments', dateText, timeText,
                                        parsed['topic'], uniq)
        attachmentPath = os.path.join(self.logDir, attachmentSuffix)
        os.makedirs(attachmentPath)
        # write attachments to attachment directory
        for attachment in parsed['attachments']:
            fullName = os.path.join(attachmentPath, attachment.get_filename())
            open(fullName, 'wb').write(attachment.get_payload())
        # log message with a pointer to the attachment directory
        self.logMessage(':'.join((parsed['topic'], parsed['json'])),
                        posixTime,
                        attachmentSuffix)

    def logMessageWithAttachments(self, msg):
        # Best-effort wrapper: a logging failure must not kill the hub.
        try:
            return self.logMessageWithAttachments0(msg)
        except:  # pylint: disable=W0702
            self.logException('logging message with attachments')

    def handleHeartbeat(self, params):
        """Record a module heartbeat; announce (re)connects when the module
        is new or its pub endpoint changed. Returns 'ok'."""
        moduleName = params['module'].encode('utf-8')
        now = getTimestamp()
        oldInfo = self.info.get(moduleName, None)
        if oldInfo:
            if oldInfo.get('pub', None) != params.get('pub', None):
                # pub endpoint changed -> treat as a reconnect
                self.announceDisconnect(moduleName)
                self.announceConnect(moduleName, params)
        else:
            self.announceConnect(moduleName, params)
        self.info[moduleName] = params
        keepalive = params.get('keepalive', DEFAULT_KEEPALIVE_US)
        params['timeout'] = now + keepalive  # liveness deadline
        return 'ok'

    def handleInfo(self):
        # RPC method: return the current module table.
        return self.info

    def logException(self, whileClause):
        # Log the active exception with its traceback and context string.
        errClass, errObject, errTB = sys.exc_info()[:3]
        errText = '%s.%s: %s' % (errClass.__module__, errClass.__name__, str(errObject))
        logging.warning(''.join(traceback.format_tb(errTB)))
        logging.warning(errText)
        logging.warning('[error while %s at time %s]', whileClause, getTimestamp())

    def handleMessages(self, messages):
        """Log every message from the monitor stream; heartbeat messages are
        additionally parsed and fed to handleHeartbeat."""
        for msg in messages:
            if hasAttachments(msg):
                self.logMessageWithAttachments(msg)
            else:
                self.logMessage(msg)
            if msg.startswith('central.heartbeat.'):
                try:
                    _topic, body = msg.split(':', 1)
                    self.handleHeartbeat(json.loads(body))
                except:  # pylint: disable=W0702
                    self.logException('handling heartbeat')

    def handleRpcCall(self, messages):
        """Serve JSON-RPC-style requests arriving on the REP stream."""
        for msg in messages:
            try:
                call = json.loads(msg)
                callId = call['id']
            except:  # pylint: disable=W0702
                # NOTE(review): execution falls through after this reply, so
                # the try below can reference an unbound callId -- confirm
                # whether a `continue` was intended here.
                self.rpcStream.send(
                    json.dumps({
                        'result': None,
                        'error': 'malformed request'
                    }))
            try:
                method = call['method']
                _params = call['params']
                if method == 'info':
                    result = self.handleInfo()
                else:
                    raise ValueError('unknown method %s' % method)
                self.rpcStream.send(
                    json.dumps({
                        'result': result,
                        'error': None,
                        'id': callId
                    }))
            except:  # pylint: disable=W0702
                self.logException('handling rpc message')
                errClass, errObject = sys.exc_info()[:2]
                errText = '%s.%s: %s' % (errClass.__module__, errClass.__name__, str(errObject))
                self.rpcStream.send(
                    json.dumps({
                        'result': None,
                        'error': errText,
                        'id': callId
                    }))

    def handleDisconnectTimer(self):
        # Periodic sweep: drop modules whose keepalive deadline has passed.
        now = getTimestamp()
        disconnectModules = []
        for moduleName, entry in self.info.iteritems():  # Python 2 dict API
            timeout = entry.get('timeout', None)
            if timeout is not None and now > timeout:
                disconnectModules.append(moduleName)
        for moduleName in disconnectModules:
            self.announceDisconnect(moduleName)
            del self.info[moduleName]

    def readyLog(self, pathTemplate, timestamp):
        """Resolve a (possibly timestamped) log file path under self.logDir,
        maintaining a '...latest' symlink for templated names; returns the
        full path."""
        if '%s' in pathTemplate:
            timeText = timestamp.strftime('%Y-%m-%d-%H-%M-%S')
            logFile = pathTemplate % timeText
        else:
            logFile = pathTemplate
        if not os.path.exists(self.logDir):
            os.makedirs(self.logDir)
        logPath = os.path.join(self.logDir, logFile)
        if '%s' in pathTemplate:
            latestPath = os.path.join(self.logDir, pathTemplate % 'latest')
            if os.path.islink(latestPath):
                os.unlink(latestPath)
            os.symlink(logFile, latestPath)
        return logPath

    def start(self):
        """Open log files, configure logging, optionally daemonize, then
        bring up all zmq plumbing (RPC, forwarder, monitor, inject streams
        and the disconnect timer)."""
        # open log files
        now = datetime.datetime.utcnow()
        self.logDir = os.path.abspath(self.opts.logDir)
        self.messageLogPath = self.readyLog(self.opts.messageLog, now)
        self.messageLog = open(self.messageLogPath, 'a')
        self.consoleLogPath = self.readyLog(self.opts.consoleLog, now)
        rootLogger = logging.getLogger()
        rootLogger.setLevel(logging.DEBUG)
        fmt = logging.Formatter('%(asctime)s - %(levelname)-7s - %(message)s')
        fmt.converter = time.gmtime  # log timestamps in UTC
        fh = logging.FileHandler(self.consoleLogPath)
        fh.setFormatter(fmt)
        fh.setLevel(logging.DEBUG)
        rootLogger.addHandler(fh)
        if self.opts.foreground:
            ch = logging.StreamHandler()
            ch.setLevel(logging.DEBUG)
            ch.setFormatter(fmt)
            rootLogger.addHandler(ch)
        # daemonize
        if self.opts.foreground:
            logging.info('staying in foreground')
        else:
            logging.info('daemonizing')
            # classic double fork: detach from the controlling terminal and
            # redirect stdout/stderr to /dev/null
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.setsid()
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.chdir('/')
            os.close(1)
            os.close(2)
            nullFd = os.open('/dev/null', os.O_RDWR)
            os.dup2(nullFd, 1)
            os.dup2(nullFd, 2)
        try:
            # set up zmq
            self.context = zmq.Context.instance()
            self.rpcStream = ZMQStream(self.context.socket(zmq.REP))
            self.rpcStream.bind(self.opts.rpcEndpoint)
            self.rpcStream.on_recv(self.handleRpcCall)
            # SUB->PUB forwarder runs in its own thread
            self.forwarder = ThreadDevice(zmq.FORWARDER, zmq.SUB, zmq.PUB)
            self.forwarder.setsockopt_in(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_out(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_in(zmq.SUBSCRIBE, '')
            self.forwarder.setsockopt_out(zmq.HWM, self.opts.highWaterMark)
            self.forwarder.bind_in(self.opts.subscribeEndpoint)
            self.forwarder.bind_in(INJECT_ENDPOINT)
            self.forwarder.bind_out(self.opts.publishEndpoint)
            self.forwarder.bind_out(MONITOR_ENDPOINT)
            for entry in self.opts.subscribeTo:
                try:
                    moduleName, endpoint = entry.split('@')
                    endpoint = parseEndpoint(endpoint)
                except ValueError:
                    raise ValueError(
                        '--subscribeTo argument "%s" is not in the format "<moduleName>@<endpoint>"' % entry)
                self.forwarder.connect_in(endpoint)
                self.info[moduleName] = {'module': moduleName, 'pub': endpoint}
            self.forwarder.start()
            time.sleep(0.1)  # wait for forwarder to bind sockets
            # monitor stream taps everything the forwarder republishes
            self.monStream = ZMQStream(self.context.socket(zmq.SUB))
            self.monStream.setsockopt(zmq.SUBSCRIBE, '')
            self.monStream.connect(MONITOR_ENDPOINT)
            self.monStream.on_recv(self.handleMessages)
            # inject stream lets the hub publish its own announcements
            self.injectStream = ZMQStream(self.context.socket(zmq.PUB))
            self.injectStream.connect(INJECT_ENDPOINT)
            self.disconnectTimer = ioloop.PeriodicCallback(
                self.handleDisconnectTimer, 5000)
            self.disconnectTimer.start()
        except:  # pylint: disable=W0702
            errClass, errObject, errTB = sys.exc_info()[:3]
            errText = '%s.%s: %s' % (errClass.__module__, errClass.__name__, str(errObject))
            logging.error(''.join(traceback.format_tb(errTB)))
            logging.error(errText)
            logging.error('[error during startup -- exiting]')
            sys.exit(1)

    def shutdown(self):
        # Flush any buffered message-log records to disk.
        self.messageLog.flush()
class TestMNWorker(TestCase):
    """Tests for the MN-protocol worker.

    Runs a fake broker (a ROUTER socket driven by the tornado IOLoop) and
    exercises the worker's request/reply round trip against it.  The fake
    broker dispatches on the protocol command byte: \x01 READY, \x02 REQUEST,
    \x03 REPLY, \x04 HEARTBEAT.
    """

    # Fixed endpoint/service shared by all tests in this class.
    endpoint = b'tcp://127.0.0.1:5555'
    service = b'test'

    def setUp(self):
        # Fresh context per test; the fake broker is created lazily by
        # _start_broker() so tests that don't need it pay nothing.
        if _do_print:
            print('Setting up...')
            sys.stdout.flush()
        self.context = zmq.Context()
        self.broker = None
        self._msgs = []
        return

    def tearDown(self):
        if _do_print:
            print('Tearing down...')
            sys.stdout.flush()
        if self.broker:
            self._stop_broker()
        self.broker = None
        self.context = None
        return

    def _on_msg(self, msg):
        """Fake-broker receive handler: dispatch on the command byte.

        Frame layout as seen by the ROUTER socket:
        [sender identity, empty delimiter, protocol header, command, ...],
        so after popping the first two frames msg[1] is the command.
        """
        if _do_print:
            print('Broker received:', msg)
        self.target = msg.pop(0)   # worker identity; reused by _tick()/send_req()
        marker_frame = msg.pop(0)  # empty routing delimiter frame
        if msg[1] == b'\x01':  # READY: worker registered itself
            if _do_print:
                print('READY received')
            return
        if msg[1] == b'\x04':  # heartbeat from the worker
            if _do_print:
                print('HB received')
            return
        if msg[1] == b'\x03':  # REPLY: round trip complete, end the test loop
            IOLoop.instance().stop()
            return
        return

    def _start_broker(self, do_reply=False):
        """Helper activating a fake broker in the ioloop.

        Binds a ROUTER socket on self.endpoint, hooks _on_msg as the receive
        callback, and starts a periodic heartbeat ticker at the worker's own
        heartbeat interval.
        """
        socket = self.context.socket(zmq.ROUTER)
        self.broker = ZMQStream(socket)
        # LINGER 0 so close() drops unsent messages instead of blocking.
        self.broker.socket.setsockopt(zmq.LINGER, 0)
        self.broker.bind(self.endpoint)
        self.broker.on_recv(self._on_msg)
        self.broker.do_reply = do_reply
        self.broker.ticker = PeriodicCallback(self._tick,
                                              WorkerRunner.HB_INTERVAL)
        self.broker.ticker.start()
        self.target = None
        if _do_print:
            print("Broker started")
        return

    def _stop_broker(self):
        # Stop the heartbeat ticker first, then close socket and stream.
        if self.broker:
            self.broker.ticker.stop()
            self.broker.ticker = None
            self.broker.socket.close()
            self.broker.close()
            self.broker = None
        if _do_print:
            print("Broker stopped")
        return

    def _tick(self):
        # Periodic broker->worker heartbeat (command b'\x04'); only possible
        # once the worker's identity (self.target) is known from READY.
        if self.broker and self.target:
            msg = [self.target, b'', b'MNPW01', b'\x04']
            self.broker.send_multipart(msg)
            if _do_print:
                print("Tick sent:", msg)
        return

    def send_req(self):
        # Send a request (command b'\x02') to the worker; its answer comes
        # back through _on_msg as command b'\x03' and stops the loop.
        data = [b'AA', b'bb']
        msg = [self.target, b'', b'MNPW01', b'\x02', self.target, b''] + data
        self.broker.send_multipart(msg)
        if _do_print:
            print('broker sent:', msg)
        return

    @staticmethod
    def stop_loop():
        # Safety valve: unconditionally stop the IOLoop (used as a timeout).
        IOLoop.instance().stop()
        return

    # Tests follow

    def test_simple_worker(self):
        """Test MNWorker simple req/reply.
        """
        self._start_broker()
        time.sleep(0.2)  # give the broker a moment to bind before connecting
        worker = WorkerRunner(self.context, self.endpoint, self.service)
        sender = DelayedCallback(self.send_req, 500)
        stopper = DelayedCallback(self.stop_loop, 2500)  # hard stop / timeout
        sender.start()
        stopper.start()
        IOLoop.instance().start()
        worker.shutdown()
        self._stop_broker()
        return
class Test_MDPWorker(unittest.TestCase):
    """Tests for an MDP worker against a fake XREP broker.

    NOTE(review): this chunk is Python 2 code (print statements).  Command
    bytes: chr(1) READY, chr(2) REQUEST, chr(3) REPLY, chr(4) HEARTBEAT.
    """

    endpoint = b'tcp://127.0.0.1:7777'
    service = b'test'

    def setUp(self):
        print 'set up'
        sys.stdout.flush()
        self.context = zmq.Context()
        self.broker = None   # fake broker; created lazily by _start_broker()
        self._msgs = []
        return

    def tearDown(self):
        print 'tear down'
        sys.stdout.flush()
        if self.broker:
            self._stop_broker()
        self.broker = None
        ## self.context.term()
        self.context = None
        return

    def _on_msg(self, msg):
        """Fake-broker receive handler: dispatch on the MDP command byte.

        msg[0] is the sender identity (popped into self.target); msg[1] is
        then the command byte.
        """
        if _do_print:
            print 'broker received:', pprint(msg)
        self.target = msg.pop(0)
        if msg[1] == chr(1):  # READY: remember the worker's identity
            print 'READY'
            self.target = msg[0]
            return
        if msg[1] == chr(4):  # heartbeat from the worker
            print 'HB'
            return
        if msg[1] == chr(3):  # REPLY: round trip complete, end the test loop
            IOLoop.instance().stop()
            return
        return

    def _start_broker(self, do_reply=False):
        """Helper activating a fake broker in the ioloop.
        """
        socket = self.context.socket(zmq.XREP)
        self.broker = ZMQStream(socket)
        # LINGER 0 so close() drops unsent messages instead of blocking.
        self.broker.socket.setsockopt(zmq.LINGER, 0)
        self.broker.bind(self.endpoint)
        self.broker.on_recv(self._on_msg)
        self.broker.do_reply = do_reply
        self.broker.ticker = PeriodicCallback(self._tick, MyWorker.HB_INTERVAL)
        self.broker.ticker.start()
        self.target = None
        return

    def _stop_broker(self):
        # Stop the ticker first, then close the socket and the stream.
        if self.broker:
            self.broker.ticker.stop()
            self.broker.ticker = None
            self.broker.socket.close()
            self.broker.close()
            self.broker = None
        return

    def _tick(self):
        # Periodic broker->worker heartbeat.
        # NOTE(review): the header b'MPDW01' looks like a typo for b'MDPW01'
        # -- confirm against the protocol constant MyWorker expects.
        if self.broker and self.target:
            msg = [self.target, b'MPDW01', chr(4)]
            self.broker.send_multipart(msg)
        return

    def send_req(self):
        # Send an MDP request (command chr(2)) to the worker.
        data = ['AA', 'bb']
        msg = [self.target, b'MPDW01', chr(2), self.target, b''] + data
        print 'borker sending:', pprint(msg)
        self.broker.send_multipart(msg)
        return

    # tests follow

    def test_01_simple_01(self):
        """Test MDPWorker simple req/reply.
        """
        self._start_broker()
        time.sleep(0.2)  # let the broker bind before the worker connects
        worker = MyWorker(self.context, self.endpoint, self.service)
        sender = DelayedCallback(self.send_req, 1000)
        sender.start()
        IOLoop.instance().start()
        worker.shutdown()
        self._stop_broker()
        return
class ZmqPublisher(object):
    """Publishes messages on a ZMQ PUB socket and forwards them to central.

    On start() the publisher (a) connects its PUB socket to central's
    subscribe endpoint, (b) binds a local publish endpoint (picking a random
    port when the endpoint ends in ':random'), and (c) starts a periodic
    heartbeat so central can track this module and its publish endpoint.
    """

    def __init__(self,
                 moduleName,
                 centralHost=PUBLISHER_OPT_DEFAULTS['centralHost'],
                 context=None,
                 centralSubscribeEndpoint=PUBLISHER_OPT_DEFAULTS['centralSubscribeEndpoint'],
                 publishEndpoint=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                 heartbeatPeriodMsecs=PUBLISHER_OPT_DEFAULTS['heartbeatPeriodMsecs'],
                 # highWaterMark=PUBLISHER_OPT_DEFAULTS['highWaterMark']
                 ):
        self.moduleName = moduleName
        self.centralHost = centralHost
        # Share the process-wide context unless the caller supplies one.
        if context is None:
            context = zmq.Context.instance()
        self.context = context
        self.centralSubscribeEndpoint = parseEndpoint(centralSubscribeEndpoint,
                                                      defaultPort=DEFAULT_CENTRAL_SUBSCRIBE_PORT,
                                                      centralHost=self.centralHost)
        self.publishEndpoint = parseEndpoint(publishEndpoint,
                                             defaultPort='random')
        self.heartbeatPeriodMsecs = heartbeatPeriodMsecs
        #self.highWaterMark = highWaterMark

        # Created in start(); None until then.
        self.pubStream = None
        self.heartbeatTimer = None
        self.serializer = serializers.get_serializer('json')()

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        """Add this publisher's command-line options to *parser* (optparse).

        Each option is added only if not already present, so several
        components can safely share one parser.
        """
        if not parser.has_option('--centralHost'):
            parser.add_option('--centralHost',
                              default=PUBLISHER_OPT_DEFAULTS['centralHost'],
                              help='Host where central runs [%default]')
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName',
                              default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralSubscribeEndpoint'):
            # BUG FIX: this option used to be added as the misspelled
            # '--centralSubcribeEndpoint', so its value was stored under the
            # wrong dest and getOptionValues() (which iterates the
            # PUBLISHER_OPT_DEFAULTS keys) never picked it up.
            parser.add_option('--centralSubscribeEndpoint',
                              default=PUBLISHER_OPT_DEFAULTS['centralSubscribeEndpoint'],
                              help='Endpoint where central listens for messages [%default]')
        if not parser.has_option('--publishEndpoint'):
            parser.add_option('--publishEndpoint',
                              default=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                              help='Endpoint to publish messages on [%default]')
        if not parser.has_option('--heartbeatPeriodMsecs'):
            parser.add_option('--heartbeatPeriodMsecs',
                              default=PUBLISHER_OPT_DEFAULTS['heartbeatPeriodMsecs'],
                              type='int',
                              help='Period for sending heartbeats to central [%default]')
        #if not parser.has_option('--highWaterMark'):
        #    parser.add_option('--highWaterMark',
        #                      default=PUBLISHER_OPT_DEFAULTS['highWaterMark'],
        #                      type='int',
        #                      help='High-water mark for publish socket (see 0MQ docs) [%default]')

    @classmethod
    def getOptionValues(cls, opts):
        """Return a dict of the parsed option values that were actually set.

        Values of None are treated as "not given" and skipped so defaults
        elsewhere still apply.
        """
        result = {}
        # Iterating the dict directly is equivalent to the old
        # .iterkeys() call in Python 2 and also works on Python 3.
        for key in PUBLISHER_OPT_DEFAULTS:
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def heartbeat(self):
        """Publish a heartbeat telling central where this module publishes."""
        logging.debug('ZmqPublisher: heartbeat')
        self.sendJson('central.heartbeat.%s' % self.moduleName,
                      {'host': getShortHostName(),
                       'pub': self.publishEndpoint})

    def sendRaw(self, topic, body):
        """Send one pre-serialized message as 'topic:body' and flush it out."""
        self.pubStream.send('%s:%s' % (topic, body))
        self.pubStream.flush()

    def sendJson(self, topic, obj):
        """JSON-encode *obj* and publish it under *topic*.

        Dicts are stamped with this module's name and a timestamp unless the
        caller already provided them.
        """
        if isinstance(obj, dict):
            obj.setdefault('module', self.moduleName)
            obj.setdefault('timestamp', str(getTimestamp()))
        self.sendRaw(topic, json.dumps(obj))

    def sendDjango(self, modelInstance, topic=None, topicSuffix=None):
        """Serialize a Django model instance and publish it.

        The topic defaults to the serialized model's 'model' label; an
        optional suffix is appended when given.
        """
        dataText = self.serializer.serialize([modelInstance])
        data = json.loads(dataText)[0]
        if topic is None:
            topic = data['model'].encode('utf-8')
        if topicSuffix is not None:
            topic += topicSuffix
        self.sendJson(topic, {'data': data})

    def start(self):
        """Open the PUB socket, bind the publish endpoint, start heartbeats."""
        pubSocket = self.context.socket(zmq.PUB)
        self.pubStream = ZMQStream(pubSocket)
        # self.pubStream.setsockopt(zmq.IDENTITY, self.moduleName)
        # self.pubStream.setsockopt(zmq.HWM, self.highWaterMark)
        self.pubStream.connect(self.centralSubscribeEndpoint)
        logging.info('zmq.publisher: connected to central at %s',
                     self.centralSubscribeEndpoint)

        if self.publishEndpoint.endswith(':random'):
            # Let ZMQ choose a free port, then record the concrete endpoint
            # so heartbeats advertise a connectable address.
            endpointWithoutPort = re.sub(r':random$', '', self.publishEndpoint)
            port = self.pubStream.bind_to_random_port(endpointWithoutPort)
            self.publishEndpoint = '%s:%d' % (endpointWithoutPort, port)
        else:
            self.pubStream.bind(self.publishEndpoint)

        self.heartbeatTimer = ioloop.PeriodicCallback(self.heartbeat,
                                                      self.heartbeatPeriodMsecs)
        self.heartbeatTimer.start()
        self.heartbeat()
def construct_hub(self):
    """construct

    Builds every Hub-side socket (registration, heartbeat, notifier,
    monitor), connects the DB backend, assembles the engine/client
    connection-info dicts, and finally instantiates the Hub.
    """
    # '%i' placeholders are filled in with concrete port numbers below.
    client_iface = "%s://%s:" % (self.client_transport, self.client_ip) + "%i"
    engine_iface = "%s://%s:" % (self.engine_transport, self.engine_ip) + "%i"

    ctx = self.context
    loop = self.loop

    # Registrar socket
    q = ZMQStream(ctx.socket(zmq.XREP), loop)
    q.bind(client_iface % self.regport)
    self.log.info("Hub listening on %s for registration." % (client_iface % self.regport))
    if self.client_ip != self.engine_ip:
        # Engines use a different interface, so listen for registration
        # there as well.
        q.bind(engine_iface % self.regport)
        self.log.info("Hub listening on %s for registration." % (engine_iface % self.regport))

    ### Engine connections ###

    # heartbeat: PUB pings out, XREP collects pongs.
    hpub = ctx.socket(zmq.PUB)
    hpub.bind(engine_iface % self.hb[0])
    hrep = ctx.socket(zmq.XREP)
    hrep.bind(engine_iface % self.hb[1])
    self.heartmonitor = HeartMonitor(loop=loop,
                                     pingstream=ZMQStream(hpub, loop),
                                     pongstream=ZMQStream(hrep, loop),
                                     period=self.ping,
                                     logname=self.log.name)

    ### Client connections ###
    # Notifier socket
    n = ZMQStream(ctx.socket(zmq.PUB), loop)
    n.bind(client_iface % self.notifier_port)

    ### build and launch the queues ###

    # monitor socket: subscribe to everything, on both the external
    # monitor URL and the in-process endpoint.
    sub = ctx.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, "")
    sub.bind(self.monitor_url)
    sub.bind('inproc://monitor')
    sub = ZMQStream(sub, loop)

    # connect the db
    self.log.info('Hub using DB backend: %r' % (self.db_class.split()[-1]))
    # cdir = self.config.Global.cluster_dir
    self.db = import_item(self.db_class)(session=self.session.session, config=self.config)
    # NOTE(review): brief pause, presumably to let the sockets above finish
    # binding before anything connects -- confirm.
    time.sleep(.25)

    # build connection dicts
    self.engine_info = {
        'control': engine_iface % self.control[1],
        'mux': engine_iface % self.mux[1],
        'heartbeat': (engine_iface % self.hb[0], engine_iface % self.hb[1]),
        'task': engine_iface % self.task[1],
        'iopub': engine_iface % self.iopub[1],
        # 'monitor' : engine_iface%self.mon_port,
    }

    self.client_info = {
        'control': client_iface % self.control[0],
        'mux': client_iface % self.mux[0],
        'task': (self.scheme, client_iface % self.task[0]),
        'iopub': client_iface % self.iopub[0],
        'notification': client_iface % self.notifier_port
    }

    self.log.debug("Hub engine addrs: %s" % self.engine_info)
    self.log.debug("Hub client addrs: %s" % self.client_info)
    self.hub = Hub(loop=loop, session=self.session, monitor=sub,
                   heartmonitor=self.heartmonitor,
                   query=q, notifier=n, db=self.db,
                   engine_info=self.engine_info,
                   client_info=self.client_info,
                   logname=self.log.name)
class Test_MDPClient(unittest.TestCase):
    """Tests for MDPClient against a fake XREP broker.

    NOTE(review): this chunk is Python 2 code (print statements, b'' is str).
    """

    endpoint = b'tcp://127.0.0.1:7777'
    service = b'test'

    def setUp(self):
        self.context = zmq.Context()
        self.broker = None   # fake broker; created lazily by _start_broker()
        self._msgs = []      # every message the fake broker has received
        return

    def tearDown(self):
        if self.broker:
            self._stop_broker()
        self.broker = None
        self._msgs = []
        self.context.term()
        self.context = None
        return

    def _on_msg(self, msg):
        """Record a received request; either send a canned REPLY or stop."""
        self._msgs.append(msg)
        if _do_print:
            print 'broker received:', pprint(msg)
        if self.broker.do_reply:
            # Echo the routing / protocol / service frames, append a body.
            new_msg = msg[:4]
            new_msg.append(b'REPLY')
            self.broker.send_multipart(new_msg)
        else:
            IOLoop.instance().stop()
        return

    def _start_broker(self, do_reply=False):
        """Helper activating a fake broker in the ioloop.
        """
        socket = self.context.socket(zmq.XREP)
        self.broker = ZMQStream(socket)
        # LINGER 0 so close() drops unsent messages instead of blocking.
        self.broker.socket.setsockopt(zmq.LINGER, 0)
        self.broker.bind(self.endpoint)
        self.broker.on_recv(self._on_msg)
        self.broker.do_reply = do_reply
        return

    def _stop_broker(self):
        if self.broker:
            self.broker.socket.close()
            self.broker.close()
            self.broker = None
        return

    # tests follow

    def test_01_create_01(self):
        """Test MDPclient simple create.
        """
        client = MDPClient(self.context, self.endpoint, self.service)
        self.assertEquals(self.endpoint, client.endpoint)
        self.assertEquals(self.service, client.service)
        client.shutdown()
        return

    def test_02_send_01(self):
        """Test MDPclient simple request.
        """
        self._start_broker()
        client = MDPClient(self.context, self.endpoint, self.service)
        client.request(b'XXX')
        IOLoop.instance().start()
        client.shutdown()
        self.assertEquals(len(self._msgs), 1)
        rmsg = self._msgs[0]
        # msg[0] is identity of sender
        self.assertEquals(rmsg[1], b'')  # routing delimiter
        self.assertEquals(rmsg[2], client._proto_version)
        self.assertEquals(rmsg[3], self.service)
        self.assertEquals(rmsg[4], b'XXX')
        self._stop_broker()
        return

    def test_02_send_02(self):
        """Test MDPclient multipart request.
        """
        mydata = [b'AAA', b'bbb']
        self._start_broker()
        client = MDPClient(self.context, self.endpoint, self.service)
        client.request(mydata)
        IOLoop.instance().start()
        client.shutdown()
        self.assertEquals(len(self._msgs), 1)
        rmsg = self._msgs[0]
        # msg[0] is identity of sender
        self.assertEquals(rmsg[1], b'')  # routing delimiter
        self.assertEquals(rmsg[2], client._proto_version)
        self.assertEquals(rmsg[3], self.service)
        self.assertEquals(rmsg[4:], mydata)
        self._stop_broker()
        return

    def test_02_send_03(self):
        """Test MDPclient request in invalid state.
        """
        client = MDPClient(self.context, self.endpoint, self.service)
        client.request(b'XXX')  # ok
        # A second request before the reply must be rejected.
        self.assertRaises(InvalidStateError, client.request, b'AAA')
        client.shutdown()
        return

    def test_03_timeout_01(self):
        """Test MDPclient request w/ timeout.
        """
        client = MyClient(self.context, self.endpoint, self.service)
        client.request(b'XXX', 20)  # 20 millisecs timeout
        IOLoop.instance().start()
        client.shutdown()
        self.assertEquals(client.timed_out, True)
        return

    def test_04_receive_01(self):
        """Test MDPclient message receive.
        """
        self._start_broker(do_reply=True)
        client = MyClient(self.context, self.endpoint, self.service)
        client.request(b'XXX')
        IOLoop.instance().start()
        client.shutdown()
        self._stop_broker()
        self.assertEquals(True, hasattr(client, b'last_msg'))
        self.assertEquals(3, len(client.last_msg))
        self.assertEquals(b'REPLY', client.last_msg[-1])
        self.assertEquals(self.service, client.last_msg[-2])
        return
class ZmqPublisher(object):
    """Publishes messages on a ZMQ PUB socket and forwards them to central.

    On start() the publisher (a) connects its PUB socket to central's
    subscribe endpoint, (b) binds a local publish endpoint (picking a random
    port when the endpoint ends in ':random'), and (c) starts a periodic
    heartbeat so central can track this module and its publish endpoint.
    """

    def __init__(self,
                 moduleName,
                 context=None,
                 centralSubscribeEndpoint=PUBLISHER_OPT_DEFAULTS[
                     'centralSubscribeEndpoint'],
                 publishEndpoint=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                 heartbeatPeriodMsecs=PUBLISHER_OPT_DEFAULTS[
                     'heartbeatPeriodMsecs'],
                 highWaterMark=PUBLISHER_OPT_DEFAULTS['highWaterMark']):
        self.moduleName = moduleName
        # Share the process-wide context unless the caller supplies one.
        if context is None:
            context = zmq.Context.instance()
        self.context = context
        self.centralSubscribeEndpoint = parseEndpoint(
            centralSubscribeEndpoint,
            defaultPort=DEFAULT_CENTRAL_SUBSCRIBE_PORT)
        self.publishEndpoint = parseEndpoint(publishEndpoint,
                                             defaultPort='random')
        self.heartbeatPeriodMsecs = heartbeatPeriodMsecs
        self.highWaterMark = highWaterMark

        # Created in start(); None until then.
        self.pubStream = None
        self.heartbeatTimer = None
        self.serializer = serializers.get_serializer('json')()

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        """Add this publisher's command-line options to *parser* (optparse).

        Each option is added only if not already present, so several
        components can safely share one parser.
        """
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName',
                              default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralSubscribeEndpoint'):
            # BUG FIX: this option used to be added as the misspelled
            # '--centralSubcribeEndpoint', so its value was stored under the
            # wrong dest and getOptionValues() (which iterates the
            # PUBLISHER_OPT_DEFAULTS keys) never picked it up.
            parser.add_option(
                '--centralSubscribeEndpoint',
                default=PUBLISHER_OPT_DEFAULTS['centralSubscribeEndpoint'],
                help='Endpoint where central listens for messages [%default]')
        if not parser.has_option('--publishEndpoint'):
            parser.add_option(
                '--publishEndpoint',
                default=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                help='Endpoint to publish messages on [%default]')
        if not parser.has_option('--heartbeatPeriodMsecs'):
            parser.add_option(
                '--heartbeatPeriodMsecs',
                default=PUBLISHER_OPT_DEFAULTS['heartbeatPeriodMsecs'],
                type='int',
                help='Period for sending heartbeats to central [%default]')
        if not parser.has_option('--highWaterMark'):
            parser.add_option(
                '--highWaterMark',
                default=PUBLISHER_OPT_DEFAULTS['highWaterMark'],
                type='int',
                help='High-water mark for publish socket (see 0MQ docs) [%default]')

    @classmethod
    def getOptionValues(cls, opts):
        """Return a dict of the parsed option values that were actually set.

        Values of None are treated as "not given" and skipped so defaults
        elsewhere still apply.
        """
        result = {}
        # Iterating the dict directly is equivalent to the old
        # .iterkeys() call in Python 2 and also works on Python 3.
        for key in PUBLISHER_OPT_DEFAULTS:
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def heartbeat(self):
        """Publish a heartbeat telling central where this module publishes."""
        logging.debug('ZmqPublisher: heartbeat')
        self.sendJson('central.heartbeat.%s' % self.moduleName, {
            'host': getShortHostName(),
            'pub': self.publishEndpoint
        })

    def sendRaw(self, topic, body):
        """Send one pre-serialized message as 'topic:body'."""
        self.pubStream.send('%s:%s' % (topic, body))

    def sendJson(self, topic, obj):
        """JSON-encode *obj* and publish it under *topic*.

        Dicts are stamped with this module's name and a timestamp unless the
        caller already provided them.
        """
        if isinstance(obj, dict):
            obj.setdefault('module', self.moduleName)
            obj.setdefault('timestamp', str(getTimestamp()))
        self.sendRaw(topic, json.dumps(obj))

    def sendDjango(self, modelInstance, topic=None, topicSuffix=None):
        """Serialize a Django model instance and publish it.

        The topic defaults to the serialized model's 'model' label; an
        optional suffix is appended when given.
        """
        dataText = self.serializer.serialize([modelInstance])
        data = json.loads(dataText)[0]
        if topic is None:
            topic = data['model'].encode('utf-8')
        if topicSuffix is not None:
            topic += topicSuffix
        self.sendJson(topic, {'data': data})

    def start(self):
        """Open the PUB socket, bind the publish endpoint, start heartbeats."""
        pubSocket = self.context.socket(zmq.PUB)
        self.pubStream = ZMQStream(pubSocket)
        # self.pubStream.setsockopt(zmq.IDENTITY, self.moduleName)
        self.pubStream.setsockopt(zmq.HWM, self.highWaterMark)
        self.pubStream.connect(self.centralSubscribeEndpoint)

        if self.publishEndpoint.endswith(':random'):
            # Let ZMQ choose a free port, then record the concrete endpoint
            # so heartbeats advertise a connectable address.
            endpointWithoutPort = re.sub(r':random$', '', self.publishEndpoint)
            port = self.pubStream.bind_to_random_port(endpointWithoutPort)
            self.publishEndpoint = '%s:%d' % (endpointWithoutPort, port)
        else:
            self.pubStream.bind(self.publishEndpoint)

        self.heartbeatTimer = ioloop.PeriodicCallback(
            self.heartbeat, self.heartbeatPeriodMsecs)
        self.heartbeatTimer.start()
        self.heartbeat()
def init_hub(self):
    """construct

    Builds every Hub-side socket (registration, heartbeat, notifier,
    monitor, resubmit), connects the DB backend, resolves the task
    scheduler scheme, assembles the engine/client connection-info dicts,
    and finally instantiates the Hub.
    """
    # '%i' placeholders are filled in with concrete port numbers below.
    client_iface = "%s://%s:" % (self.client_transport, self.client_ip) + "%i"
    engine_iface = "%s://%s:" % (self.engine_transport, self.engine_ip) + "%i"

    ctx = self.context
    loop = self.loop

    # Registrar socket
    q = ZMQStream(ctx.socket(zmq.ROUTER), loop)
    q.bind(client_iface % self.regport)
    self.log.info("Hub listening on %s for registration.",
                  client_iface % self.regport)
    if self.client_ip != self.engine_ip:
        # Engines use a different interface, so listen for registration
        # there as well.
        q.bind(engine_iface % self.regport)
        self.log.info("Hub listening on %s for registration.",
                      engine_iface % self.regport)

    ### Engine connections ###

    # heartbeat: PUB pings out, ROUTER collects pongs.
    hpub = ctx.socket(zmq.PUB)
    hpub.bind(engine_iface % self.hb[0])
    hrep = ctx.socket(zmq.ROUTER)
    hrep.bind(engine_iface % self.hb[1])
    self.heartmonitor = HeartMonitor(loop=loop, config=self.config, log=self.log,
                                     pingstream=ZMQStream(hpub, loop),
                                     pongstream=ZMQStream(hrep, loop)
                                     )

    ### Client connections ###
    # Notifier socket
    n = ZMQStream(ctx.socket(zmq.PUB), loop)
    n.bind(client_iface % self.notifier_port)

    ### build and launch the queues ###

    # monitor socket: subscribe to everything, on both the external
    # monitor URL and the in-process endpoint.
    sub = ctx.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, b"")
    sub.bind(self.monitor_url)
    sub.bind('inproc://monitor')
    sub = ZMQStream(sub, loop)

    # connect the db
    self.log.info('Hub using DB backend: %r' % (self.db_class.split()[-1]))
    # cdir = self.config.Global.cluster_dir
    self.db = import_item(str(self.db_class))(session=self.session.session,
                                              config=self.config, log=self.log)
    # NOTE(review): brief pause, presumably to let the sockets above finish
    # binding before anything connects -- confirm.
    time.sleep(.25)

    # Resolve the task scheduler scheme; fall back to the class default
    # when the config does not specify one.
    try:
        scheme = self.config.TaskScheduler.scheme_name
    except AttributeError:
        from .scheduler import TaskScheduler
        scheme = TaskScheduler.scheme_name.get_default_value()

    # build connection dicts
    self.engine_info = {
        'control': engine_iface % self.control[1],
        'mux': engine_iface % self.mux[1],
        'heartbeat': (engine_iface % self.hb[0], engine_iface % self.hb[1]),
        'task': engine_iface % self.task[1],
        'iopub': engine_iface % self.iopub[1],
        # 'monitor' : engine_iface%self.mon_port,
    }

    self.client_info = {
        'control': client_iface % self.control[0],
        'mux': client_iface % self.mux[0],
        'task': (scheme, client_iface % self.task[0]),
        'iopub': client_iface % self.iopub[0],
        'notification': client_iface % self.notifier_port
    }

    self.log.debug("Hub engine addrs: %s", self.engine_info)
    self.log.debug("Hub client addrs: %s", self.client_info)

    # resubmit stream: DEALER back into the task queue, identified by
    # this session so resubmitted msgs look like they came from the Hub.
    r = ZMQStream(ctx.socket(zmq.DEALER), loop)
    url = util.disambiguate_url(self.client_info['task'][-1])
    r.setsockopt(zmq.IDENTITY, self.session.bsession)
    r.connect(url)

    self.hub = Hub(loop=loop, session=self.session, monitor=sub,
                   heartmonitor=self.heartmonitor,
                   query=q, notifier=n, resubmit=r, db=self.db,
                   engine_info=self.engine_info,
                   client_info=self.client_info,
                   log=self.log)