Example #1
class WebSocketZMQBridgeHandler(websocket.WebSocketHandler):
    def open(self, *args, **kwargs):
        self.currentMessage = []
        self.__endpoint = 'tcp://localhost:224'
        socket = zmq.Context().socket(zmq.DEALER)
        self.__stream = ZMQStream(socket, IOLoop.current())
        self.__stream.on_recv(self.__onReceive)
        self.__stream.socket.setsockopt(zmq.LINGER, 0)
        self.__stream.connect(self.__endpoint)

    def on_close(self, *args, **kwargs):
        self.__stream.close()

    def on_message(self, message):
        hasMore = message[0]
        self.currentMessage.append(message[1:])
        if not hasMore:
            sendingMessage = self.currentMessage
            self.currentMessage = []
            self.__stream.send_multipart(sendingMessage)

    def __onReceive(self, msg):
        for frame in msg[:-1]:
            self.write_message(b'\x01' + frame, binary=True)
        self.write_message(b'\x00' + msg[-1], binary=True)
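A minimal sketch of mounting this handler in a Tornado application; the URL pattern and port below are illustrative, not taken from the original project:

import tornado.web
from tornado.ioloop import IOLoop

# Sketch only: the route and listen port are placeholders.
app = tornado.web.Application([(r'/bridge', WebSocketZMQBridgeHandler)])
app.listen(8888)
IOLoop.current().start()
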
Example #2
class Subscriber(object):

    def __init__(self, context, sub_address, sub_topics):
        self.context = context
        self.subscriber_address = sub_address
        self.subscriber_topics = sub_topics

        socket = self.context.socket(zmq.SUB)
        ioloop = IOLoop.instance()
        self.subscriber = ZMQStream(socket, ioloop)
        self.subscriber.setsockopt(zmq.LINGER, 0)
        self.subscriber.on_recv(callback=self.subscriber_recv)
        self.subscriber.setsockopt(zmq.SUBSCRIBE, b"")  # empty prefix subscribes to every topic
        self.subscriber.connect(self.subscriber_address)

        return

    def shutdown(self):

        self.subscriber.on_recv(callback=None)
        self.subscriber.socket.disconnect(self.subscriber_address)
        self.subscriber = None

        return

    def subscriber_recv(self, msg):

        return
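A usage sketch for this subscriber; the endpoint is a placeholder, and note that the topic list is only stored, since the empty SUBSCRIBE prefix already receives everything:

# Sketch only: the endpoint is a placeholder.
context = zmq.Context()
sub = Subscriber(context, 'tcp://127.0.0.1:5556', sub_topics=[])
IOLoop.instance().start()  # subscriber_recv is invoked for every received message
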
Example #3
def launch_scheduler(in_addr,
                     out_addr,
                     mon_addr,
                     not_addr,
                     config=None,
                     logname='ZMQ',
                     log_addr=None,
                     loglevel=logging.DEBUG,
                     scheme='lru',
                     identity=b'task'):
    from zmq.eventloop import ioloop
    from zmq.eventloop.zmqstream import ZMQStream

    if config:
        # unwrap dict back into Config
        config = Config(config)

    ctx = zmq.Context()
    loop = ioloop.IOLoop()
    ins = ZMQStream(ctx.socket(zmq.XREP), loop)
    ins.setsockopt(zmq.IDENTITY, identity)
    ins.bind(in_addr)

    outs = ZMQStream(ctx.socket(zmq.XREP), loop)
    outs.setsockopt(zmq.IDENTITY, identity)
    outs.bind(out_addr)
    mons = ZMQStream(ctx.socket(zmq.PUB), loop)
    mons.connect(mon_addr)
    nots = ZMQStream(ctx.socket(zmq.SUB), loop)
    nots.setsockopt(zmq.SUBSCRIBE, b'')
    nots.connect(not_addr)

    scheme = globals().get(scheme, None)
    # setup logging
    if log_addr:
        connect_logger(logname,
                       ctx,
                       log_addr,
                       root="scheduler",
                       loglevel=loglevel)
    else:
        local_logger(logname, loglevel)

    scheduler = TaskScheduler(client_stream=ins,
                              engine_stream=outs,
                              mon_stream=mons,
                              notifier_stream=nots,
                              scheme=scheme,
                              loop=loop,
                              logname=logname,
                              config=config)
    scheduler.start()
    try:
        loop.start()
    except KeyboardInterrupt:
        print("interrupted, exiting...", file=sys.__stderr__)
Example #4
    async def handle_stream(self, stream, address):
        def onReceive(msg):
            msg[-2] = b'JSON'
            doc = msgpack.unpackb(msg[-1], encoding='UTF-8')
            if 'ResponseID' in doc:
                doc['ResponseID'] = int(doc['ResponseID'])
            msg[-1] = json.dumps(doc).encode('UTF-8')
            # print("ready to send to Arduino. ", msg)
            stream.write(b'\xAE\xAE\xAE\xAE')
            for frame in msg[:-1]:
                stream.write(bytes([len(frame) + 1]) + b'\x01' + frame)
            stream.write(bytes([len(msg[-1]) + 1]) + b'\x00' + msg[-1])
            stream.write(b'\xAF\xAF\xAF\xAF')

        endpoint = 'tcp://localhost:224'
        socket = zmq.Context().socket(zmq.DEALER)
        zmqStream = ZMQStream(socket, IOLoop.current())
        zmqStream.on_recv(onReceive)
        zmqStream.socket.setsockopt(zmq.LINGER, 0)
        zmqStream.connect(endpoint)

        def on_message(message):
            frames = []
            position = 0
            while True:
                if len(message) <= position: break
                size = message[position]
                if len(message) < position + size + 1:
                    raise RuntimeError("Invalid Frame.")
                frames.append(message[position + 2:position + size + 1])
                position += size + 1
            if len(frames) != 7: raise RuntimeError("Bad Frame.")
            if len(frames[0]) != 0: raise RuntimeError("Bad Frame 0.")
            if frames[1] != b'IF1': raise RuntimeError("Bad Protocol.")
            if frames[3] not in (b'Broker', b'Service', b'Direct'):
                raise RuntimeError("Bad Distributing Mode.")
            frames[-2] = b'Msgpack'
            doc = json.loads(frames[-1])
            frames[-1] = msgpack.packb(doc)
            # print('send a message a ZMQ server: ', frames)
            zmqStream.send_multipart(frames)

        errorCount = 0
        while True:
            try:
                await stream.read_until(b'\xAE\xAE\xAE\xAE')
                data = await stream.read_until(b'\xAF\xAF\xAF\xAF')
                data = data[:-4]
                on_message(data)
            except StreamClosedError:
                print('closed')
                break
            except RuntimeError as e:
                print(e)
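For reference, the serial framing produced by onReceive above (and parsed back by on_message) can be restated as a small standalone helper; this function is not part of the example, it only spells out the byte layout:

def encode_frames(frames):
    # One 0xAE x4 start delimiter, then per frame: a length byte covering the
    # flag plus payload, a flag byte (0x01 = more frames follow, 0x00 = last
    # frame), the payload itself, and finally a 0xAF x4 end delimiter.
    out = bytearray(b'\xAE\xAE\xAE\xAE')
    for frame in frames[:-1]:
        out += bytes([len(frame) + 1]) + b'\x01' + frame
    out += bytes([len(frames[-1]) + 1]) + b'\x00' + frames[-1]
    out += b'\xAF\xAF\xAF\xAF'
    return bytes(out)
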
Example #5
class Worker(object):

    def __init__(self, endpoints):
        context = zmq.Context()
        socket = context.socket(zmq.REQ)
        ioloop = IOLoop.instance()
        self.endpoints = endpoints
        self.stream = ZMQStream(socket, ioloop)
        self.stream.on_recv(self._on_message)
        self.stream.socket.setsockopt(zmq.LINGER, 0)
        for endpoint in endpoints:
            self.stream.connect(endpoint)
        self._send_ready()
Example #6
def launch_scheduler(in_addr, out_addr, mon_addr, not_addr, config=None,logname='ZMQ', 
                            log_addr=None, loglevel=logging.DEBUG, scheme='lru',
                            identity=b'task'):
    from zmq.eventloop import ioloop
    from zmq.eventloop.zmqstream import ZMQStream
    
    if config:
        # unwrap dict back into Config
        config = Config(config)

    ctx = zmq.Context()
    loop = ioloop.IOLoop()
    ins = ZMQStream(ctx.socket(zmq.XREP),loop)
    ins.setsockopt(zmq.IDENTITY, identity)
    ins.bind(in_addr)
    
    outs = ZMQStream(ctx.socket(zmq.XREP),loop)
    outs.setsockopt(zmq.IDENTITY, identity)
    outs.bind(out_addr)
    mons = ZMQStream(ctx.socket(zmq.PUB),loop)
    mons.connect(mon_addr)
    nots = ZMQStream(ctx.socket(zmq.SUB),loop)
    nots.setsockopt(zmq.SUBSCRIBE, b'')
    nots.connect(not_addr)
    
    scheme = globals().get(scheme, None)
    # setup logging
    if log_addr:
        connect_logger(logname, ctx, log_addr, root="scheduler", loglevel=loglevel)
    else:
        local_logger(logname, loglevel)
    
    scheduler = TaskScheduler(client_stream=ins, engine_stream=outs,
                            mon_stream=mons, notifier_stream=nots,
                            scheme=scheme, loop=loop, logname=logname,
                            config=config)
    scheduler.start()
    try:
        loop.start()
    except KeyboardInterrupt:
        print("interrupted, exiting...", file=sys.__stderr__)
Example #7
class LocalRequestProxy:
    '''
    This class is responsible for routing client requests coming from a
    particular server to the RouterPubSubProxy, which will route them to the
    workers.
    '''
    def __init__(self, front_end_name, back_end_name, loop):
        '''
        Initializes an instance of LocalRequestProxy

        @param front_end_name - name of the front end socket. It will be
                                initialized with the Router socket.
        @param back_end_name - name of the back end socket. It will be
                               initialized with the Dealer socket.
        @param loop - zmq IOLoop
        '''
        self._loop = loop

        ctx = zmq.Context.instance()

        # Create the front end stream
        front_address = ZmqAddress(chan_name=front_end_name, transport=INPROC)
        self._front_end = ZMQStream(ctx.socket(zmq.ROUTER), io_loop=loop)
        self._front_end.setsockopt(zmq.ROUTER_MANDATORY, 1)
        self._front_end.bind(front_address.zmq_url())

        # Create the back end stream
        back_address = ZmqAddress(chan_name=back_end_name)
        self._back_end = ZMQStream(ctx.socket(zmq.DEALER), io_loop=loop)
        self._back_end.connect(back_address.zmq_url())

        def callback(from_name, to_name, zmq_stream, msgs):
            log.debug("Routing from {0} to {1} messages {2}"
                      .format(from_name, to_name, msgs))
            zmq_stream.send_multipart(msgs)
            zmq_stream.flush()

        self._front_end.on_recv(lambda msgs:
                                callback(front_end_name, back_end_name, self._back_end, msgs))
        self._back_end.on_recv(lambda msgs:
                               callback(back_end_name, front_end_name, self._front_end, msgs))
Example #8
    def stream(self, sock_type, sock_addr, sock_bind, 
            callback=None, subscribe=''):

        assert self.ctx is not None

        sock_addr = sock_addr % {
                'port': random.randint(1024,65535),
                }

        s = ZMQStream(
                self.ctx.socket(sock_type))

        if sock_type == zmq.SUB:
            s.setsockopt(zmq.SUBSCRIBE, subscribe)

        if sock_bind:
            s.bind(sock_addr)
        else:
            s.connect(sock_addr)

        if callback:
            s.on_recv(callback)

        return (s, sock_addr)
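A usage sketch for this helper, called from another method of the same object since it relies on self.ctx; the address template is a placeholder, with %(port)d filled in by the helper when binding, and messages are delivered once the IOLoop runs:

# Sketch only: addresses are placeholders.
def show(msgs):
    print(msgs)

pull, addr = self.stream(zmq.PULL, 'tcp://127.0.0.1:%(port)d', True, callback=show)
push, _ = self.stream(zmq.PUSH, addr, False)
push.send(b'hello')
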
Example #9
class MNClient(MN_object):
    """Class for the MN client side.

    Thin asynchronous encapsulation of a zmq.REQ socket.
    Provides a :func:`request` method with optional timeout.

    :param context:  the ZeroMQ context to create the socket in.
    :type context:   zmq.Context
    :param endpoint: the endpoint to connect to.
    :type endpoint:  str
    :param service:  the service the client should use
    :type service:   str
    """

    _proto_version = CLIENT_PROTO

    def __init__(self, context, endpoint, service):
        """Initialize the MNClient.
        """
        self.context = context
        self.service = service
        self.endpoint = endpoint
        self.can_send = True
        self._proto_prefix = ['', CLIENT_PROTO, service]
        self._tmo = None
        self.timed_out = False
        self._create_stream()
        return

    def _create_stream(self):
        """Helper for opening a stream.
        """
        socket = self.context.socket(zmq.DEALER)
        ioloop = IOLoop.instance()
        self.stream = ZMQStream(socket, ioloop)
        self.stream.on_recv(self._on_message)
        self.stream.socket.setsockopt(zmq.LINGER, 0)
        self.stream.connect(self.endpoint)

    def shutdown(self):
        """Method to deactivate the client connection completely.

        Will delete the stream and the underlying socket.

        .. warning:: The instance MUST not be used after :func:`shutdown` has been called.

        :rtype: None
        """
        if not self.stream:
            return
        self.stream.socket.close()
        self.stream.close()
        self.stream = None
        return

    def request(self, msg, timeout=None):
        """Send the given message.

        :param msg:     message parts to send.
        :type msg:      list of str
        :param timeout: time to wait in milliseconds.
        :type timeout:  int

        :rtype: None
        """
        if not self.can_send:
            raise InvalidStateError()
        if isinstance(msg, bytes):
            msg = [msg]
        # prepare full message
        # to_send = self._proto_prefix[:]
        # to_send.extend(msg)
        to_send = msg
        if self.stream.closed():
            self._create_stream()
            # TODO check this
        self.stream.send_multipart(to_send)
        self.can_send = False
        if timeout:
            self._start_timeout(timeout)
        return

    def _on_timeout(self):
        """Helper called after timeout.
        """
        self.timed_out = True
        self._tmo = None
        self.on_timeout()
        return

    def _start_timeout(self, timeout):
        """Helper for starting the timeout.

        :param timeout:  the time to wait in milliseconds.
        :type timeout:   int
        """
        self._tmo = DelayedCallback(self._on_timeout, timeout)
        self._tmo.start()
        return

    def _on_message(self, msg):
        """Helper method called on message receive.

        :param msg:   list of message parts.
        :type msg:    list of str
        """
        if self._tmo:
            # disable timeout
            self._tmo.stop()
            self._tmo = None
        # setting state before invoking on_message, so we can request from there
        self.can_send = True
        self.on_message(msg)
        return

    def on_message(self, msg):
        """Public method called when a message arrived.

        .. note:: Does nothing. Should be overloaded!
        """
        pass

    def on_timeout(self):
        """Public method called when a timeout occurred.

        .. note:: Does nothing. Should be overloaded!
        """
        pass
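A minimal subclass sketch for this client; the endpoint and service name are placeholders, and a matching broker is assumed to be listening:

class EchoClient(MNClient):
    # Sketch only: prints the reply or the timeout and stops the loop.
    def on_message(self, msg):
        print('reply: %s' % msg)
        IOLoop.instance().stop()

    def on_timeout(self):
        print('request timed out')
        IOLoop.instance().stop()

client = EchoClient(zmq.Context(), 'tcp://127.0.0.1:5555', b'echo')
client.request([b'hello'], timeout=2500)
IOLoop.instance().start()
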
Example #10
class MNWorker(MN_object):
    """Class for the MN worker side.

    Thin encapsulation of a zmq.DEALER socket.
    Provides a send method with optional timeout parameter.

    Will use a timeout to indicate a broker failure.

    :param context:    the context to use for socket creation.
    :type context:     zmq.Context
    :param endpoint:   endpoint to connect to.
    :type endpoint:    str
    :param service:    the name of the service we support.
    :type service:     byte-string
    """

    _proto_version = b'MNPW01'  # worker protocol version

    def __init__(self, context, endpoint, service, worker_type, address,
                 protocols):
        """Initialize the MNWorker.
        """
        self.context = context
        self.endpoint = endpoint
        self.service = service
        self.type = worker_type
        self.address = address
        self.protocols = protocols
        self.envelope = None
        self.HB_RETRIES = HB_RETRIES
        self.HB_INTERVAL = HB_INTERVAL
        self._data = {}
        self.stream = None
        self._tmo = None
        self.timed_out = False
        self.need_handshake = True
        self.connected = False
        self.ticker = None
        self._delayed_cb = None
        self._create_stream()
        _LOG.info("Worker initialized and can be found at '%s'" % endpoint)
        return

    def _create_stream(self):
        """Helper to create the socket and the stream.
        """
        socket = self.context.socket(zmq.DEALER)
        ioloop = IOLoop.instance()
        self.stream = ZMQStream(socket, ioloop)
        self.stream.on_recv(self._on_message)
        self.stream.socket.setsockopt(zmq.LINGER, 0)
        self.stream.connect(self.endpoint)
        self.ticker = PeriodicCallback(self._tick, self.HB_INTERVAL)
        self._send_ready()
        self.ticker.start()
        return

    def _send_ready(self):
        """Helper method to prepare and send the workers READY message.
        """
        _LOG.debug("Informing broker I am ready")
        ready_msg = [
            b'', WORKER_PROTO, MSG_READY, self.service, self.type,
            self.address, self.protocols
        ]
        if self.stream.closed():
            self.shutdown()
        self.stream.send_multipart(ready_msg)
        self.curr_retries = self.HB_RETRIES
        return

    def _tick(self):
        """Method called every HB_INTERVAL milliseconds.
        """
        self.curr_retries -= 1
        self.send_hb()
        if self.curr_retries >= 0:
            return
        # connection seems to be dead
        self.shutdown()
        # try to recreate it
        # self._delayed_cb = IOLoop.call_later(self._create_stream, 5000)
        # self._delayed_cb = IOLoop.add_timeout(self._create_stream, 5000)
        self._delayed_cb = DelayedCallback(self._create_stream,
                                           self.HB_INTERVAL)
        self._delayed_cb.start()
        return

    def send_hb(self):
        """Construct and send HB message to broker.
        """
        _LOG.debug("Sending heartbeat")
        msg = [b'', WORKER_PROTO, MSG_HEARTBEAT]
        if self.stream.closed():
            self.shutdown()
        self.stream.send_multipart(msg)
        return

    def shutdown(self):
        """Method to deactivate the worker connection completely.

        Will delete the stream and the underlying socket.
        """
        if self.ticker:
            self.ticker.stop()
            self.ticker = None
        if not self.stream:
            return
        self.stream.socket.close()
        self.stream.close()
        self.stream = None
        self.timed_out = False
        self.need_handshake = True
        self.connected = False
        return

    def reply(self, msg):
        """Send the given message.

        :param msg:    full message to send.
        :type msg:     can either be a byte-string or a list of byte-strings
        """
        if self.need_handshake:
            raise ConnectionNotReadyError()
        to_send = self.envelope
        self.envelope = None
        if isinstance(msg, list):
            to_send.extend(msg)
        else:
            to_send.append(msg)
        if self.stream.closed():
            self.shutdown()
        self.stream.send_multipart(to_send)
        return

    def _on_message(self, msg):
        """Helper method called on message receive.

        :param msg:    a list w/ the message parts
        :type msg:     a list of byte-strings
        """
        _LOG.debug("Received: %s." % msg)
        # 1st part is empty
        msg.pop(0)
        # 2nd part is protocol version
        proto = msg.pop(0)
        if proto != WORKER_PROTO:
            # ignore message from not supported protocol
            pass
        # 3rd part is message type
        msg_type = msg.pop(0)
        # XXX: hardcoded message types!
        # any message resets the retries counter
        self.need_handshake = False
        self.curr_retries = self.HB_RETRIES
        if msg_type == MSG_DISCONNECT:  # disconnect
            _LOG.info("Broker wants us to disconnect.")
            self.curr_retries = 0  # reconnect will be triggered by hb timer
        elif msg_type == MSG_QUERY:  # request
            # remaining parts are the user message
            _LOG.debug("Received new request: %s." % msg)
            envelope, msg = split_address(msg)
            envelope.append(b'')
            envelope = [b'', WORKER_PROTO, MSG_REPLY] + envelope  # reply
            self.envelope = envelope
            self.on_request(msg)
        else:
            # invalid message
            # ignored
            _LOG.debug('ignoring message with invalid id')
            pass
        return

    def on_request(self, msg):
        """Public method called when a request arrived.

        :param msg:    a list w/ the message parts
        :type msg:     a list of byte-strings

        Must be overloaded to provide support for various services!
        """
        pass
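A minimal subclass sketch for this worker; every constructor argument below is a placeholder, and the module-level constants (WORKER_PROTO, HB_RETRIES, HB_INTERVAL, and so on) are assumed to come from the surrounding module:

class EchoWorker(MNWorker):
    # Sketch only: echo each request back to the requester.
    def on_request(self, msg):
        self.reply(msg)

worker = EchoWorker(zmq.Context(), 'tcp://127.0.0.1:5555', b'echo',
                    b'worker', b'tcp://127.0.0.1:6000', b'tcp')
IOLoop.instance().start()
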
Example #11
class MDPWorker(object):
    """Class for the MDP worker side.

    Thin encapsulation of a zmq.DEALER socket.
    Provides a send method with optional timeout parameter.

    Will use a timeout to indicate a broker failure.
    """

    _proto_version = b'MDPW01'

    # TODO: integrate that into API
    HB_INTERVAL = 1000  # in milliseconds
    HB_LIVENESS = 3  # HBs to miss before connection counts as dead

    def __init__(self, context, endpoint, service):
        """Initialize the MDPWorker.

        context is the zmq context to create the socket from.
        service is a byte-string with the service name.
        """
        self.context = context
        self.endpoint = endpoint
        self.service = service
        self.stream = None
        self._tmo = None
        self.need_handshake = True
        self.ticker = None
        self._delayed_cb = None
        self._create_stream()
        return

    def _create_stream(self):
        """Helper to create the socket and the stream.
        """
        socket = self.context.socket(zmq.DEALER)
        ioloop = IOLoop.instance()
        self.stream = ZMQStream(socket, ioloop)
        self.stream.on_recv(self._on_message)
        self.stream.socket.setsockopt(zmq.LINGER, 0)
        self.stream.connect(self.endpoint)
        self.ticker = PeriodicCallback(self._tick, self.HB_INTERVAL)
        self._send_ready()
        self.ticker.start()
        return

    def _send_ready(self):
        """Helper method to prepare and send the workers READY message.
        """
        ready_msg = [b'', self._proto_version, b'\x01', self.service]
        self.stream.send_multipart(ready_msg)
        self.curr_liveness = self.HB_LIVENESS
        return

    def _tick(self):
        """Method called every HB_INTERVAL milliseconds.
        """
        self.curr_liveness -= 1
        ##         print '%.3f tick - %d' % (time.time(), self.curr_liveness)
        self.send_hb()
        if self.curr_liveness >= 0:
            return
        ## print '%.3f lost connection' % time.time()
        # ouch, connection seems to be dead
        self.shutdown()
        # try to recreate it
        self._delayed_cb = DelayedCallback(self._create_stream, 5000)
        self._delayed_cb.start()
        return

    def send_hb(self):
        """Construct and send HB message to broker.
        """
        msg = [b'', self._proto_version, b'\x04']
        self.stream.send_multipart(msg)
        return

    def shutdown(self):
        """Method to deactivate the worker connection completely.

        Will delete the stream and the underlying socket.
        """
        if self.ticker:
            self.ticker.stop()
            self.ticker = None
        if not self.stream:
            return
        self.stream.socket.close()
        self.stream.close()
        self.stream = None
        self.timed_out = False
        self.need_handshake = True
        self.connected = False
        return

    def reply(self, msg):
        """Send the given message.

        msg can either be a byte-string or a list of byte-strings.
        """
        ##         if self.need_handshake:
        ##             raise ConnectionNotReadyError()
        # prepare full message
        to_send = self.envelope
        self.envelope = None
        if isinstance(msg, list):
            to_send.extend(msg)
        else:
            to_send.append(msg)
        self.stream.send_multipart(to_send)
        return

    def _on_message(self, msg):
        """Helper method called on message receive.

        msg is a list w/ the message parts
        """
        # 1st part is empty
        msg.pop(0)
        # 2nd part is protocol version
        # TODO: version check
        proto = msg.pop(0)
        # 3rd part is message type
        msg_type = msg.pop(0)
        # XXX: hardcoded message types!
        # any message resets the liveness counter
        self.need_handshake = False
        self.curr_liveness = self.HB_LIVENESS
        if msg_type == b'\x05':  # disconnect
            self.curr_liveness = 0  # reconnect will be triggered by hb timer
        elif msg_type == b'\x02':  # request
            # remaining parts are the user message
            envelope, msg = split_address(msg)
            envelope.append(b'')
            envelope = [b'', self._proto_version, b'\x03'] + envelope  # REPLY
            self.envelope = envelope
            self.on_request(msg)
        else:
            # invalid message
            # ignored
            pass
        return

    def on_request(self, msg):
        """Public method called when a request arrived.

        Must be overloaded!
        """
        pass
Example #12
class MDPWorker(object):

    """Class for the MDP worker side.

    Thin encapsulation of a zmq.XREQ socket.
    Provides a send method with optional timeout parameter.

    Will use a timeout to indicate a broker failure.
    """

    _proto_version = b'MDPW01'

    # TODO: integrate that into API
    HB_INTERVAL = 1000  # in milliseconds
    HB_LIVENESS = 3    # HBs to miss before connection counts as dead

    def __init__(self, context, endpoint, service):
        """Initialize the MDPWorker.

        context is the zmq context to create the socket from.
        service is a byte-string with the service name.
        """
        self.context = context
        self.endpoint = endpoint
        self.service = service
        self.stream = None
        self._tmo = None
        self.need_handshake = True
        self.ticker = None
        self._delayed_cb = None
        self._create_stream()
        return

    def _create_stream(self):
        """Helper to create the socket and the stream.
        """
        socket = self.context.socket(zmq.XREQ)
        ioloop = IOLoop.instance()
        self.stream = ZMQStream(socket, ioloop)
        self.stream.on_recv(self._on_message)
        self.stream.socket.setsockopt(zmq.LINGER, 0)
        self.stream.connect(self.endpoint)
        self.ticker = PeriodicCallback(self._tick, self.HB_INTERVAL)
        self._send_ready()
        self.ticker.start()
        return

    def _send_ready(self):
        """Helper method to prepare and send the workers READY message.
        """
        ready_msg = [ b'', self._proto_version, chr(1), self.service ]
        self.stream.send_multipart(ready_msg)
        self.curr_liveness = self.HB_LIVENESS
        return

    def _tick(self):
        """Method called every HB_INTERVAL milliseconds.
        """
        self.curr_liveness -= 1
##         print '%.3f tick - %d' % (time.time(), self.curr_liveness)
        self.send_hb()
        if self.curr_liveness >= 0:
            return
        print '%.3f lost connection' % time.time()
        # ouch, connection seems to be dead
        self.shutdown()
        # try to recreate it
        self._delayed_cb = DelayedCallback(self._create_stream, 5000)
        self._delayed_cb.start()
        return

    def send_hb(self):
        """Construct and send HB message to broker.
        """
        msg = [ b'', self._proto_version, chr(4) ]
        self.stream.send_multipart(msg)
        return

    def shutdown(self):
        """Method to deactivate the worker connection completely.

        Will delete the stream and the underlying socket.
        """
        if self.ticker:
            self.ticker.stop()
            self.ticker = None
        if not self.stream:
            return
        self.stream.socket.close()
        self.stream.close()
        self.stream = None
        self.timed_out = False
        self.need_handshake = True
        self.connected = False
        return

    def reply(self, msg):
        """Send the given message.

        msg can either be a byte-string or a list of byte-strings.
        """
##         if self.need_handshake:
##             raise ConnectionNotReadyError()
        # prepare full message
        to_send = self.envelope
        self.envelope = None
        if isinstance(msg, list):
            to_send.extend(msg)
        else:
            to_send.append(msg)
        self.stream.send_multipart(to_send)
        return

    def _on_message(self, msg):
        """Helper method called on message receive.

        msg is a list w/ the message parts
        """
        # 1st part is empty
        msg.pop(0)
        # 2nd part is protocol version
        # TODO: version check
        proto = msg.pop(0)
        # 3rd part is message type
        msg_type = msg.pop(0)
        # XXX: hardcoded message types!
        # any message resets the liveness counter
        self.need_handshake = False
        self.curr_liveness = self.HB_LIVENESS
        if msg_type == '\x05': # disconnect
            print '    DISC'
            self.curr_liveness = 0 # reconnect will be triggered by hb timer
        elif msg_type == '\x02': # request
            # remaining parts are the user message
            envelope, msg = split_address(msg)
            envelope.append(b'')
            envelope = [ b'', self._proto_version, '\x03'] + envelope # REPLY
            self.envelope = envelope
            self.on_request(msg)
        else:
            # invalid message
            # ignored
            pass
        return

    def on_request(self, msg):
        """Public method called when a request arrived.

        Must be overloaded!
        """
        pass
Example #13
class ZmqSubscriber(object):
    def __init__(self,
                 moduleName,
                 centralHost=SUBSCRIBER_OPT_DEFAULTS['centralHost'],
                 context=None,
                 centralPublishEndpoint=SUBSCRIBER_OPT_DEFAULTS['centralPublishEndpoint'],
                 replay=None):
        self.moduleName = moduleName
        self.centralHost = centralHost

        if context is None:
            context = zmq.Context.instance()
        self.context = context

        self.centralPublishEndpoint = parseEndpoint(centralPublishEndpoint,
                                                    defaultPort=DEFAULT_CENTRAL_PUBLISH_PORT,
                                                    centralHost=self.centralHost)
        self.replayPaths = replay
        if self.replayPaths is None:
            self.replayPaths = []

        self.handlers = {}
        self.counter = 0
        self.deserializer = serializers.get_deserializer('json')
        self.stream = None

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        if not parser.has_option('--centralHost'):
            parser.add_option('--centralHost',
                              default=SUBSCRIBER_OPT_DEFAULTS['centralHost'],
                              help='Host where central runs [%default]')
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName',
                              default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralPublishEndpoint'):
            parser.add_option('--centralPublishEndpoint',
                              default=SUBSCRIBER_OPT_DEFAULTS['centralPublishEndpoint'],
                              help='Endpoint where central publishes messages [%default]')
        if not parser.has_option('--replay'):
            parser.add_option('--replay',
                              action='append',
                              help='Replay specified message log (can specify multiple times), or use - to read from stdin')

    @classmethod
    def getOptionValues(cls, opts):
        result = {}
        for key in SUBSCRIBER_OPT_DEFAULTS.iterkeys():
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def start(self):
        sock = self.context.socket(zmq.SUB)
        self.stream = ZMQStream(sock)
        # causes problems with multiple instances
        #self.stream.setsockopt(zmq.IDENTITY, self.moduleName)
        self.stream.connect(self.centralPublishEndpoint)
        logging.info('zmq.subscriber: connected to central at %s', self.centralPublishEndpoint)
        self.stream.on_recv(self.routeMessages)

    def routeMessages(self, messages):
        for msg in messages:
            self.routeMessage(msg)

    def routeMessage(self, msg):
        colonIndex = msg.find(':')
        topic = msg[:(colonIndex + 1)]
        body = msg[(colonIndex + 1):]

        handled = 0
        for topicPrefix, registry in self.handlers.iteritems():
            if topic.startswith(topicPrefix):
                for handler in registry.itervalues():
                    handler(topic[:-1], body)
                    handled = 1

        return handled

    def subscribeRaw(self, topicPrefix, handler):
        topicRegistry = self.handlers.setdefault(topicPrefix, {})
        if not topicRegistry:
            logging.info('zmq.subscriber: subscribe %s', topicPrefix)
            self.stream.setsockopt(zmq.SUBSCRIBE, topicPrefix)
        handlerId = (topicPrefix, self.counter)
        topicRegistry[self.counter] = handler
        self.counter += 1
        return handlerId

    def subscribeJson(self, topicPrefix, handler):
        def jsonHandler(topicPrefix, body):
            return handler(topicPrefix, convertToDotDictRecurse(json.loads(body)))
        return self.subscribeRaw(topicPrefix, jsonHandler)

    def subscribeDjango(self, topicPrefix, handler):
        def djangoHandler(topicPrefix, body):
            obj = json.loads(body)
            dataText = json.dumps([obj['data']])
            modelInstance = list(self.deserializer(dataText))[0]
            return handler(topicPrefix, modelInstance.object)
        return self.subscribeRaw(topicPrefix, djangoHandler)

    def unsubscribe(self, handlerId):
        topicPrefix, index = handlerId
        topicRegistry = self.handlers[topicPrefix]
        del topicRegistry[index]
        if not topicRegistry:
            logging.info('zmq.subscriber: unsubscribe %s', topicPrefix)
            self.stream.setsockopt(zmq.UNSUBSCRIBE, topicPrefix)

    def connect(self, endpoint):
        self.stream.connect(endpoint)

    def replay(self):
        numReplayed = 0
        numHandled = 0
        for replayPath in self.replayPaths:
            print '=== replaying messages from %s' % replayPath
            if replayPath == '-':
                replayFile = sys.stdin
            else:
                replayFile = open(replayPath, 'rb')
            stream = LogParser(replayFile)
            for rec in stream:
                numReplayed += 1
                numHandled += self.routeMessage(rec.msg)

                if numReplayed % 10000 == 0:
                    print 'replayed %d messages, %d handled' % (numReplayed, numHandled)
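A usage sketch for the subscriber; the module name and topic prefix are placeholders, and an IOLoop import (from zmq.eventloop.ioloop or tornado) is assumed to be available in the module:

# Sketch only: module name and topic prefix are placeholders.
def onNews(topic, body):
    print('%s -> %s' % (topic, body))

sub = ZmqSubscriber('exampleModule')
sub.start()
sub.subscribeJson('news.', onNews)
IOLoop.instance().start()
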
Example #14
class ZmqPublisher(object):
    def __init__(self,
                 moduleName,
                 context=None,
                 centralSubscribeEndpoint=PUBLISHER_OPT_DEFAULTS[
                     'centralSubscribeEndpoint'],
                 publishEndpoint=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                 heartbeatPeriodMsecs=PUBLISHER_OPT_DEFAULTS[
                     'heartbeatPeriodMsecs'],
                 highWaterMark=PUBLISHER_OPT_DEFAULTS['highWaterMark']):
        self.moduleName = moduleName

        if context is None:
            context = zmq.Context.instance()
        self.context = context

        self.centralSubscribeEndpoint = parseEndpoint(
            centralSubscribeEndpoint,
            defaultPort=DEFAULT_CENTRAL_SUBSCRIBE_PORT)
        self.publishEndpoint = parseEndpoint(publishEndpoint,
                                             defaultPort='random')
        self.heartbeatPeriodMsecs = heartbeatPeriodMsecs
        self.highWaterMark = highWaterMark

        self.pubStream = None
        self.heartbeatTimer = None

        self.serializer = serializers.get_serializer('json')()

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName',
                              default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralSubscribeEndpoint'):
            parser.add_option(
                '--centralSubscribeEndpoint',
                default=PUBLISHER_OPT_DEFAULTS['centralSubscribeEndpoint'],
                help='Endpoint where central listens for messages [%default]')
        if not parser.has_option('--publishEndpoint'):
            parser.add_option(
                '--publishEndpoint',
                default=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                help='Endpoint to publish messages on [%default]')
        if not parser.has_option('--heartbeatPeriodMsecs'):
            parser.add_option(
                '--heartbeatPeriodMsecs',
                default=PUBLISHER_OPT_DEFAULTS['heartbeatPeriodMsecs'],
                type='int',
                help='Period for sending heartbeats to central [%default]')
        if not parser.has_option('--highWaterMark'):
            parser.add_option(
                '--highWaterMark',
                default=PUBLISHER_OPT_DEFAULTS['highWaterMark'],
                type='int',
                help=
                'High-water mark for publish socket (see 0MQ docs) [%default]')

    @classmethod
    def getOptionValues(cls, opts):
        result = {}
        for key in PUBLISHER_OPT_DEFAULTS.iterkeys():
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def heartbeat(self):
        logging.debug('ZmqPublisher: heartbeat')
        self.sendJson('central.heartbeat.%s' % self.moduleName, {
            'host': getShortHostName(),
            'pub': self.publishEndpoint
        })

    def sendRaw(self, topic, body):
        self.pubStream.send('%s:%s' % (topic, body))

    def sendJson(self, topic, obj):
        if isinstance(obj, dict):
            obj.setdefault('module', self.moduleName)
            obj.setdefault('timestamp', str(getTimestamp()))
        self.sendRaw(topic, json.dumps(obj))

    def sendDjango(self, modelInstance, topic=None, topicSuffix=None):
        dataText = self.serializer.serialize([modelInstance])
        data = json.loads(dataText)[0]
        if topic is None:
            topic = data['model'].encode('utf-8')
            if topicSuffix is not None:
                topic += topicSuffix
        self.sendJson(topic, {'data': data})

    def start(self):
        pubSocket = self.context.socket(zmq.PUB)
        self.pubStream = ZMQStream(pubSocket)
        # self.pubStream.setsockopt(zmq.IDENTITY, self.moduleName)
        self.pubStream.setsockopt(zmq.HWM, self.highWaterMark)
        self.pubStream.connect(self.centralSubscribeEndpoint)

        if self.publishEndpoint.endswith(':random'):
            endpointWithoutPort = re.sub(r':random$', '', self.publishEndpoint)
            port = self.pubStream.bind_to_random_port(endpointWithoutPort)
            self.publishEndpoint = '%s:%d' % (endpointWithoutPort, port)
        else:
            self.pubStream.bind(self.publishEndpoint)

        self.heartbeatTimer = ioloop.PeriodicCallback(
            self.heartbeat, self.heartbeatPeriodMsecs)
        self.heartbeatTimer.start()
        self.heartbeat()
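A usage sketch on the publishing side; the module name, topic, and payload are placeholders, and the defaults for the central endpoints are assumed:

# Sketch only: topic and payload are placeholders.
pub = ZmqPublisher('exampleModule')
pub.start()
pub.sendJson('exampleModule.status', {'ok': True})
ioloop.IOLoop.instance().start()
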
Example #15
class ZmqCentral(object):
    def __init__(self, opts):
        self.opts = opts
        self.info = {}

    def announceConnect(self, moduleName, params):
        logging.info('module %s connected', moduleName)
        self.injectStream.send('central.connect.%s:%s' %
                               (moduleName, json.dumps(params)))

    def announceDisconnect(self, moduleName):
        logging.info('module %s disconnected', moduleName)
        self.injectStream.send(
            'central.disconnect.%s:%s' %
            (moduleName, json.dumps({'timestamp': str(getTimestamp())})))

    def logMessage(self, msg, posixTime=None, attachmentDir='-'):
        mlog = self.messageLog
        mlog.write('@@@ %d %d %s ' %
                   (getTimestamp(posixTime), len(msg), attachmentDir))
        mlog.write(msg)
        mlog.write('\n')

    def logMessageWithAttachments0(self, msg):
        parsed = parseMessage(msg)
        posixTime = time.time()

        # construct attachment directory
        dt = datetime.datetime.utcfromtimestamp(posixTime)
        dateText = dt.strftime('%Y-%m-%d')
        timeText = dt.strftime('%H-%M-%S') + '.%06d' % dt.microsecond
        uniq = '%08x' % random.getrandbits(32)
        attachmentSuffix = os.path.join('attachments', dateText, timeText,
                                        parsed['topic'], uniq)
        attachmentPath = os.path.join(self.logDir, attachmentSuffix)
        os.makedirs(attachmentPath)

        # write attachments to attachment directory
        for attachment in parsed['attachments']:
            fullName = os.path.join(attachmentPath, attachment.get_filename())
            open(fullName, 'wb').write(attachment.get_payload())

        # log message with a pointer to the attachment directory
        self.logMessage(':'.join((parsed['topic'], parsed['json'])), posixTime,
                        attachmentSuffix)

    def logMessageWithAttachments(self, msg):
        try:
            return self.logMessageWithAttachments0(msg)
        except:  # pylint: disable=W0702
            self.logException('logging message with attachments')

    def handleHeartbeat(self, params):
        moduleName = params['module'].encode('utf-8')
        now = getTimestamp()

        oldInfo = self.info.get(moduleName, None)
        if oldInfo:
            if oldInfo.get('pub', None) != params.get('pub', None):
                self.announceDisconnect(moduleName)
                self.announceConnect(moduleName, params)
        else:
            self.announceConnect(moduleName, params)

        self.info[moduleName] = params
        keepalive = params.get('keepalive', DEFAULT_KEEPALIVE_US)
        params['timeout'] = now + keepalive
        return 'ok'

    def handleInfo(self):
        return self.info

    def logException(self, whileClause):
        errClass, errObject, errTB = sys.exc_info()[:3]
        errText = '%s.%s: %s' % (errClass.__module__, errClass.__name__,
                                 str(errObject))
        logging.warning(''.join(traceback.format_tb(errTB)))
        logging.warning(errText)
        logging.warning('[error while %s at time %s]', whileClause,
                        getTimestamp())

    def handleMessages(self, messages):
        for msg in messages:
            if hasAttachments(msg):
                self.logMessageWithAttachments(msg)
            else:
                self.logMessage(msg)
            if msg.startswith('central.heartbeat.'):
                try:
                    _topic, body = msg.split(':', 1)
                    self.handleHeartbeat(json.loads(body))
                except:  # pylint: disable=W0702
                    self.logException('handling heartbeat')

    def handleRpcCall(self, messages):
        for msg in messages:
            try:
                call = json.loads(msg)
                callId = call['id']
            except:  # pylint: disable=W0702
                self.rpcStream.send(
                    json.dumps({
                        'result': None,
                        'error': 'malformed request'
                    }))
                continue

            try:
                method = call['method']
                _params = call['params']
                if method == 'info':
                    result = self.handleInfo()
                else:
                    raise ValueError('unknown method %s' % method)
                self.rpcStream.send(
                    json.dumps({
                        'result': result,
                        'error': None,
                        'id': callId
                    }))
            except:  # pylint: disable=W0702
                self.logException('handling rpc message')
                errClass, errObject = sys.exc_info()[:2]
                errText = '%s.%s: %s' % (errClass.__module__,
                                         errClass.__name__, str(errObject))
                self.rpcStream.send(
                    json.dumps({
                        'result': None,
                        'error': errText,
                        'id': callId
                    }))

    def handleDisconnectTimer(self):
        now = getTimestamp()
        disconnectModules = []
        for moduleName, entry in self.info.iteritems():
            timeout = entry.get('timeout', None)
            if timeout is not None and now > timeout:
                disconnectModules.append(moduleName)
        for moduleName in disconnectModules:
            self.announceDisconnect(moduleName)
            del self.info[moduleName]

    def readyLog(self, pathTemplate, timestamp):
        if '%s' in pathTemplate:
            timeText = timestamp.strftime('%Y-%m-%d-%H-%M-%S')
            logFile = pathTemplate % timeText
        else:
            logFile = pathTemplate
        if not os.path.exists(self.logDir):
            os.makedirs(self.logDir)
        logPath = os.path.join(self.logDir, logFile)
        if '%s' in pathTemplate:
            latestPath = os.path.join(self.logDir, pathTemplate % 'latest')
            if os.path.islink(latestPath):
                os.unlink(latestPath)
            os.symlink(logFile, latestPath)
        return logPath

    def start(self):
        # open log files
        now = datetime.datetime.utcnow()
        self.logDir = os.path.abspath(self.opts.logDir)
        self.messageLogPath = self.readyLog(self.opts.messageLog, now)
        self.messageLog = open(self.messageLogPath, 'a')
        self.consoleLogPath = self.readyLog(self.opts.consoleLog, now)

        rootLogger = logging.getLogger()
        rootLogger.setLevel(logging.DEBUG)
        fmt = logging.Formatter('%(asctime)s - %(levelname)-7s - %(message)s')
        fmt.converter = time.gmtime
        fh = logging.FileHandler(self.consoleLogPath)
        fh.setFormatter(fmt)
        fh.setLevel(logging.DEBUG)
        rootLogger.addHandler(fh)
        if self.opts.foreground:
            ch = logging.StreamHandler()
            ch.setLevel(logging.DEBUG)
            ch.setFormatter(fmt)
            rootLogger.addHandler(ch)

        # daemonize
        if self.opts.foreground:
            logging.info('staying in foreground')
        else:
            logging.info('daemonizing')
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.setsid()
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.chdir('/')
            os.close(1)
            os.close(2)
            nullFd = os.open('/dev/null', os.O_RDWR)
            os.dup2(nullFd, 1)
            os.dup2(nullFd, 2)

        try:
            # set up zmq
            self.context = zmq.Context.instance()
            self.rpcStream = ZMQStream(self.context.socket(zmq.REP))
            self.rpcStream.bind(self.opts.rpcEndpoint)
            self.rpcStream.on_recv(self.handleRpcCall)

            self.forwarder = ThreadDevice(zmq.FORWARDER, zmq.SUB, zmq.PUB)
            self.forwarder.setsockopt_in(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_out(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_in(zmq.SUBSCRIBE, '')
            self.forwarder.setsockopt_out(zmq.HWM, self.opts.highWaterMark)
            self.forwarder.bind_in(self.opts.subscribeEndpoint)
            self.forwarder.bind_in(INJECT_ENDPOINT)
            self.forwarder.bind_out(self.opts.publishEndpoint)
            self.forwarder.bind_out(MONITOR_ENDPOINT)
            for entry in self.opts.subscribeTo:
                try:
                    moduleName, endpoint = entry.split('@')
                    endpoint = parseEndpoint(endpoint)
                except ValueError:
                    raise ValueError(
                        '--subscribeTo argument "%s" is not in the format "<moduleName>@<endpoint>"'
                        % entry)
                self.forwarder.connect_in(endpoint)
                self.info[moduleName] = {'module': moduleName, 'pub': endpoint}
            self.forwarder.start()
            time.sleep(0.1)  # wait for forwarder to bind sockets

            self.monStream = ZMQStream(self.context.socket(zmq.SUB))
            self.monStream.setsockopt(zmq.SUBSCRIBE, '')
            self.monStream.connect(MONITOR_ENDPOINT)
            self.monStream.on_recv(self.handleMessages)

            self.injectStream = ZMQStream(self.context.socket(zmq.PUB))
            self.injectStream.connect(INJECT_ENDPOINT)

            self.disconnectTimer = ioloop.PeriodicCallback(
                self.handleDisconnectTimer, 5000)
            self.disconnectTimer.start()

        except:  # pylint: disable=W0702
            errClass, errObject, errTB = sys.exc_info()[:3]
            errText = '%s.%s: %s' % (errClass.__module__, errClass.__name__,
                                     str(errObject))
            logging.error(''.join(traceback.format_tb(errTB)))
            logging.error(errText)
            logging.error('[error during startup -- exiting]')
            sys.exit(1)

    def shutdown(self):
        self.messageLog.flush()
Example #16
class MDPWorker(object):
    """Class for the MDP worker side.

    Thin encapsulation of a zmq.DEALER socket.
    Provides a send method with optional timeout parameter.

    Will use a timeout to indicate a broker failure.
    """

    _proto_version = W_WORKER

    # TODO: integrate that into API
    HB_INTERVAL = 2500  # in milliseconds
    HB_LIVENESS = 5  # HBs to miss before connection counts as dead
    RECONNECT_PERIOD = 5000

    def __init__(self,
                 context,
                 endpoint,
                 hb_endpoint,
                 service,
                 endpoint_callback=None):
        """Initialize the MDPWorker.

        context is the zmq context to create the socket from.
        service is a byte-string with the service name.
        """
        self.context = context
        self.endpoint = endpoint
        self.hb_endpoint = hb_endpoint
        self.service = service
        self.endpoint_callback = endpoint_callback
        self.stream = None
        self.hb_stream = None
        self.ticker = None
        self._delayed_reconnect = None
        self._unique_id = b''
        self._create_stream()

    def _create_stream(self):
        """Helper to create the socket and the stream.
        """

        logging.debug('Worker creating stream')

        ioloop = IOLoop.instance()

        socket = self.context.socket(zmq.DEALER)
        self.stream = ZMQStream(socket, ioloop)
        self.stream.on_recv(self._on_message)
        self.stream.socket.setsockopt(zmq.LINGER, 0)
        self.stream.connect(self.endpoint)

        socket = self.context.socket(zmq.DEALER)
        self.hb_stream = ZMQStream(socket, ioloop)
        self.hb_stream.on_recv(self._on_message)
        self.hb_stream.socket.setsockopt(zmq.LINGER, 0)
        self.hb_stream.connect(self.hb_endpoint)

        self.ticker = PeriodicCallback(self._tick, self.HB_INTERVAL)
        self._send_ready()
        self.ticker.start()

    def _send_ready(self):
        """Helper method to prepare and send the workers READY message.
        """

        ready_msg = [EMPTY_FRAME, self._proto_version, W_READY, self.service]
        self.stream.send_multipart(ready_msg)
        self.curr_liveness = self.HB_LIVENESS

    def _tick(self):
        """Method called every HB_INTERVAL milliseconds.
        """

        self.curr_liveness -= 1
        logging.debug('Worker HB tick, current liveness: %d' %
                      self.curr_liveness)

        self.send_hb()
        if self.curr_liveness >= 0:
            return

        #
        # Ouch, connection seems to be dead
        #
        logging.debug('Worker lost connection')
        self.shutdown()
        #
        # try to recreate the connection
        #
        self._delayed_reconnect = DelayedCallback(self._recreate_stream,
                                                  self.RECONNECT_PERIOD)
        self._delayed_reconnect.start()

    def _recreate_stream(self):

        logging.debug('Worker trying to recreate stream')

        if self.endpoint_callback is not None:
            #
            # Check, maybe the ip of the proxy changed.
            #
            try:
                self.endpoint, self.hb_endpoint = self.endpoint_callback()
            except:
                #
                # Probably some problem in accessing the server.
                #
                self._delayed_reconnect = DelayedCallback(
                    self._recreate_stream, self.RECONNECT_PERIOD)
                self._delayed_reconnect.start()
                return

        self._create_stream()

    def send_hb(self):
        """Construct and send HB message to broker.
        """

        msg = [EMPTY_FRAME, self._proto_version, W_HEARTBEAT, self._unique_id]

        self.hb_stream.send_multipart(msg)

    def shutdown(self):
        """Method to deactivate the worker connection completely.

        Will delete the stream and the underlying socket.
        """

        logging.debug('Shutdown of the worker')

        if self.ticker:
            logging.debug('Stopping the HB ticker')
            self.ticker.stop()
            self.ticker = None

        if not self.stream:
            return

        logging.debug('Closing the stream')

        self.stream.socket.close()
        self.stream.close()
        self.stream = None

        self.hb_stream.socket.close()
        self.hb_stream.close()
        self.hb_stream = None

        self.timed_out = False
        self.connected = False

    def reply(self, msg):
        """Send the given message.

        msg can either be a byte-string or a list of byte-strings.
        """

        #
        # prepare full message
        #
        to_send = self.envelope
        self.envelope = None

        if isinstance(msg, list):
            to_send.extend(msg)
        else:
            to_send.append(msg)

        self.stream.send_multipart(to_send)

    def _on_message(self, msg):
        """Helper method called on message receive.

        msg is a list w/ the message parts
        """

        logging.debug('Received message: {}'.format(msg))

        #
        # 1st part is empty
        #
        msg.pop(0)

        #
        # 2nd part is protocol version
        # TODO: version check
        #
        proto = msg.pop(0)

        #
        # 3rd part is message type
        #
        msg_type = msg.pop(0)

        #
        # XXX: hardcoded message types!
        # any message resets the liveness counter
        #
        self.curr_liveness = self.HB_LIVENESS

        if msg_type == W_DISCONNECT:
            #
            # Disconnect. Reconnection will be triggered by hb timer
            #
            self.curr_liveness = 0
        elif msg_type == W_READY:
            #
            # The message contains the unique id attached to the worker.
            #
            if len(msg) > 0:
                #
                # This above check is used for supporting older version of
                # the code.
                #
                self._unique_id = msg[0]
        elif msg_type == W_REQUEST:
            #
            # Request. Remaining parts are the user message
            #
            envelope, msg = split_address(msg)
            envelope.append(EMPTY_FRAME)
            envelope = [EMPTY_FRAME, self._proto_version, W_REPLY] + envelope
            self.envelope = envelope

            self.on_request(msg)
        else:
            #
            # invalid message
            # ignored
            #
            pass

    def on_request(self, msg):
        """Public method called when a request arrived.

        Must be overloaded!
        """

        raise NotImplementedError(
            'on_request must be implemented by the subclass.')
Example #17
class ZmqSubscriber(object):
    def __init__(self,
                 moduleName,
                 context=None,
                 centralPublishEndpoint=SUBSCRIBER_OPT_DEFAULTS['centralPublishEndpoint']):
        self.moduleName = moduleName

        if context is None:
            context = zmq.Context.instance()
        self.context = context

        self.centralPublishEndpoint = parseEndpoint(centralPublishEndpoint,
                                                    defaultPort=DEFAULT_CENTRAL_PUBLISH_PORT)

        self.handlers = {}
        self.counter = 0
        self.deserializer = serializers.get_deserializer('json')

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName',
                              default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralPublishEndpoint'):
            parser.add_option('--centralPublishEndpoint',
                              default=SUBSCRIBER_OPT_DEFAULTS['centralPublishEndpoint'],
                              help='Endpoint where central publishes messages [%default]')

    @classmethod
    def getOptionValues(cls, opts):
        result = {}
        for key in SUBSCRIBER_OPT_DEFAULTS.iterkeys():
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def start(self):
        sock = self.context.socket(zmq.SUB)
        self.stream = ZMQStream(sock)
        self.stream.setsockopt(zmq.IDENTITY, self.moduleName)
        self.stream.connect(self.centralPublishEndpoint)
        self.stream.on_recv(self.routeMessage)

    def routeMessage(self, messages):
        for msg in messages:
            colonIndex = msg.find(':')
            topic = msg[:(colonIndex + 1)]
            body = msg[(colonIndex + 1):]

            topicRegistry = None
            for topicPrefix, registry in self.handlers.iteritems():
                if topic.startswith(topicPrefix):
                    topicRegistry = registry
                    break

            if topicRegistry is None:
                # no handler registered for this topic prefix
                continue

            for handler in topicRegistry.itervalues():
                handler(topic[:-1], body)

    def subscribeRaw(self, topic, handler):
        topicRegistry = self.handlers.setdefault(topic, {})
        if not topicRegistry:
            self.stream.setsockopt(zmq.SUBSCRIBE, topic)
        handlerId = (topic, self.counter)
        topicRegistry[self.counter] = handler
        self.counter += 1
        return handlerId

    def subscribeJson(self, topic, handler):
        def jsonHandler(topic, body):
            return handler(topic, json.loads(body))
        return self.subscribeRaw(topic, jsonHandler)

    def subscribeDjango(self, topic, handler):
        def djangoHandler(topic, body):
            obj = json.loads(body)
            dataText = json.dumps([obj['data']])
            modelInstance = self.deserializer(dataText)[0]
            return handler(topic, modelInstance)
        return self.subscribeRaw(topic, djangoHandler)

    def unsubscribe(self, handlerId):
        topic, index = handlerId
        topicRegistry = self.handlers[topic]
        del topicRegistry[index]
        if not topicRegistry:
            self.stream.setsockopt(zmq.UNSUBSCRIBE, topic)

    def connect(self, endpoint):
        self.stream.connect(endpoint)
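# A rough usage sketch (not part of the original example) for the subscriber
# above, assuming a central publisher is reachable at the default endpoint.
# The module name, topic prefix, and handler are illustrative.
from zmq.eventloop.ioloop import IOLoop

def onHeartbeat(topic, obj):
    print 'heartbeat from %s: %r' % (topic, obj)

sub = ZmqSubscriber('exampleModule')
sub.start()
sub.subscribeJson('central.heartbeat.', onHeartbeat)
IOLoop.instance().start()  # blocking; handlers fire as messages arrive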
Example #18
0
class ZmqCentral(object):
    def __init__(self, opts):
        self.opts = opts
        self.info = {}

    def announceConnect(self, moduleName, params):
        logging.info('module %s connected', moduleName)
        self.injectStream.send('central.connect.%s:%s'
                               % (moduleName, json.dumps(params)))

    def announceDisconnect(self, moduleName):
        logging.info('module %s disconnected', moduleName)
        self.injectStream.send('central.disconnect.%s:%s'
                               % (moduleName,
                                  json.dumps({'timestamp': str(getTimestamp())})))

    def logMessage(self, msg, posixTime=None, attachmentDir='-'):
        mlog = self.messageLog
        mlog.write('@@@ %d %d %s ' % (getTimestamp(posixTime), len(msg), attachmentDir))
        mlog.write(msg)
        mlog.write('\n')

    def logMessageWithAttachments0(self, msg):
        parsed = parseMessage(msg)
        posixTime = time.time()

        # construct attachment directory
        dt = datetime.datetime.utcfromtimestamp(posixTime)
        dateText = dt.strftime('%Y-%m-%d')
        timeText = dt.strftime('%H-%M-%S') + '.%06d' % dt.microsecond
        uniq = '%08x' % random.getrandbits(32)
        attachmentSuffix = os.path.join('attachments',
                                        dateText,
                                        timeText,
                                        parsed['topic'],
                                        uniq)
        attachmentPath = os.path.join(self.logDir, attachmentSuffix)
        os.makedirs(attachmentPath)

        # write attachments to attachment directory
        for attachment in parsed['attachments']:
            fullName = os.path.join(attachmentPath, attachment.get_filename())
            open(fullName, 'wb').write(attachment.get_payload())

        # log message with a pointer to the attachment directory
        self.logMessage(':'.join((parsed['topic'], parsed['json'])),
                        posixTime,
                        attachmentSuffix)

    def logMessageWithAttachments(self, msg):
        try:
            return self.logMessageWithAttachments0(msg)
        except:  # pylint: disable=W0702
            self.logException('logging message with attachments')

    def handleHeartbeat(self, params):
        moduleName = params['module'].encode('utf-8')
        now = getTimestamp()

        oldInfo = self.info.get(moduleName, None)
        if oldInfo:
            if oldInfo.get('pub', None) != params.get('pub', None):
                self.announceDisconnect(moduleName)
                self.announceConnect(moduleName, params)
        else:
            self.announceConnect(moduleName, params)

        self.info[moduleName] = params
        keepalive = params.get('keepalive', DEFAULT_KEEPALIVE_US)
        params['timeout'] = now + keepalive
        return 'ok'

    def handleInfo(self):
        return self.info

    def logException(self, whileClause):
        errClass, errObject, errTB = sys.exc_info()[:3]
        errText = '%s.%s: %s' % (errClass.__module__,
                                 errClass.__name__,
                                 str(errObject))
        logging.warning(''.join(traceback.format_tb(errTB)))
        logging.warning(errText)
        logging.warning('[error while %s at time %s]', whileClause, getTimestamp())

    def handleMessages(self, messages):
        for msg in messages:
            if hasAttachments(msg):
                self.logMessageWithAttachments(msg)
            else:
                self.logMessage(msg)
            if msg.startswith('central.heartbeat.'):
                try:
                    _topic, body = msg.split(':', 1)
                    self.handleHeartbeat(json.loads(body))
                except:  # pylint: disable=W0702
                    self.logException('handling heartbeat')

    def handleRpcCall(self, messages):
        for msg in messages:
            try:
                call = json.loads(msg)
                callId = call['id']
            except:  # pylint: disable=W0702
                self.rpcStream.send(json.dumps({'result': None,
                                                'error': 'malformed request'}))
                continue

            try:
                method = call['method']
                _params = call['params']
                if method == 'info':
                    result = self.handleInfo()
                else:
                    raise ValueError('unknown method %s' % method)
                self.rpcStream.send(json.dumps({'result': result,
                                                'error': None,
                                                'id': callId}))
            except:  # pylint: disable=W0702
                self.logException('handling rpc message')
                errClass, errObject = sys.exc_info()[:2]
                errText = '%s.%s: %s' % (errClass.__module__,
                                         errClass.__name__,
                                         str(errObject))
                self.rpcStream.send(json.dumps({'result': None,
                                                'error': errText,
                                                'id': callId}))

    def handleDisconnectTimer(self):
        now = getTimestamp()
        disconnectModules = []
        for moduleName, entry in self.info.iteritems():
            timeout = entry.get('timeout', None)
            if timeout is not None and now > timeout:
                disconnectModules.append(moduleName)
        for moduleName in disconnectModules:
            self.announceDisconnect(moduleName)
            del self.info[moduleName]

    def readyLog(self, pathTemplate, timestamp):
        if '%s' in pathTemplate:
            timeText = timestamp.strftime('%Y-%m-%d-%H-%M-%S')
            logFile = pathTemplate % timeText
        else:
            logFile = pathTemplate
        if not os.path.exists(self.logDir):
            os.makedirs(self.logDir)
        logPath = os.path.join(self.logDir, logFile)
        if '%s' in pathTemplate:
            latestPath = os.path.join(self.logDir, pathTemplate % 'latest')
            if os.path.islink(latestPath):
                os.unlink(latestPath)
            os.symlink(logFile, latestPath)
        return logPath

    def start(self):
        # open log files
        now = datetime.datetime.utcnow()
        self.logDir = os.path.abspath(self.opts.logDir)
        self.messageLogPath = self.readyLog(self.opts.messageLog, now)
        self.messageLog = open(self.messageLogPath, 'a')
        self.consoleLogPath = self.readyLog(self.opts.consoleLog, now)

        rootLogger = logging.getLogger()
        rootLogger.setLevel(logging.DEBUG)
        fmt = logging.Formatter('%(asctime)s - %(levelname)-7s - %(message)s')
        fmt.converter = time.gmtime
        fh = logging.FileHandler(self.consoleLogPath)
        fh.setFormatter(fmt)
        fh.setLevel(logging.DEBUG)
        rootLogger.addHandler(fh)
        if self.opts.foreground:
            ch = logging.StreamHandler()
            ch.setLevel(logging.DEBUG)
            ch.setFormatter(fmt)
            rootLogger.addHandler(ch)

        # daemonize
        if self.opts.foreground:
            logging.info('staying in foreground')
        else:
            logging.info('daemonizing')
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.setsid()
            pid = os.fork()
            if pid != 0:
                os._exit(0)
            os.chdir('/')
            os.close(1)
            os.close(2)
            nullFd = os.open('/dev/null', os.O_RDWR)
            os.dup2(nullFd, 1)
            os.dup2(nullFd, 2)

        try:
            # set up zmq
            self.context = zmq.Context.instance()
            self.rpcStream = ZMQStream(self.context.socket(zmq.REP))
            self.rpcStream.bind(self.opts.rpcEndpoint)
            self.rpcStream.on_recv(self.handleRpcCall)

            self.forwarder = ThreadDevice(zmq.FORWARDER, zmq.SUB, zmq.PUB)
            self.forwarder.setsockopt_in(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_out(zmq.IDENTITY, THIS_MODULE)
            self.forwarder.setsockopt_in(zmq.SUBSCRIBE, '')
            self.forwarder.setsockopt_out(zmq.HWM, self.opts.highWaterMark)
            self.forwarder.bind_in(self.opts.subscribeEndpoint)
            self.forwarder.bind_in(INJECT_ENDPOINT)
            self.forwarder.bind_out(self.opts.publishEndpoint)
            self.forwarder.bind_out(MONITOR_ENDPOINT)
            for entry in self.opts.subscribeTo:
                try:
                    moduleName, endpoint = entry.split('@')
                    endpoint = parseEndpoint(endpoint)
                except ValueError:
                    raise ValueError('--subscribeTo argument "%s" is not in the format "<moduleName>@<endpoint>"' % entry)
                self.forwarder.connect_in(endpoint)
                self.info[moduleName] = {'module': moduleName,
                                         'pub': endpoint}
            self.forwarder.start()
            time.sleep(0.1)  # wait for forwarder to bind sockets

            self.monStream = ZMQStream(self.context.socket(zmq.SUB))
            self.monStream.setsockopt(zmq.SUBSCRIBE, '')
            self.monStream.connect(MONITOR_ENDPOINT)
            self.monStream.on_recv(self.handleMessages)

            self.injectStream = ZMQStream(self.context.socket(zmq.PUB))
            self.injectStream.connect(INJECT_ENDPOINT)

            self.disconnectTimer = ioloop.PeriodicCallback(self.handleDisconnectTimer, 5000)
            self.disconnectTimer.start()

        except:  # pylint: disable=W0702
            errClass, errObject, errTB = sys.exc_info()[:3]
            errText = '%s.%s: %s' % (errClass.__module__,
                                     errClass.__name__,
                                     str(errObject))
            logging.error(''.join(traceback.format_tb(errTB)))
            logging.error(errText)
            logging.error('[error during startup -- exiting]')
            sys.exit(1)

    def shutdown(self):
        self.messageLog.flush()
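# A rough sketch (not part of the original example) of calling the JSON-RPC
# endpoint that ZmqCentral binds above. The address is illustrative; use
# whatever opts.rpcEndpoint points at. Only the 'info' method is known here.
import json
import zmq

ctx = zmq.Context.instance()
req = ctx.socket(zmq.REQ)
req.connect('tcp://127.0.0.1:7814')  # illustrative address
req.send(json.dumps({'method': 'info', 'params': [], 'id': 1}))
reply = json.loads(req.recv())
print reply['result']  # dict of known modules keyed by module name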
Example #19
0
    def init_hub(self):
        """construct"""
        client_iface = "%s://%s:" % (self.client_transport, self.client_ip) + "%i"
        engine_iface = "%s://%s:" % (self.engine_transport, self.engine_ip) + "%i"

        ctx = self.context
        loop = self.loop

        # Registrar socket
        q = ZMQStream(ctx.socket(zmq.ROUTER), loop)
        q.bind(client_iface % self.regport)
        self.log.info("Hub listening on %s for registration.", client_iface % self.regport)
        if self.client_ip != self.engine_ip:
            q.bind(engine_iface % self.regport)
            self.log.info("Hub listening on %s for registration.", engine_iface % self.regport)

        ### Engine connections ###

        # heartbeat
        hpub = ctx.socket(zmq.PUB)
        hpub.bind(engine_iface % self.hb[0])
        hrep = ctx.socket(zmq.ROUTER)
        hrep.bind(engine_iface % self.hb[1])
        self.heartmonitor = HeartMonitor(loop=loop, config=self.config, log=self.log,
                                pingstream=ZMQStream(hpub,loop),
                                pongstream=ZMQStream(hrep,loop)
                            )

        ### Client connections ###
        # Notifier socket
        n = ZMQStream(ctx.socket(zmq.PUB), loop)
        n.bind(client_iface%self.notifier_port)

        ### build and launch the queues ###

        # monitor socket
        sub = ctx.socket(zmq.SUB)
        sub.setsockopt(zmq.SUBSCRIBE, b"")
        sub.bind(self.monitor_url)
        sub.bind('inproc://monitor')
        sub = ZMQStream(sub, loop)

        # connect the db
        self.log.info('Hub using DB backend: %r'%(self.db_class.split()[-1]))
        # cdir = self.config.Global.cluster_dir
        self.db = import_item(str(self.db_class))(session=self.session.session,
                                            config=self.config, log=self.log)
        time.sleep(.25)
        try:
            scheme = self.config.TaskScheduler.scheme_name
        except AttributeError:
            from .scheduler import TaskScheduler
            scheme = TaskScheduler.scheme_name.get_default_value()
        # build connection dicts
        self.engine_info = {
            'control' : engine_iface%self.control[1],
            'mux': engine_iface%self.mux[1],
            'heartbeat': (engine_iface%self.hb[0], engine_iface%self.hb[1]),
            'task' : engine_iface%self.task[1],
            'iopub' : engine_iface%self.iopub[1],
            # 'monitor' : engine_iface%self.mon_port,
            }

        self.client_info = {
            'control' : client_iface%self.control[0],
            'mux': client_iface%self.mux[0],
            'task' : (scheme, client_iface%self.task[0]),
            'iopub' : client_iface%self.iopub[0],
            'notification': client_iface%self.notifier_port
            }
        self.log.debug("Hub engine addrs: %s", self.engine_info)
        self.log.debug("Hub client addrs: %s", self.client_info)

        # resubmit stream
        r = ZMQStream(ctx.socket(zmq.DEALER), loop)
        url = util.disambiguate_url(self.client_info['task'][-1])
        r.setsockopt(zmq.IDENTITY, self.session.bsession)
        r.connect(url)

        self.hub = Hub(loop=loop, session=self.session, monitor=sub, heartmonitor=self.heartmonitor,
                query=q, notifier=n, resubmit=r, db=self.db,
                engine_info=self.engine_info, client_info=self.client_info,
                log=self.log)
Example #20
0
class UniWorker(object):
    """
    Implementation of "simple" ZeroMQ Paranoid Pirate communication scheme.  This class is the DEALER, and performs the
    "reply" in RPC calls.  By design, only supports one remote client (ROUTER) in order to keep example simple.
    Supports a very basic RPC interface, using MessagePack for encoding/decoding.
    """

    __metaclass__ = ABCMeta

    def __init__(self, endpoint, context=None):
        # type: (str, zmq.Context) -> None
        """
        Initialize the worker.
        :param endpoint: ZeroMQ endpoint to connect to.
        :param context: ZeroMQ Context
        """
        self._context = context or zmq.Context.instance()
        self._endpoint = endpoint
        self._stream = None  # type: Optional[ZMQStream]
        self._tmo = None
        self._need_handshake = True
        self._ticker = None  # type: Optional[PeriodicCallback]
        self._delayed_cb = None
        self._connected_event = Event()
        self._lock = Lock()

        self._create_stream()

        self._curr_liveness = HB_LIVENESS
        self._keep_running = True

    def _create_stream(self):
        # type: () -> None
        """
        Helper function to create the ZMQ stream, configure callbacks.
        """
        self.on_log_event("uniworker.connect", "Trying to connect to client")
        socket = self._context.socket(zmq.DEALER)

        self._stream = ZMQStream(socket, IOLoop())
        self._stream.on_recv(self._on_message)
        self._stream.socket.setsockopt(zmq.LINGER, 0)
        self._stream.connect(self._endpoint)

        self._ticker = PeriodicCallback(self._tick, HB_INTERVAL)
        self._send_ready()
        self._ticker.start()

    def run(self):
        # type: () -> None
        """
        Start the IOLoop, a blocking call to send/recv ZMQ messages until the IOLoop is stopped.
        Note: The name of this function needs to stay the same so UniWorkerThread's run() is overridden with this function.
        """
        if self._keep_running:
            self._stream.io_loop.start()

    def stop(self):
        # type: () -> None
        """
        Stop the IOLoop.
        """
        with self._lock:
            self._keep_running = False
            if self._stream is not None:
                self._stream.io_loop.stop()
            else:
                logger.warning("Can't stop worker - has shutdown() been called?")

    def shutdown(self):
        # type: () -> None
        """
        Close the stream/socket.  This should be called when closing the connection for the last time.
        """

        with self._lock:
            if self._ticker:
                self._ticker.stop()
                self._ticker = None
            if not self._stream:
                return

            self._stream.on_recv(None)
            self._send_disconnect()
            self._stream.close()
            self._stream = None
            self._need_handshake = True

    def wait_for_client(self, timeout):
        # type: (float) -> None
        """
        Wait for the worker to establish a connection with the remote client.
        Will return immediately if already connected.
        Typically, the worker provides a service/responds to requests, so this is really only used for unit testing.
        :param timeout: Max time, in seconds, to wait for the connection to establish.
        """
        event_status = self._connected_event.wait(timeout)
        if not event_status:
            raise LostRemoteError("No client is connected.")

    def is_connected(self):
        # type: () -> bool
        """
        Returns whether worker is connected to a client.
        :return: A boolean flag to indicate whether a connection to a client is established.
        """
        return not self._need_handshake

    def send_reply(self, msg, partial=False, exception=False):
        # type: (Any, bool, bool) -> None
        """
        Send a ZeroMQ message in reply to a client request.
        This should only be called out of the overridden do_work method.

        :param msg: The message to be sent out.
        :param partial: Flag indicating whether the response is a partial or final ZMQ message.
        :param exception: Flag indicating whether the reply carries an exception raised while handling the request.
        """

        msg = msgpack.Packer(default=XeroSerializer.encoder).pack(msg)
        if exception:
            to_send = [WORKER_EXCEPTION]
        elif partial:
            to_send = [WORKER_PARTIAL_REPLY]
        else:
            to_send = [WORKER_FINAL_REPLY]
        to_send.append(b'')
        if isinstance(msg, list):
            to_send.extend(msg)
        else:
            to_send.append(msg)

        self._stream.send_multipart(to_send, track=True, copy=False)

    def emit(self, msg):
        # type: (Any) -> None
        if not self.is_connected():
            raise LostRemoteError("No client is connected.")
        msg = msgpack.Packer(default=XeroSerializer.encoder).pack(msg)
        to_send = [WORKER_EMIT]
        to_send.append(b'')
        if isinstance(msg, list):
            to_send.extend(msg)
        else:
            to_send.append(msg)
        self._stream.io_loop.add_callback(
            lambda x: self._stream.send_multipart(x, track=True, copy=False),
            to_send)

    def _tick(self):
        # type: () -> None
        """
        Periodic callback to check connectivity to client.
        """
        if self._curr_liveness >= 0:
            self._curr_liveness -= 1

        if self._curr_liveness > 0:
            self._send_heartbeat()
        elif self._curr_liveness == 0:
            # Connection died, close on our side.
            self.on_log_event(
                "uniworker.tick",
                "Connection to uniclient timed out, disconnecting")
            self._connected_event.clear()
        else:
            self._send_ready()

    def _send_heartbeat(self):
        # type: () -> None
        """
        Send a heartbeat message to the client.
        """
        # Heartbeats should go out immediately: even if many emitted messages are queued up, heartbeats
        # should still be sent out regularly.  Therefore, send via the stream's underlying socket rather
        # than the stream itself.
        # See https://pyzmq.readthedocs.io/en/latest/eventloop.html#send
        self._stream.socket.send_multipart([WORKER_HEARTBEAT])

    def _send_disconnect(self):
        # type: () -> None
        """
        Send a disconnect message to the client.
        """
        # Send out via the underlying socket so this message takes priority over queued stream messages.
        self._stream.socket.send_multipart([WORKER_DISCONNECT])

    def _send_ready(self):
        # type: () -> None
        """
        Send a ready message to the client.
        """
        self.on_log_event("uniworker.ready", "Sending ready to client.")
        self._stream.send_multipart([WORKER_READY])

    def _on_message(self, msg):
        # type: (List[bytes]) -> None
        """
        Processes a received ZeroMQ message.
        :param msg: List of byte-string frames in the format:
            [ Protocol Header, Message Type, MessagePart1, MessagePart2, ... ]
        """

        # 1st part is the protocol header/version
        protocol_version = msg.pop(0)
        if protocol_version != UNI_CLIENT_HEADER:  # version check, ignore old versions
            logger.error(
                "Message doesn't start with {}".format(UNI_CLIENT_HEADER))
            return
        # 2nd part is the message type
        msg_type = msg.pop(0)
        # any message resets the liveness counter
        self._need_handshake = False
        self._connected_event.set()
        self._curr_liveness = HB_LIVENESS
        if msg_type == WORKER_DISCONNECT:  # disconnect
            self._curr_liveness = 0  # reconnect will be triggered by hb timer
        elif msg_type == WORKER_REQUEST:  # request
            # remaining parts are the user message
            self._on_request(msg)
        elif msg_type == WORKER_HEARTBEAT:
            # received heartbeat - timer handled above
            pass
        else:
            logger.error("Uniworker received unrecognized message")

    def _on_request(self, message):
        # type: (List[bytes]) -> None
        """
        This gets called on incoming RPC messages; it unpacks the encoded message into something do_work() can process.
        :param message: List of byte-strings in the form [name, packed args, packed kwargs].
        """
        name = str(message[0], 'utf-8')
        args = msgpack.unpackb(message[1],
                               object_hook=XeroSerializer.decoder,
                               raw=False)
        kwargs = msgpack.unpackb(message[2],
                                 object_hook=XeroSerializer.decoder,
                                 raw=False)
        self.do_work(name, args, kwargs)

    def on_log_event(self, event, message):
        # type: (str, str) -> None
        """
        Called on internal loggable events.  Designed for override.
        :param event: The event type.
        :param message: Loggable message.
        """
        logger.debug("{}: {}".format(event, message))

    @abstractmethod
    def do_work(self, name, args, kwargs):
        # type: (str, List[Any], Dict[Any,Any]) -> None
        """
        Override this method for worker-specific message handling.
        :param name: The 'name' of the function/rpc call.
        :param args: Function call arguments.
        :param kwargs: Function call keyword arguments.
        """
        raise NotImplementedError()
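# A rough sketch (not part of the original example) of a concrete UniWorker:
# implement do_work() and answer through send_reply(). The endpoint is
# illustrative; run() blocks on the stream's IOLoop until stop()/shutdown().
class AdderWorker(UniWorker):

    def do_work(self, name, args, kwargs):
        # name is the RPC name, args/kwargs the already-unpacked call arguments
        if name == 'add':
            self.send_reply(sum(args))
        else:
            self.send_reply('unknown method: {}'.format(name), exception=True)

worker = AdderWorker('tcp://127.0.0.1:5560')  # endpoint is illustrative
worker.run()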
Example #21
0
class MQRep(object):

    """Class for the MDP worker side.

    Thin encapsulation of a zmq.DEALER socket.
    Provides a send method with optional timeout parameter.

    Will use a timeout to indicate a broker failure.
    """

    _proto_version = b'MDPW01'

    # TODO: integrate that into API
    HB_INTERVAL = 1000  # in milliseconds
    HB_LIVENESS = 3    # HBs to miss before connection counts as dead

    def __init__(self, context, service):
        """Initialize the MDPWorker.

        context is the zmq context to create the socket from.
        service is a byte-string with the service name.
        """
        if DEBUG:
            print("MQRep > __init__")
        cfg = Loader('mq').load()
        config = dict(cfg[1])
        if config['ip'].strip() == "*":
            config['ip'] = get_ip()
        self.endpoint = "tcp://{0}:{1}".format(config['ip'], config['req_rep_port'])
        self.context = context
        self.service = service
        self.stream = None
        self._tmo = None
        self.need_handshake = True
        self.ticker = None
        self._delayed_cb = None
        self._create_stream()

        ### patch fritz
        self._reconnect_in_progress = False
        ### end patch fritz
        return

    def _create_stream(self):
        """Helper to create the socket and the stream.
        """
        if DEBUG:
            print("MQRep > _create_stream")
        socket = ZmqSocket(self.context, zmq.DEALER)
        ioloop = IOLoop.instance()
        self.stream = ZMQStream(socket, ioloop)
        self.stream.on_recv(self._on_mpd_message)
        self.stream.socket.setsockopt(zmq.LINGER, 0)
        self.stream.connect(self.endpoint)
        if self.ticker != None:
            if DEBUG:
                print("MQRep > _create_stream - stop ticker")
            self.ticker.stop()
        self.ticker = PeriodicCallback(self._tick, self.HB_INTERVAL)
        self._send_ready()
        self.ticker.start()
        return

    def _send_ready(self):
        """Helper method to prepare and send the workers READY message.
        """
        if DEBUG:
            print("MQREP > _send_ready")
        ready_msg = [ b'', self._proto_version, b'\x01', self.service ]
        self.stream.send_multipart(ready_msg)
        self.curr_liveness = self.HB_LIVENESS
        if DEBUG:
            print("MQREP > _send_ready > curr_liveness <= {0}".format(self.HB_LIVENESS))
        return

    def _tick(self):
        """Method called every HB_INTERVAL milliseconds.
        """
        if DEBUG:
            print("MQREP > _tick")
        self.curr_liveness -= 1
        if DEBUG:
            print('MQREP > _tick - {0} tick = {1}'.format(time.time(), self.curr_liveness))
        self.send_hb()
        if self.curr_liveness >= 0:
            return
        if DEBUG:
            print('MQREP > _tick - {0} lost connection'.format(time.time()))
        # ouch, connection seems to be dead
        self.shutdown()
        # try to recreate it
        self._delayed_cb = DelayedCallback(self._create_stream, self.HB_INTERVAL)
        self._delayed_cb.start()
        return

    def send_hb(self):
        """Construct and send HB message to broker.
        """
        msg = [ b'', self._proto_version, b'\x04' ]
        self.stream.send_multipart(msg)
        return

    def shutdown(self):
        """Method to deactivate the worker connection completely.

        Will delete the stream and the underlying socket.
        """
        if self.ticker:
            self.ticker.stop()
            self.ticker = None
        if not self.stream:
            return
        self.stream.socket.close()
        self.stream.close()
        self.stream = None
        self.timed_out = False
        self.need_handshake = True
        self.connected = False
        return

    def reply(self, msg):
        """Send the given message.

        msg can either be a byte-string or a list of byte-strings.
        """
##         if self.need_handshake:
##             raise ConnectionNotReadyError()
        # prepare full message
        to_send = self.envelope
        self.envelope = None
        if isinstance(msg, list):
            to_send.extend(msg)
        else:
            to_send.append(msg)
        self.stream.send_multipart(to_send)
        return

    def _on_mpd_message(self, msg):
        """Helper method called on message receive.

        msg is a list w/ the message parts
        """
        if DEBUG:
            print("MQRep > _on_mpd_message : {0} - {1}".format(time.strftime("%H:%M:%S"), msg))
        # 1st part is empty
        msg.pop(0)
        # 2nd part is protocol version
        # TODO: version check
        proto = msg.pop(0)
        # 3rd part is message type
        msg_type = msg.pop(0)
        # XXX: hardcoded message types!
        # any message resets the liveness counter
        self.need_handshake = False
        self.curr_liveness = self.HB_LIVENESS
        if DEBUG:
            print("MQREP > _on_mpd_message > curr_liveness <= {0}".format(self.HB_LIVENESS))
        if msg_type == b'\x05': # disconnect
            if DEBUG:
                print("MQREP > _on_mpd_message > type x05 : disconnect")
            self.curr_liveness = 0 # reconnect will be triggered by hb timer
        elif msg_type == b'\x02': # request
            if DEBUG:
                print("MQREP > _on_mpd_message > type x02 : request")
            # remaining parts are the user message
            envelope, msg = split_address(msg)
            envelope.append(b'')
            envelope = [ b'', self._proto_version, b'\x03'] + envelope # REPLY
            self.envelope = envelope
            mes = MQMessage()
            mes.set(msg)
            #print("MQRep > before self.on_mdp_request")
            #print(self.on_mdp_request)
            #print(mes)
            try:
                self.on_mdp_request(mes)
            except:
                print("ERROR {0}".format(traceback.format_exc()))
        else:
            if DEBUG:
                print("MQREP > _on_mpd_message > type ??? : invalid or hbeat")
            # invalid message
            # ignored
            # if \x04, this is a hbeat message
            pass
        return

    def on_mdp_request(self, msg):
        """Public method called when a request arrived.

        Must be overloaded!
        """
        pass
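# A rough sketch (not part of the original example) of using MQRep: subclass it
# and override on_mdp_request(); reply() already has the return envelope set up
# by _on_mpd_message(). The service name and payload are illustrative, and the
# MQMessage accessors are not shown in this excerpt.
import zmq
from zmq.eventloop.ioloop import IOLoop

class PingRep(MQRep):

    def on_mdp_request(self, msg):
        # msg is an MQMessage built from the request frames
        self.reply([b'pong'])

rep = PingRep(zmq.Context(), b'example.ping')  # service name is illustrative
IOLoop.instance().start()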
Example #22
0
class ZmqPublisher(object):
    def __init__(self,
                 moduleName,
                 centralHost=PUBLISHER_OPT_DEFAULTS['centralHost'],
                 context=None,
                 centralSubscribeEndpoint=PUBLISHER_OPT_DEFAULTS['centralSubscribeEndpoint'],
                 publishEndpoint=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                 heartbeatPeriodMsecs=PUBLISHER_OPT_DEFAULTS['heartbeatPeriodMsecs'],
                 # highWaterMark=PUBLISHER_OPT_DEFAULTS['highWaterMark']
                 ):
        self.moduleName = moduleName
        self.centralHost = centralHost

        if context is None:
            context = zmq.Context.instance()
        self.context = context

        self.centralSubscribeEndpoint = parseEndpoint(centralSubscribeEndpoint,
                                                      defaultPort=DEFAULT_CENTRAL_SUBSCRIBE_PORT,
                                                      centralHost=self.centralHost)
        self.publishEndpoint = parseEndpoint(publishEndpoint,
                                             defaultPort='random')
        self.heartbeatPeriodMsecs = heartbeatPeriodMsecs
        #self.highWaterMark = highWaterMark

        self.pubStream = None
        self.heartbeatTimer = None

        self.serializer = serializers.get_serializer('json')()

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        if not parser.has_option('--centralHost'):
            parser.add_option('--centralHost',
                              default=PUBLISHER_OPT_DEFAULTS['centralHost'],
                              help='Host where central runs [%default]')
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName',
                              default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralSubscribeEndpoint'):
            parser.add_option('--centralSubscribeEndpoint',
                              default=PUBLISHER_OPT_DEFAULTS['centralSubscribeEndpoint'],
                              help='Endpoint where central listens for messages [%default]')
        if not parser.has_option('--publishEndpoint'):
            parser.add_option('--publishEndpoint',
                              default=PUBLISHER_OPT_DEFAULTS['publishEndpoint'],
                              help='Endpoint to publish messages on [%default]')
        if not parser.has_option('--heartbeatPeriodMsecs'):
            parser.add_option('--heartbeatPeriodMsecs',
                              default=PUBLISHER_OPT_DEFAULTS['heartbeatPeriodMsecs'],
                              type='int',
                              help='Period for sending heartbeats to central [%default]')
        #if not parser.has_option('--highWaterMark'):
        #    parser.add_option('--highWaterMark',
        #                      default=PUBLISHER_OPT_DEFAULTS['highWaterMark'],
        #                      type='int',
        #                      help='High-water mark for publish socket (see 0MQ docs) [%default]')

    @classmethod
    def getOptionValues(cls, opts):
        result = {}
        for key in PUBLISHER_OPT_DEFAULTS.iterkeys():
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def heartbeat(self):
        logging.debug('ZmqPublisher: heartbeat')
        self.sendJson('central.heartbeat.%s' % self.moduleName,
                      {'host': getShortHostName(),
                       'pub': self.publishEndpoint})

    def sendRaw(self, topic, body):
        self.pubStream.send('%s:%s' % (topic, body))
        self.pubStream.flush()

    def sendJson(self, topic, obj):
        if isinstance(obj, dict):
            obj.setdefault('module', self.moduleName)
            obj.setdefault('timestamp', str(getTimestamp()))
        self.sendRaw(topic, json.dumps(obj))

    def sendDjango(self, modelInstance, topic=None, topicSuffix=None):
        dataText = self.serializer.serialize([modelInstance])
        data = json.loads(dataText)[0]
        if topic is None:
            topic = data['model'].encode('utf-8')
            if topicSuffix is not None:
                topic += topicSuffix
        self.sendJson(topic, {'data': data})

    def start(self):
        pubSocket = self.context.socket(zmq.PUB)
        self.pubStream = ZMQStream(pubSocket)
        # self.pubStream.setsockopt(zmq.IDENTITY, self.moduleName)
        # self.pubStream.setsockopt(zmq.HWM, self.highWaterMark)
        self.pubStream.connect(self.centralSubscribeEndpoint)
        logging.info('zmq.publisher: connected to central at %s', self.centralSubscribeEndpoint)

        if self.publishEndpoint.endswith(':random'):
            endpointWithoutPort = re.sub(r':random$', '', self.publishEndpoint)
            port = self.pubStream.bind_to_random_port(endpointWithoutPort)
            self.publishEndpoint = '%s:%d' % (endpointWithoutPort, port)
        else:
            self.pubStream.bind(self.publishEndpoint)

        self.heartbeatTimer = ioloop.PeriodicCallback(self.heartbeat,
                                                      self.heartbeatPeriodMsecs)
        self.heartbeatTimer.start()
        self.heartbeat()
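# A rough usage sketch (not part of the original example) for the publisher
# above. start() connects to central, binds the publish endpoint, and begins
# periodic heartbeats; the module name, topic, and payload are illustrative.
from zmq.eventloop import ioloop

pub = ZmqPublisher('exampleModule')
pub.start()

def tick():
    pub.sendJson('exampleModule.tick', {'value': 42})

timer = ioloop.PeriodicCallback(tick, 1000)  # publish once per second
timer.start()
ioloop.IOLoop.instance().start()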
Example #23
0
class ZmqSubscriber(object):
    def __init__(self,
                 moduleName,
                 centralHost=SUBSCRIBER_OPT_DEFAULTS['centralHost'],
                 context=None,
                 centralPublishEndpoint=SUBSCRIBER_OPT_DEFAULTS[
                     'centralPublishEndpoint'],
                 replay=None):
        self.moduleName = moduleName
        self.centralHost = centralHost

        if context is None:
            context = zmq.Context.instance()
        self.context = context

        self.centralPublishEndpoint = parseEndpoint(
            centralPublishEndpoint,
            defaultPort=DEFAULT_CENTRAL_PUBLISH_PORT,
            centralHost=self.centralHost)
        self.replayPaths = replay
        if self.replayPaths is None:
            self.replayPaths = []

        self.handlers = {}
        self.counter = 0
        self.deserializer = serializers.get_deserializer('json')
        self.stream = None

    @classmethod
    def addOptions(cls, parser, defaultModuleName):
        if not parser.has_option('--centralHost'):
            parser.add_option('--centralHost',
                              default=SUBSCRIBER_OPT_DEFAULTS['centralHost'],
                              help='Host where central runs [%default]')
        if not parser.has_option('--moduleName'):
            parser.add_option('--moduleName',
                              default=defaultModuleName,
                              help='Name to use for this module [%default]')
        if not parser.has_option('--centralPublishEndpoint'):
            parser.add_option(
                '--centralPublishEndpoint',
                default=SUBSCRIBER_OPT_DEFAULTS['centralPublishEndpoint'],
                help='Endpoint where central publishes messages [%default]')
        if not parser.has_option('--replay'):
            parser.add_option(
                '--replay',
                action='append',
                help=
                'Replay specified message log (can specify multiple times), or use - to read from stdin'
            )

    @classmethod
    def getOptionValues(cls, opts):
        result = {}
        for key in SUBSCRIBER_OPT_DEFAULTS.iterkeys():
            val = getattr(opts, key, None)
            if val is not None:
                result[key] = val
        return result

    def start(self):
        sock = self.context.socket(zmq.SUB)
        self.stream = ZMQStream(sock)
        # causes problems with multiple instances
        #self.stream.setsockopt(zmq.IDENTITY, self.moduleName)
        self.stream.connect(self.centralPublishEndpoint)
        logging.info('zmq.subscriber: connected to central at %s',
                     self.centralPublishEndpoint)
        self.stream.on_recv(self.routeMessages)

    def routeMessages(self, messages):
        for msg in messages:
            self.routeMessage(msg)

    def routeMessage(self, msg):
        colonIndex = msg.find(':')
        topic = msg[:(colonIndex + 1)]
        body = msg[(colonIndex + 1):]

        handled = 0
        for topicPrefix, registry in self.handlers.iteritems():
            if topic.startswith(topicPrefix):
                for handler in registry.itervalues():
                    handler(topic[:-1], body)
                    handled = 1

        return handled

    def subscribeRaw(self, topicPrefix, handler):
        topicRegistry = self.handlers.setdefault(topicPrefix, {})
        if not topicRegistry:
            logging.info('zmq.subscriber: subscribe %s', topicPrefix)
            self.stream.setsockopt(zmq.SUBSCRIBE, topicPrefix)
        handlerId = (topicPrefix, self.counter)
        topicRegistry[self.counter] = handler
        self.counter += 1
        return handlerId

    def subscribeJson(self, topicPrefix, handler):
        def jsonHandler(topicPrefix, body):
            return handler(topicPrefix,
                           convertToDotDictRecurse(json.loads(body)))

        return self.subscribeRaw(topicPrefix, jsonHandler)

    def subscribeDjango(self, topicPrefix, handler):
        def djangoHandler(topicPrefix, body):
            obj = json.loads(body)
            dataText = json.dumps([obj['data']])
            modelInstance = list(self.deserializer(dataText))[0]
            return handler(topicPrefix, modelInstance.object)

        return self.subscribeRaw(topicPrefix, djangoHandler)

    def unsubscribe(self, handlerId):
        topicPrefix, index = handlerId
        topicRegistry = self.handlers[topicPrefix]
        del topicRegistry[index]
        if not topicRegistry:
            logging.info('zmq.subscriber: unsubscribe %s', topicPrefix)
            self.stream.setsockopt(zmq.UNSUBSCRIBE, topicPrefix)

    def connect(self, endpoint):
        self.stream.connect(endpoint)

    def replay(self):
        numReplayed = 0
        numHandled = 0
        for replayPath in self.replayPaths:
            print '=== replaying messages from %s' % replayPath
            if replayPath == '-':
                replayFile = sys.stdin
            else:
                replayFile = open(replayPath, 'rb')
            stream = LogParser(replayFile)
            for rec in stream:
                numReplayed += 1
                numHandled += self.routeMessage(rec.msg)

                if numReplayed % 10000 == 0:
                    print 'replayed %d messages, %d handled' % (numReplayed,
                                                                numHandled)
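# A rough sketch (not part of the original example) of offline replay with the
# subscriber above: register handlers as usual, then feed recorded message logs
# through replay() instead of the live feed. The log path is illustrative.
def onAny(topic, obj):
    print topic

sub = ZmqSubscriber('replayModule', replay=['/path/to/messages.txt'])
sub.start()                    # creates the SUB stream; connect succeeds even without central
sub.subscribeJson('', onAny)   # empty prefix matches every topic
sub.replay()                   # routes each logged record to matching handlers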
Example #24
0
class Subscriber (object):
    def __init__(self,
            sub_uri=defaults.subscriber_sub_uri,
            patterns=None,
            callbacks=None,
            ):

        self.sub_uri = sub_uri
        
        if patterns:
            self.patterns = patterns
        else:
            self.patterns = []

        if callbacks:
            self.callbacks = callbacks
        else:
            self.callbacks = []
        
        self.setup_logging()
        self.setup_zmq()
        self.setup_sockets()
        self.setup_subscriptions()
        self.setup_events()

    def setup_logging(self):
        self.log = logging.getLogger('zmqevt.subscriber')

    def setup_zmq(self):
        self.context = zmq.Context()

    def setup_sockets(self):
        self.sub = ZMQStream(self.context.socket(zmq.SUB))
        self.sub.connect(self.sub_uri)

    def setup_subscriptions(self):
        if self.patterns:
            for p in self.patterns:
                self.subscribe(p)

    def subscribe(self, pattern):
        self.log.debug('Subscribe to "%s".' % pattern)
        self.sub.setsockopt(zmq.SUBSCRIBE, pattern)

    def setup_events(self):
        self.sub.on_recv(self.on_recv)

    def register_callback(self, func, data=None):
        self.callbacks.append((func,data))

    def unregister_callback(self, func):
        self.callbacks = [x for x in self.callbacks if x[0] is not func]

    def on_recv(self, msg):
        self.log.debug('Receive: %s' % (str(msg)))

        assert len(msg) == 2, 'Received invalid message.'

        # This regenerates the event to ensure that we don't
        # pass on invalid data.
        try:
            evt = event.Event.load(msg)
        except Exception, detail:
            self.log.error('Error processing message: %s' % detail)
            return

        self.log.debug('Event: %s' % (str(evt.dump())))

        for func, data in self.callbacks:
            func(evt, data=data)
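# A rough usage sketch (not part of the original example) for the event
# subscriber above; the URI, pattern, and callback are illustrative, and
# event.Event is whatever the surrounding module defines.
from zmq.eventloop.ioloop import IOLoop

def on_event(evt, data=None):
    print evt.dump()

sub = Subscriber(sub_uri='tcp://127.0.0.1:5556', patterns=['zmqevt'])
sub.register_callback(on_event)
IOLoop.instance().start()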
Example #25
0
class Worker(object):

    """Class for the MDP worker side.

    Thin encapsulation of a zmq.DEALER socket.
    Provides a send method with optional timeout parameter.

    Will use a timeout to indicate a broker failure.
    """
    max_forks = 10

    ipc = 'ipc:///tmp/zmq-rpc-'+str(uuid4())
    HB_INTERVAL = 1000  # in milliseconds
    HB_LIVENESS = 3    # HBs to miss before connection counts as dead

    def __init__(self, context, endpoint, service, multicasts=()):
        """Initialize the MDPWorker.

        :param context:    is the zmq context to create the socket from
        :type context:     zmq.Context
        :param service:    service name - you can put hostname here
        :type service:     str
        :param multicasts: list of groups to subscribe
        :type multicasts:  list
        """
        self.context = context
        self.endpoint = endpoint
        self.service = service.encode('utf-8')  # convert to byte-string - required in python 3
        self.multicasts = [m.encode('utf-8') for m in multicasts]  # convert to byte-string
        self.stream = None
        self._tmo = None
        self.need_handshake = True
        self.ticker = None
        self._delayed_cb = None
        self._create_stream()
        self.forks = []
        self.curr_liveness = self.HB_LIVENESS

        socket = self.context.socket(zmq.ROUTER)
        socket.bind(self.ipc)
        self.stream_w = ZMQStream(socket)
        self.stream_w.on_recv(self._on_fork_response)
        self.reply_socket = None
        return

    def _create_stream(self):
        """Helper to create the socket and the stream.
        """
        self.on_log_event('broker.connect', 'Trying to connect to broker')
        socket = self.context.socket(zmq.DEALER)
        ioloop = IOLoop.instance()
        self.stream = ZMQStream(socket, ioloop)
        self.stream.on_recv(self._on_message)
        self.stream.socket.setsockopt(zmq.LINGER, 0)
        self.stream.connect(self.endpoint)
        self.ticker = PeriodicCallback(self._tick, self.HB_INTERVAL)
        self._send_ready()
        for m in self.multicasts:
            self._register_worker_to_multicast(m)
        self.ticker.start()
        return

    def _tick(self):
        """Method called every HB_INTERVAL milliseconds.
        """
        self.curr_liveness -= 1
        self.send_hb()
        if self.curr_liveness >= 0:
            return
        # ouch, connection seems to be dead
        self.on_log_event('broker.timeout', 'Connection to broker timed out, disconnecting')
        self.shutdown(False)
        # try to recreate it
        self._delayed_cb = DelayedCallback(self._create_stream, 5000)
        self._delayed_cb.start()
        return

    def send_hb(self):
        """Construct and send HB message to broker.
        """
        msg = [b'', MDP_WORKER_VERSION, b'\x05']
        self.stream.send_multipart(msg)
        return

    def shutdown(self, final=True):
        """Method to deactivate the worker connection completely.

        Will delete the stream and the underlying socket.

        :param final: if shutdown is final and we want to close all sockets
        :type final:  bool
        """

        if self.ticker:
            self.ticker.stop()
            self.ticker = None
        if not self.stream:
            return

        self.stream.on_recv(None)
        self.disconnect()
        self.stream.socket.close()
        self.stream.close()
        self.stream = None
        self.need_handshake = True

        if final:
            self.stream_w.socket.close()
            self.stream_w.close()
            self.stream = None
        return

    def disconnect(self):
        """Helper method to send the workers DISCONNECT message.
        """
        self.stream.socket.send_multipart([b'', MDP_WORKER_VERSION, b'\x06' ])
        self.curr_liveness = self.HB_LIVENESS
        return

    def _send_ready(self):
        """Helper method to prepare and send the workers READY message.
        """
        self.on_log_event('broker.ready', 'Sending ready to broker.')
        ready_msg = [b'', MDP_WORKER_VERSION, b'\x01', self.service]
        self.stream.send_multipart(ready_msg)
        self.curr_liveness = self.HB_LIVENESS
        return

    def _register_worker_to_multicast(self, name):
        """Helper method to register worker to multicast group

        :param name:  group name
        :type name:   str
        """
        self.on_log_event('broker.register-group', 'Subscribing to group \'%s\'.' % name)
        reg_msg = [b'', MDP_WORKER_VERSION, b'\x07', name]
        self.stream.send_multipart(reg_msg)
        self.curr_liveness = self.HB_LIVENESS
        return

    def _on_message(self, msg):
        """Helper method called on message receive.

        :param msg:  message parts
        :type msg:   list
        """
        # 1st part is empty
        msg.pop(0)
        # 2nd part is protocol version
        protocol_version = msg.pop(0)
        if protocol_version != MDP_WORKER_VERSION:  # version check, ignore old versions
            return
        # 3rd part is message type
        msg_type = msg.pop(0)
        # any message resets the liveness counter
        self.need_handshake = False
        self.curr_liveness = self.HB_LIVENESS
        if msg_type == b'\x06':  # disconnect
            self.curr_liveness = 0  # reconnect will be triggered by hb timer
        elif msg_type == b'\x02':  # request
            # remaining parts are the user message
            addresses, msg = self.split_address(msg)
            self._on_request(addresses, msg)
        elif msg_type == b'\x05':
            # received heartbeat - timer handled above
            pass
        else:
            # invalid message ignored
            pass
        return

    def _on_fork_response(self, to_send):
        """Helper method to send message from forked worker.
        This message is received by the main worker process and re-sent to the broker.

        :param to_send: address and data to send
        :type to_send:  list
        """
        self.stream.send_multipart(to_send)
        return

    def send_reply(self, addresses, msg, partial=False, exception=False):
        """Send reply from forked worker process.
        This method can be called only from do_work() method!
        This method will send messages to main worker listening on local socket in /tmp/zmq-rpc-...

        :param addresses: return address stack
        :type addresses:  list of str
        :param msg:       return value from called method
        :type msg:        mixed
        :param partial:   if the message is partial or final
        :type partial:    bool
        :param exception: if the message is exception, msg format is: {'class':'c', 'message':'m', 'traceback':'t'}
        :type exception:  bool
        """
        if not self.reply_socket:
            context = zmq.Context()
            self.reply_socket = context.socket(zmq.DEALER)
            self.reply_socket.connect(self.ipc)
        msg = msgpack.Packer().pack(msg)
        if exception:
            to_send = [b'', MDP_WORKER_VERSION, b'\x08']
        elif partial:
            to_send = [b'', MDP_WORKER_VERSION, b'\x03']
        else:
            to_send = [b'', MDP_WORKER_VERSION, b'\x04']
        to_send.extend(addresses)
        to_send.append(b'')
        if isinstance(msg, list):
            to_send.extend(msg)
        else:
            to_send.append(msg)
        m = self.reply_socket.send_multipart(to_send, track=True, copy=False)
        m.wait()
        if not partial:
            self.reply_socket.close()
            self.reply_socket = None
        return

    def send_message(self, addresses, msg, partial=False, error=False):
        """Send response message from main worker process.
        Please do not call this method from do_work()

        :param addresses: return address stack
        :type addresses:  list of str
        :param msg:       return value from called method
        :type msg:        mixed
        :param partial:   if the message is partial or final
        :type partial:    bool
        :param error:     if the message is error
        :type error:      bool
        """
        to_send = [b'', MDP_WORKER_VERSION]
        if partial:
            to_send.append(b'\x03')
        elif error:
            to_send.append(b'\x09')
        else:
            to_send.append(b'\x04')
        to_send.extend(addresses)
        to_send.append(b'')
        if isinstance(msg, list):
            to_send.extend(msg)
        else:
            to_send.append(msg)
        self.stream.send_multipart(to_send)
        return

    def _on_request(self, addresses, message):
        """Helper method called on RPC message receive.
        """
        # remove finished forks
        self._remove_finished_processes()
        # test max forks
        if len(self.forks) >= self.max_forks:
            self.send_message(addresses, b'max workers limit exceeded', error=True)
            self.on_max_forks(addresses, message)
            return

        name = message[0]
        args = msgpack.unpackb(message[1])
        kwargs = msgpack.unpackb(message[2])

        p = Process(target=self.do_work, args=(addresses, name, args, kwargs))
        p.start()
        p._args = None  # free memory
        self.forks.append(p)
        return

    def _remove_finished_processes(self):
        """Helper method dedicated to cleaning list of forked workers
        """
        for f in [f for f in self.forks if not f.is_alive()]:
            self.forks.remove(f)
        return

    def split_address(self, msg):
        """Function to split return Id and message received by ROUTER socket.

        Returns 2-tuple with return Id and remaining message parts.
        Empty frames after the Id are stripped.
        """
        ret_ids = []
        for i, p in enumerate(msg):
            if p:
                ret_ids.append(p)
            else:
                break
        return ret_ids, msg[i + 1:]

    def on_log_event(self, event, message):
        """Override this method if you want to log events from broker

        :type event:    str
        :param event:   event type - used for filtering
        :type message:  str
        :param message: log message

        :rtype: None
        """
        pass

    def on_max_forks(self, addresses, message):
        """This method is called when max_forks limit is reached
        You can override this method.
        """
        pass

    def do_work(self, addresses, name, args, kwargs):
        """Main method responsible for handling rpc calls, and sending response messages.
         Please override this method!

        :param addresses: return address stack
        :type addresses:  list of str
        :param name:      name of task
        :type name:       str
        :param args:      positional task arguments
        :type args:       list
        :param kwargs:    key-value task arguments
        :type kwargs:     dict
        """
        # this is example of simple response message
        self.send_reply(addresses, 'method not implemented')  # and send message to main worker
        # you can also send partial message and exception - read 'send_reply' docs
        return
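# A rough sketch (not part of the original example) of a concrete worker for
# the forking MDP variant above: override do_work(), which runs in a child
# process, and answer through send_reply(). Endpoint and service name are
# illustrative.
import zmq
from zmq.eventloop.ioloop import IOLoop

class EchoForkWorker(Worker):

    def do_work(self, addresses, name, args, kwargs):
        # runs in a forked child; the result is relayed to the broker by the parent
        self.send_reply(addresses, {'name': name, 'args': args, 'kwargs': kwargs})

worker = EchoForkWorker(zmq.Context(), 'tcp://127.0.0.1:5555', 'echo-host')
IOLoop.instance().start()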