Example #1
def _multi_send(method, context, topic, msg, timeout=None,
                envelope=False, _msg_id=None, allowed_remote_exmods=None):
    """Wraps the sending of messages.

    Dispatches to the matchmaker and sends message to all relevant hosts.
    """
    allowed_remote_exmods = allowed_remote_exmods or []
    conf = CONF
    LOG.debug(' '.join(map(pformat, (topic, msg))))

    queues = _get_matchmaker().queues(topic)
    LOG.debug("Sending message(s) to: %s", queues)

    # Don't stack if we have no matchmaker results
    if not queues:
        LOG.warn(_("No matchmaker results. Not casting."))
        # While not strictly a timeout, callers know how to handle
        # this exception and a timeout isn't too big a lie.
        raise rpc_common.Timeout(_("No match from matchmaker."))

    # This supports brokerless fanout (addresses > 1)
    return_val = None
    for queue in queues:
        _topic, ip_addr = queue
        _addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port)

        if method.__name__ == '_cast':
            eventlet.spawn_n(method, _addr, context,
                             _topic, msg, timeout, envelope, _msg_id)
        else:
            return_val = method(_addr, context, _topic, msg, timeout,
                                envelope, allowed_remote_exmods)

    return return_val
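A minimal sketch of the fan-out performed above, assuming the matchmaker returned two (topic, host) pairs; the host names, IP addresses and the rpc_zmq_port value are illustrative only:

queues = [('scheduler.host-1', '10.0.0.1'), ('scheduler.host-2', '10.0.0.2')]
rpc_zmq_port = 9501  # assumed example value
addrs = ["tcp://%s:%s" % (ip_addr, rpc_zmq_port) for _topic, ip_addr in queues]
print(addrs)  # ['tcp://10.0.0.1:9501', 'tcp://10.0.0.2:9501']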
Example #2
    def _load_notifiers(self):
        """One-time load of notifier config file."""
        self.routing_groups = {}
        self.used_drivers = set()
        filename = CONF.routing_notifier_config
        if not filename:
            return

        # Infer which drivers are used from the config file.
        self.routing_groups = yaml.load(
            self._get_notifier_config_file(filename))
        if not self.routing_groups:
            self.routing_groups = {}  # In case we got None from load()
            return

        for group in self.routing_groups.values():
            self.used_drivers.update(group.keys())

        LOG.debug(_('loading notifiers from %(namespace)s') %
                  {'namespace': self.NOTIFIER_PLUGIN_NAMESPACE})
        self.plugin_manager = dispatch.DispatchExtensionManager(
            namespace=self.NOTIFIER_PLUGIN_NAMESPACE,
            check_func=self._should_load_plugin,
            invoke_on_load=True,
            invoke_args=None)
        if not list(self.plugin_manager):
            LOG.warning(_("Failed to load any notifiers "
                          "for %(namespace)s") %
                        {'namespace': self.NOTIFIER_PLUGIN_NAMESPACE})
Example #3
            def publisher(waiter):
                LOG.info(_("Creating proxy for topic: %s"), topic)

                try:
                    # The topic is received over the network,
                    # don't trust this input.
                    if self.badchars.search(topic) is not None:
                        emsg = _("Topic contained dangerous characters.")
                        LOG.warn(emsg)
                        raise RPCException(emsg)

                    out_sock = ZmqSocket("ipc://%s/zmq_topic_%s" %
                                         (ipc_dir, topic),
                                         sock_type, bind=True)
                except RPCException:
                    waiter.send_exception(*sys.exc_info())
                    return

                self.topic_proxy[topic] = eventlet.queue.LightQueue(
                    CONF.rpc_zmq_topic_backlog)
                self.sockets.append(out_sock)

                # It takes some time for a pub socket to open,
                # before we can have any faith in doing a send() to it.
                if sock_type == zmq.PUB:
                    eventlet.sleep(.5)

                waiter.send(True)

                while True:
                    data = self.topic_proxy[topic].get()
                    out_sock.send(data, copy=False)
Example #4
    def _load_notifiers(self):
        """One-time load of notifier config file."""
        self.routing_groups = {}
        self.used_drivers = set()
        filename = CONF.routing_notifier_config
        if not filename:
            return

        # Infer which drivers are used from the config file.
        self.routing_groups = yaml.load(
            self._get_notifier_config_file(filename))
        if not self.routing_groups:
            self.routing_groups = {}  # In case we got None from load()
            return

        for group in self.routing_groups.values():
            self.used_drivers.update(group.keys())

        LOG.debug(
            _('loading notifiers from %(namespace)s') %
            {'namespace': self.NOTIFIER_PLUGIN_NAMESPACE})
        self.plugin_manager = dispatch.DispatchExtensionManager(
            namespace=self.NOTIFIER_PLUGIN_NAMESPACE,
            check_func=self._should_load_plugin,
            invoke_on_load=True,
            invoke_args=None)
        if not list(self.plugin_manager):
            LOG.warning(
                _("Failed to load any notifiers "
                  "for %(namespace)s") %
                {'namespace': self.NOTIFIER_PLUGIN_NAMESPACE})
Example #5
    def consume_in_thread(self):
        """Runs the ZmqProxy service."""
        ipc_dir = CONF.rpc_zmq_ipc_dir
        consume_in = "tcp://%s:%s" % \
            (CONF.rpc_zmq_bind_address,
             CONF.rpc_zmq_port)
        consumption_proxy = InternalContext(None)

        try:
            os.makedirs(ipc_dir)
        except os.error:
            if not os.path.isdir(ipc_dir):
                with excutils.save_and_reraise_exception():
                    LOG.error(
                        _("Required IPC directory does not exist at"
                          " %s"), ipc_dir)
        try:
            self.register(consumption_proxy, consume_in, zmq.PULL)
        except zmq.ZMQError:
            if os.access(ipc_dir, os.X_OK):
                with excutils.save_and_reraise_exception():
                    LOG.error(_("Permission denied to IPC directory at"
                                " %s"), ipc_dir)
            with excutils.save_and_reraise_exception():
                LOG.error(
                    _("Could not create ZeroMQ receiver daemon. "
                      "Socket may already be in use."))

        super(ZmqProxy, self).consume_in_thread()
Example #6
    def consume_in_thread(self):
        """Runs the ZmqProxy service."""
        ipc_dir = CONF.rpc_zmq_ipc_dir
        consume_in = "tcp://%s:%s" % \
            (CONF.rpc_zmq_bind_address,
             CONF.rpc_zmq_port)
        consumption_proxy = InternalContext(None)

        try:
            os.makedirs(ipc_dir)
        except os.error:
            if not os.path.isdir(ipc_dir):
                with excutils.save_and_reraise_exception():
                    LOG.error(_("Required IPC directory does not exist at"
                                " %s"), ipc_dir)
        try:
            self.register(consumption_proxy,
                          consume_in,
                          zmq.PULL)
        except zmq.ZMQError:
            if os.access(ipc_dir, os.X_OK):
                with excutils.save_and_reraise_exception():
                    LOG.error(_("Permission denied to IPC directory at"
                                " %s"), ipc_dir)
            with excutils.save_and_reraise_exception():
                LOG.error(_("Could not create ZeroMQ receiver daemon. "
                            "Socket may already be in use."))

        super(ZmqProxy, self).consume_in_thread()
Example #7
            def publisher(waiter):
                LOG.info(_("Creating proxy for topic: %s"), topic)

                try:
                    # The topic is received over the network,
                    # don't trust this input.
                    if self.badchars.search(topic) is not None:
                        emsg = _("Topic contained dangerous characters.")
                        LOG.warn(emsg)
                        raise RPCException(emsg)

                    out_sock = ZmqSocket("ipc://%s/zmq_topic_%s" %
                                         (ipc_dir, topic),
                                         sock_type,
                                         bind=True)
                except RPCException:
                    waiter.send_exception(*sys.exc_info())
                    return

                self.topic_proxy[topic] = eventlet.queue.LightQueue(
                    CONF.rpc_zmq_topic_backlog)
                self.sockets.append(out_sock)

                # It takes some time for a pub socket to open,
                # before we can have any faith in doing a send() to it.
                if sock_type == zmq.PUB:
                    eventlet.sleep(.5)

                waiter.send(True)

                while True:
                    data = self.topic_proxy[topic].get()
                    out_sock.send(data, copy=False)
Example #8
    def reconnect(self, retry=None):
        """Handles reconnecting and re-establishing sessions and queues.
        Will retry up to retry number of times.
        retry = None or -1 means to retry forever
        retry = 0 means no retry
        retry = N means N retries
        """
        delay = 1
        attempt = 0
        loop_forever = False
        if retry is None or retry < 0:
            loop_forever = True

        while True:
            self._disconnect()

            attempt += 1
            broker = six.next(self.brokers)
            try:
                self._connect(broker)
            except qpid_exceptions.MessagingError as e:
                msg_dict = dict(e=e,
                                delay=delay,
                                retry=retry,
                                broker=broker)
                if not loop_forever and attempt > retry:
                    msg = _('Unable to connect to AMQP server on '
                            '%(broker)s after %(retry)d '
                            'tries: %(e)s') % msg_dict
                    LOG.error(msg)
                    raise exceptions.MessageDeliveryFailure(msg)
                else:
                    msg = _("Unable to connect to AMQP server on %(broker)s: "
                            "%(e)s. Sleeping %(delay)s seconds") % msg_dict
                    LOG.error(msg)
                    time.sleep(delay)
                    delay = min(delay + 1, 5)
            else:
                LOG.info(_('Connected to AMQP server on %s'), broker['host'])
                break

        self.session = self.connection.session()

        if self.consumers:
            consumers = self.consumers
            self.consumers = {}

            for consumer in six.itervalues(consumers):
                consumer.reconnect(self.session)
                self._register_consumer(consumer)

            LOG.debug("Re-established AMQP queues")
Example #11
 def _error_callback(exc):
     if isinstance(exc, qpid_exceptions.Empty):
         LOG.debug('Timed out waiting for RPC response: %s', exc)
         raise rpc_common.Timeout()
     else:
         LOG.exception(_('Failed to consume message from queue: %s'),
                       exc)
Example #12
    def consume(self, sock):
        # TODO(ewindisch): use zero-copy (i.e. references, not copying)
        data = sock.recv()
        LOG.debug("CONSUMER RECEIVED DATA: %s", data)

        proxy = self.proxies[sock]

        if data[2] == 'cast':  # Legacy protocol
            packenv = data[3]

            ctx, msg = _deserialize(packenv)
            request = rpc_common.deserialize_msg(msg)
            ctx = RpcContext.unmarshal(ctx)
        elif data[2] == 'impl_zmq_v2':
            packenv = data[4:]

            msg = unflatten_envelope(packenv)
            request = rpc_common.deserialize_msg(msg)

            # Unmarshal only after verifying the message.
            ctx = RpcContext.unmarshal(data[3])
        else:
            LOG.error(_("ZMQ Envelope version unsupported or unknown."))
            return

        self.pool.spawn_n(self.process, proxy, ctx, request)
Example #13
    def _get_response(self, ctx, proxy, topic, data):
        """Process a curried message and cast the result to topic."""
        LOG.debug("Running func with context: %s", ctx.to_dict())
        data.setdefault('version', None)
        data.setdefault('args', {})

        try:
            result = proxy.dispatch(ctx, data['version'], data['method'],
                                    data.get('namespace'), **data['args'])
            return ConsumerBase.normalize_reply(result, ctx.replies)
        except greenlet.GreenletExit:
            # ignore these since they are just from shutdowns
            pass
        except rpc_common.ClientException as e:
            LOG.debug("Expected exception during message handling (%s)",
                      e._exc_info[1])
            return {
                'exc':
                rpc_common.serialize_remote_exception(e._exc_info,
                                                      log_failure=False)
            }
        except Exception:
            LOG.error(_("Exception during message handling"))
            return {
                'exc': rpc_common.serialize_remote_exception(sys.exc_info())
            }
Example #14
    def __init__(self, addr, zmq_type, bind=True, subscribe=None):
        self.sock = _get_ctxt().socket(zmq_type)
        self.addr = addr
        self.type = zmq_type
        self.subscriptions = []

        # Support failures on sending/receiving on wrong socket type.
        self.can_recv = zmq_type in (zmq.PULL, zmq.SUB)
        self.can_send = zmq_type in (zmq.PUSH, zmq.PUB)
        self.can_sub = zmq_type in (zmq.SUB, )

        # Support list, str, & None for subscribe arg (cast to list)
        do_sub = {
            list: subscribe,
            str: [subscribe],
            type(None): []
        }[type(subscribe)]

        for f in do_sub:
            self.subscribe(f)

        str_data = {'addr': addr, 'type': self.socket_s(),
                    'subscribe': subscribe, 'bind': bind}

        LOG.debug("Connecting to %(addr)s with %(type)s", str_data)
        LOG.debug("-> Subscribed to %(subscribe)s", str_data)
        LOG.debug("-> bind: %(bind)s", str_data)

        try:
            if bind:
                self.sock.bind(addr)
            else:
                self.sock.connect(addr)
        except Exception:
            raise RPCException(_("Could not open socket."))
Example #15
    def direct_send(self, msg_id, msg):
        """Send a 'direct' message."""

        timer = rpc_common.DecayingTimer(duration=60)
        timer.start()
        # NOTE(sileht): retry for at least 60 seconds; after that there is
        # a good chance that the caller is really dead too...

        while True:
            try:
                self.publisher_send(DirectPublisher, msg_id, msg)
            except self.connection.channel_errors as exc:
                # NOTE(noelbk/sileht):
                # If rabbit dies, the consumer can be disconnected before the
                # publisher sends, and if the consumer hasn't declared the
                # queue, the publisher will send a message to an exchange
                # that's not bound to a queue, and the message will be lost.
                # So we set passive=True on the publisher exchange and catch
                # the 404 kombu ChannelError and retry until the exchange
                # appears.
                if exc.code == 404 and timer.check_return() > 0:
                    LOG.info(
                        _("The exchange to reply to %s doesn't "
                          "exist yet, retrying...") % msg_id)
                    time.sleep(1)
                    continue
                raise
            return
Example #16
class Timeout(RPCException):
    """Signifies that a timeout has occurred.

    This exception is raised if the rpc_response_timeout is reached while
    waiting for a response from the remote side.
    """
    msg_fmt = _('Timeout while waiting on RPC response - '
                'topic: "%(topic)s", RPC method: "%(method)s" '
                'info: "%(info)s"')

    def __init__(self, info=None, topic=None, method=None):
        """Initiates Timeout object.

        :param info: Extra info to convey to the user
        :param topic: The topic that the rpc call was sent to
        :param rpc_method_name: The name of the rpc method being
                                called
        """
        self.info = info
        self.topic = topic
        self.method = method
        super(Timeout, self).__init__(None,
                                      info=info or _('<unknown>'),
                                      topic=topic or _('<unknown>'),
                                      method=method or _('<unknown>'))
Example #17
 def inner_func(*args, **kwargs):
     last_log_time = 0
     last_exc_message = None
     exc_count = 0
     while True:
         try:
             return infunc(*args, **kwargs)
         except Exception as exc:
             if exc.message == last_exc_message:
                 exc_count += 1
             else:
                 exc_count = 1
             # Do not log any more frequently than once a minute unless
             # the exception message changes
             cur_time = int(time.time())
             if (cur_time - last_log_time > 60 or
                     exc.message != last_exc_message):
                 logging.exception(
                     _('Unexpected exception occurred %d time(s)... '
                       'retrying.') % exc_count)
                 last_log_time = cur_time
                 last_exc_message = exc.message
                 exc_count = 0
             # This should be a very rare event. In case it isn't, do
             # a sleep.
             time.sleep(1)
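This wrapper reads like the body of a retry decorator (oslo-incubator's excutils ships one of this shape); the sketch below only shows how such a wrapper would be wired up. The decorator name and the worker function are assumptions, only the inner loop comes from the example above:

import functools

def forever_retry_uncaught_exceptions(infunc):   # assumed name
    @functools.wraps(infunc)
    def inner_func(*args, **kwargs):
        # ... retry loop from the example above ...
        return infunc(*args, **kwargs)
    return inner_func

@forever_retry_uncaught_exceptions
def consume_messages():
    """Hypothetical long-running worker that should never die."""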
Example #18
    def create_consumer(self, topic, proxy, fanout=False):
        # Register with matchmaker.
        _get_matchmaker().register(topic, CONF.rpc_zmq_host)

        # Subscription scenarios
        if fanout:
            sock_type = zmq.SUB
            subscribe = ('', fanout)[type(fanout) == str]
            topic = 'fanout~' + topic.split('.', 1)[0]
        else:
            sock_type = zmq.PULL
            subscribe = None
            topic = '.'.join((topic.split('.', 1)[0], CONF.rpc_zmq_host))

        if topic in self.topics:
            LOG.info(_("Skipping topic registration. Already registered."))
            return

        # Receive messages from (local) proxy
        inaddr = "ipc://%s/zmq_topic_%s" % \
            (CONF.rpc_zmq_ipc_dir, topic)

        LOG.debug("Consumer is a zmq.%s", ['PULL',
                                           'SUB'][sock_type == zmq.SUB])

        self.reactor.register(proxy,
                              inaddr,
                              sock_type,
                              subscribe=subscribe,
                              in_bind=False)
        self.topics.append(topic)
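A short sketch of the topic naming above with illustrative values (rpc_zmq_host and the incoming topic are assumptions): fanout consumers keep only the base topic under a "fanout~" prefix, while plain consumers are pinned to the local host:

rpc_zmq_host = 'node-1'
topic = 'scheduler.node-7'
print('fanout~' + topic.split('.', 1)[0])                # fanout~scheduler
print('.'.join((topic.split('.', 1)[0], rpc_zmq_host)))  # scheduler.node-1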
Example #20
 def _call_notify(self, ext, context, message, accepted_drivers):
     """Emit the notification.
     """
     # accepted_drivers is passed in as a result of the map() function
     LOG.info(_("Routing '%(event)s' notification to '%(driver)s' driver") %
              {'event': message.get('event_type'), 'driver': ext.name})
     ext.obj.notify(context, message)
Example #21
    def register(self, proxy, in_addr, zmq_type_in,
                 in_bind=True, subscribe=None):

        LOG.info(_("Registering reactor"))

        if zmq_type_in not in (zmq.PULL, zmq.SUB):
            raise RPCException("Bad input socktype")

        # Items push in.
        inq = ZmqSocket(in_addr, zmq_type_in, bind=in_bind,
                        subscribe=subscribe)

        self.proxies[inq] = proxy
        self.sockets.append(inq)

        LOG.info(_("In reactor registered"))
Example #22
    def __init__(self, info=None, topic=None, method=None):
        """Initiates Timeout object.

        :param info: Extra info to convey to the user
        :param topic: The topic that the rpc call was sent to
        :param rpc_method_name: The name of the rpc method being
                                called
        """
        self.info = info
        self.topic = topic
        self.method = method
        super(Timeout, self).__init__(
            None,
            info=info or _('<unknown>'),
            topic=topic or _('<unknown>'),
            method=method or _('<unknown>'))
Example #24
def bool_from_string(subject, strict=False, default=False):
    """Interpret a string as a boolean.

    A case-insensitive match is performed such that strings matching 't',
    'true', 'on', 'y', 'yes', or '1' are considered True and, when
    `strict=False`, anything else returns the value specified by 'default'.

    Useful for JSON-decoded stuff and config file parsing.

    If `strict=True`, unrecognized values, including None, will raise a
    ValueError which is useful when parsing values passed in from an API call.
    Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
    """
    if not isinstance(subject, six.string_types):
        subject = six.text_type(subject)

    lowered = subject.strip().lower()

    if lowered in TRUE_STRINGS:
        return True
    elif lowered in FALSE_STRINGS:
        return False
    elif strict:
        acceptable = ', '.join(
            "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
        msg = _("Unrecognized value '%(val)s', acceptable values are:"
                " %(acceptable)s") % {'val': subject,
                                      'acceptable': acceptable}
        raise ValueError(msg)
    else:
        return default
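A usage sketch for the function above; TRUE_STRINGS and FALSE_STRINGS are module constants assumed to hold exactly the values listed in the docstring:

print(bool_from_string('YES'))                   # True  (case-insensitive)
print(bool_from_string('maybe'))                 # False (non-strict falls back to default)
print(bool_from_string('maybe', default=True))   # True
try:
    bool_from_string('maybe', strict=True)
except ValueError as exc:
    print(exc)                                   # lists the acceptable values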
Example #25
    def create_consumer(self, topic, proxy, fanout=False):
        # Register with matchmaker.
        _get_matchmaker().register(topic, CONF.rpc_zmq_host)

        # Subscription scenarios
        if fanout:
            sock_type = zmq.SUB
            subscribe = ('', fanout)[type(fanout) == str]
            topic = 'fanout~' + topic.split('.', 1)[0]
        else:
            sock_type = zmq.PULL
            subscribe = None
            topic = '.'.join((topic.split('.', 1)[0], CONF.rpc_zmq_host))

        if topic in self.topics:
            LOG.info(_("Skipping topic registration. Already registered."))
            return

        # Receive messages from (local) proxy
        inaddr = "ipc://%s/zmq_topic_%s" % \
            (CONF.rpc_zmq_ipc_dir, topic)

        LOG.debug("Consumer is a zmq.%s",
                  ['PULL', 'SUB'][sock_type == zmq.SUB])

        self.reactor.register(proxy, inaddr, sock_type,
                              subscribe=subscribe, in_bind=False)
        self.topics.append(topic)
Example #26
 def run(self, key):
     if not self._ring_has(key):
         LOG.warn(
             _("No key defining hosts for topic '%s', "
               "see ringfile"), key)
         return []
     host = next(self.ring0[key])
     return [(key + '.' + host, host)]
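A sketch of the lookup above, under the assumption that ring0 maps each topic to a cycling iterator over the hosts from the ringfile (host names are illustrative):

import itertools

ring0 = {'scheduler': itertools.cycle(['host-1', 'host-2'])}
key = 'scheduler'
host = next(ring0[key])
print([(key + '.' + host, host)])   # [('scheduler.host-1', 'host-1')]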
Example #27
 def _error_callback(exc):
     if isinstance(exc, socket.timeout):
         LOG.debug('Timed out waiting for RPC response: %s', exc)
         raise rpc_common.Timeout()
     else:
         LOG.exception(_('Failed to consume message from queue: %s'),
                       exc)
         self.do_consume = True
Example #29
 def run(self, key):
     # Assume starts with "fanout~", strip it for lookup.
     nkey = key.split('fanout~')[1:][0]
     if not self._ring_has(nkey):
         LOG.warn(
             _("No key defining hosts for topic '%s', "
               "see ringfile"), nkey)
         return []
     return map(lambda x: (key + '.' + x, x), self.ring[nkey])
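A sketch of the fanout expansion above, assuming the ring maps a bare topic name to the list of registered hosts (the ring contents here are illustrative):

ring = {'scheduler': ['host-1', 'host-2']}
key = 'fanout~scheduler'
nkey = key.split('fanout~')[1:][0]
print([(key + '.' + x, x) for x in ring[nkey]])
# [('fanout~scheduler.host-1', 'host-1'), ('fanout~scheduler.host-2', 'host-2')]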
Example #30
 def consume(self):
     """Fetch the message and pass it to the callback object."""
     message = self.receiver.fetch()
     try:
         self._unpack_json_msg(message)
         self.callback(QpidMessage(self.session, message))
     except Exception:
         LOG.exception(_("Failed to process message... skipping it."))
         self.session.acknowledge(message)
Example #31
 def run(self, key):
     if not self._ring_has(key):
         LOG.warn(
             _("No key defining hosts for topic '%s', "
               "see ringfile"), key
         )
         return []
     host = next(self.ring0[key])
     return [(key + '.' + host, host)]
Example #32
 def __exit__(self, exc_type, exc_val, exc_tb):
     if exc_type is not None:
         logging.error(_('Original exception being dropped: %s'),
                       traceback.format_exception(self.type_,
                                                  self.value,
                                                  self.tb))
         return False
     if self.reraise:
         raise self.type_, self.value, self.tb
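A self-contained sketch of the save-and-reraise pattern this __exit__ implements (simplified: no 'reraise' toggle, Python 3 raise spelling): the exception that was in flight on __enter__ is re-raised after the with-block, unless the block itself raised, in which case the original is logged and dropped. The class name is a stand-in:

import logging
import sys
import traceback


class SaveAndReraiseSketch(object):
    def __enter__(self):
        self.type_, self.value, self.tb = sys.exc_info()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            logging.error('Original exception being dropped: %s',
                          traceback.format_exception(self.type_,
                                                     self.value,
                                                     self.tb))
            return False
        raise self.value


try:
    try:
        1 / 0
    except ZeroDivisionError:
        with SaveAndReraiseSketch():
            logging.error('cleanup failed; original will be re-raised')
except ZeroDivisionError:
    print('original ZeroDivisionError was preserved')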
Example #34
def string_to_bytes(text, unit_system='IEC', return_int=False):
    """Converts a string into an float representation of bytes.

    The units supported for IEC ::

        Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it)
        KB, KiB, MB, MiB, GB, GiB, TB, TiB

    The units supported for SI ::

        kb(it), Mb(it), Gb(it), Tb(it)
        kB, MB, GB, TB

    Note that the SI unit system does not support capital letter 'K'

    :param text: String input for bytes size conversion.
    :param unit_system: Unit system for byte size conversion.
    :param return_int: If True, returns integer representation of text
                       in bytes. (default: decimal)
    :returns: Numerical representation of text in bytes.
    :raises ValueError: If text has an invalid value.

    """
    try:
        base, reg_ex = UNIT_SYSTEM_INFO[unit_system]
    except KeyError:
        msg = _('Invalid unit system: "%s"') % unit_system
        raise ValueError(msg)
    match = reg_ex.match(text)
    if match:
        magnitude = float(match.group(1))
        unit_prefix = match.group(2)
        if match.group(3) in ['b', 'bit']:
            magnitude /= 8
    else:
        msg = _('Invalid string format: %s') % text
        raise ValueError(msg)
    if not unit_prefix:
        res = magnitude
    else:
        res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix])
    if return_int:
        return int(math.ceil(res))
    return res
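A usage sketch for the conversion above, assuming the IEC prefixes in UNIT_SYSTEM_INFO are 1024-based and the SI ones 1000-based, as the docstring's unit tables suggest:

print(string_to_bytes('1KiB'))                    # 1024.0
print(string_to_bytes('1Kb'))                     # 128.0  (bit units are divided by 8)
print(string_to_bytes('1kB', unit_system='SI'))   # 1000.0
print(string_to_bytes('1.5KB', return_int=True))  # 1536   (math.ceil of 1536.0)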
Example #35
 def _call_notify(self, ext, context, message, accepted_drivers):
     """Emit the notification.
     """
     # accepted_drivers is passed in as a result of the map() function
     LOG.info(
         _("Routing '%(event)s' notification to '%(driver)s' driver") % {
             'event': message.get('event_type'),
             'driver': ext.name
         })
     ext.obj.notify(context, message)
Example #36
    def unregister(self, key, host):
        """Unregister a topic."""
        if (key, host) in self.host_topic:
            del self.host_topic[(key, host)]

        self.hosts.discard(host)
        self.backend_unregister(key, '.'.join((key, host)))

        LOG.info(_("Matchmaker unregistered: %(key)s, %(host)s"),
                 {'key': key, 'host': host})
Example #37
 def run(self, key):
     # Assume starts with "fanout~", strip it for lookup.
     nkey = key.split('fanout~')[1:][0]
     if not self._ring_has(nkey):
         LOG.warn(
             _("No key defining hosts for topic '%s', "
               "see ringfile"), nkey
         )
         return []
     return map(lambda x: (key + '.' + x, x), self.ring[nkey])
Example #38
def _get_matchmaker(*args, **kwargs):
    global matchmaker
    if not matchmaker:
        mm = CONF.rpc_zmq_matchmaker
        if mm.endswith('matchmaker.MatchMakerRing'):
            mm = mm.replace('matchmaker', 'matchmaker_ring')
            LOG.warn(_('rpc_zmq_matchmaker = %(orig)s is deprecated; use'
                       ' %(new)s instead') % dict(
                     orig=CONF.rpc_zmq_matchmaker, new=mm))
        matchmaker = importutils.import_object(mm, *args, **kwargs)
    return matchmaker
Example #39
def _serialize(data):
    """Serialization wrapper.

    We prefer using JSON, but it cannot encode all types.
    Error if a developer passes us bad data.
    """
    try:
        return jsonutils.dumps(data, ensure_ascii=True)
    except TypeError:
        with excutils.save_and_reraise_exception():
            LOG.error(_("JSON serialization failed."))
Example #41
    def _callback_handler(self, message, callback):
        """Call callback with deserialized message.

        Messages are processed and then ack'ed.
        """

        try:
            callback(RabbitMessage(message))
        except Exception:
            LOG.exception(_("Failed to process message" " ... skipping it."))
            message.ack()
Example #42
    def unregister(self, key, host):
        """Unregister a topic."""
        if (key, host) in self.host_topic:
            del self.host_topic[(key, host)]

        self.hosts.discard(host)
        self.backend_unregister(key, '.'.join((key, host)))

        LOG.info(_("Matchmaker unregistered: %(key)s, %(host)s"), {
            'key': key,
            'host': host
        })
Example #43
def is_enabled():
    cert_file = CONF.ssl.cert_file
    key_file = CONF.ssl.key_file
    ca_file = CONF.ssl.ca_file
    use_ssl = cert_file or key_file

    if cert_file and not os.path.exists(cert_file):
        raise RuntimeError(_("Unable to find cert_file : %s") % cert_file)

    if ca_file and not os.path.exists(ca_file):
        raise RuntimeError(_("Unable to find ca_file : %s") % ca_file)

    if key_file and not os.path.exists(key_file):
        raise RuntimeError(_("Unable to find key_file : %s") % key_file)

    if use_ssl and (not cert_file or not key_file):
        raise RuntimeError(_("When running server in SSL mode, you must "
                             "specify both a cert_file and key_file "
                             "option value in your configuration file"))

    return use_ssl
Example #44
def _get_matchmaker(*args, **kwargs):
    global matchmaker
    if not matchmaker:
        mm = CONF.rpc_zmq_matchmaker
        if mm.endswith('matchmaker.MatchMakerRing'):
            mm = mm.replace('matchmaker', 'matchmaker_ring')
            LOG.warn(
                _('rpc_zmq_matchmaker = %(orig)s is deprecated; use'
                  ' %(new)s instead') %
                dict(orig=CONF.rpc_zmq_matchmaker, new=mm))
        matchmaker = importutils.import_object(mm, *args, **kwargs)
    return matchmaker
Example #45
    def _callback_handler(self, message, callback):
        """Call callback with deserialized message.

        Messages are processed and then ack'ed.
        """

        try:
            callback(RabbitMessage(message))
        except Exception:
            LOG.exception(_("Failed to process message"
                            " ... skipping it."))
            message.ack()
Example #46
def _multi_send(method,
                context,
                topic,
                msg,
                timeout=None,
                envelope=False,
                _msg_id=None,
                allowed_remote_exmods=None):
    """Wraps the sending of messages.

    Dispatches to the matchmaker and sends message to all relevant hosts.
    """
    allowed_remote_exmods = allowed_remote_exmods or []
    conf = CONF
    LOG.debug(' '.join(map(pformat, (topic, msg))))

    queues = _get_matchmaker().queues(topic)
    LOG.debug("Sending message(s) to: %s", queues)

    # Don't stack if we have no matchmaker results
    if not queues:
        LOG.warn(_("No matchmaker results. Not casting."))
        # While not strictly a timeout, callers know how to handle
        # this exception and a timeout isn't too big a lie.
        raise rpc_common.Timeout(_("No match from matchmaker."))

    # This supports brokerless fanout (addresses > 1)
    return_val = None
    for queue in queues:
        _topic, ip_addr = queue
        _addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port)

        if method.__name__ == '_cast':
            eventlet.spawn_n(method, _addr, context, _topic, msg, timeout,
                             envelope, _msg_id)
        else:
            return_val = method(_addr, context, _topic, msg, timeout, envelope,
                                allowed_remote_exmods)

    return return_val
Example #47
    def register(self,
                 proxy,
                 in_addr,
                 zmq_type_in,
                 in_bind=True,
                 subscribe=None):

        LOG.info(_("Registering reactor"))

        if zmq_type_in not in (zmq.PULL, zmq.SUB):
            raise RPCException("Bad input socktype")

        # Items push in.
        inq = ZmqSocket(in_addr,
                        zmq_type_in,
                        bind=in_bind,
                        subscribe=subscribe)

        self.proxies[inq] = proxy
        self.sockets.append(inq)

        LOG.info(_("In reactor registered"))
Example #48
def is_enabled():
    cert_file = CONF.ssl.cert_file
    key_file = CONF.ssl.key_file
    ca_file = CONF.ssl.ca_file
    use_ssl = cert_file or key_file

    if cert_file and not os.path.exists(cert_file):
        raise RuntimeError(_("Unable to find cert_file : %s") % cert_file)

    if ca_file and not os.path.exists(ca_file):
        raise RuntimeError(_("Unable to find ca_file : %s") % ca_file)

    if key_file and not os.path.exists(key_file):
        raise RuntimeError(_("Unable to find key_file : %s") % key_file)

    if use_ssl and (not cert_file or not key_file):
        raise RuntimeError(
            _("When running server in SSL mode, you must "
              "specify both a cert_file and key_file "
              "option value in your configuration file"))

    return use_ssl
Example #49
    def check_return(self, timeout_callback, *args, **kwargs):
        maximum = kwargs.pop('maximum', None)
        if self._duration is None:
            return None if maximum is None else maximum
        if self._ends_at is None:
            raise RuntimeError(_("Can not check/return a timeout from a timer"
                               " that has not been started."))

        left = self._ends_at - time.time()
        if left <= 0:
            timeout_callback(*args, **kwargs)

        return left if maximum is None else min(left, maximum)
Example #50
    def check_return(self, timeout_callback=None, *args, **kwargs):
        maximum = kwargs.pop('maximum', None)
        if self._duration is None:
            return None if maximum is None else maximum
        if self._ends_at is None:
            raise RuntimeError(
                _("Can not check/return a timeout from a timer"
                  " that has not been started."))

        left = self._ends_at - time.time()
        if left <= 0 and timeout_callback is not None:
            timeout_callback(*args, **kwargs)

        return left if maximum is None else min(left, maximum)
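A runnable sketch of the timer contract check_return() relies on: once started with a duration, the timer reports how many seconds of its budget remain (optionally clamped by 'maximum'), which is what direct_send() above uses to bound its retries. The class name is a stand-in; only the check_return() logic mirrors the snippets:

import time


class SketchTimer(object):
    def __init__(self, duration=None):
        self._duration = duration
        self._ends_at = None

    def start(self):
        if self._duration is not None:
            self._ends_at = time.time() + self._duration
        return self

    def check_return(self, maximum=None):
        if self._duration is None:
            return maximum
        left = self._ends_at - time.time()
        return left if maximum is None else min(left, maximum)


timer = SketchTimer(duration=60).start()
print(timer.check_return() > 0)        # True for the first 60 seconds
print(timer.check_return(maximum=1))   # never more than 1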
Example #52
 def _connect(self, broker):
     """Connect to rabbit.  Re-establish any queues that may have
     been declared before if we are reconnecting.  Exceptions should
     be handled by the caller.
     """
     LOG.info(_("Connecting to AMQP server on "
                "%(hostname)s:%(port)d"), broker)
     self.connection = kombu.connection.BrokerConnection(**broker)
     self.connection_errors = self.connection.connection_errors
     self.channel_errors = self.connection.channel_errors
     if self.memory_transport:
         # Kludge to speed up tests.
         self.connection.transport.polling_interval = 0.0
     self.do_consume = True
     self.consumer_num = itertools.count(1)
     self.connection.connect()
     self.channel = self.connection.channel()
     # work around 'memory' transport bug in 1.1.3
     if self.memory_transport:
         self.channel._new_queue('ae.undeliver')
     for consumer in self.consumers:
         consumer.reconnect(self.channel)
     LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d'),
              broker)
Example #53
    def start_heartbeat(self):
        """Implementation of MatchMakerBase.start_heartbeat.

        Launches greenthread looping send_heartbeats(),
        yielding for CONF.matchmaker_heartbeat_freq seconds
        between iterations.
        """
        if not self.hosts:
            raise MatchMakerException(_("Register before starting heartbeat."))

        def do_heartbeat():
            while True:
                self.send_heartbeats()
                eventlet.sleep(CONF.matchmaker_heartbeat_freq)

        self._heart = eventlet.spawn(do_heartbeat)
Example #54
    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs

        if not message:
            try:
                message = self.msg_fmt % kwargs

            except Exception:
                # kwargs doesn't match a variable in the message
                # log the issue and the kwargs
                LOG.exception(_('Exception in string format operation'))
                for name, value in six.iteritems(kwargs):
                    LOG.error("%s: %s", name, value)
                # at least get the core message out if something happened
                message = self.msg_fmt

        super(RPCException, self).__init__(message)
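A short sketch of what the msg_fmt interpolation in this constructor produces for the Timeout subclass shown earlier (format string copied from that example; the topic and method values are illustrative):

msg_fmt = ('Timeout while waiting on RPC response - '
           'topic: "%(topic)s", RPC method: "%(method)s" '
           'info: "%(info)s"')
print(msg_fmt % {'topic': 'scheduler', 'method': 'run_instance',
                 'info': '<unknown>'})
# Timeout while waiting on RPC response - topic: "scheduler",
# RPC method: "run_instance" info: "<unknown>"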
Example #55
    def start_heartbeat(self):
        """Implementation of MatchMakerBase.start_heartbeat.

        Launches greenthread looping send_heartbeats(),
        yielding for CONF.matchmaker_heartbeat_freq seconds
        between iterations.
        """
        if not self.hosts:
            raise MatchMakerException(
                _("Register before starting heartbeat."))

        def do_heartbeat():
            while True:
                self.send_heartbeats()
                eventlet.sleep(CONF.matchmaker_heartbeat_freq)

        self._heart = eventlet.spawn(do_heartbeat)
Example #56
 def _dispatch_and_reply(self, incoming):
     try:
         incoming.reply(self._dispatch(incoming.ctxt,
                                       incoming.message))
     except ExpectedException as e:
         LOG.debug(u'Expected exception during message handling (%s)',
                   e.exc_info[1])
         incoming.reply(failure=e.exc_info, log_failure=False)
     except Exception as e:
         # sys.exc_info() is deleted by LOG.exception().
         exc_info = sys.exc_info()
         LOG.error(_('Exception during message handling: %s'), e,
                   exc_info=exc_info)
         incoming.reply(failure=exc_info)
         # NOTE(dhellmann): Remove circular object reference
         # between the current stack frame and the traceback in
         # exc_info.
         del exc_info