Example #1
    def __call__(self, message_data):
        """Consumer callback to call a method on a proxy object.

        Parses the message for validity and fires off a thread to call the
        proxy object method.

        Message data should be a dictionary with two keys:
            method: string representing the method to call
            args: dictionary of arg: value

        Example: {'method': 'echo', 'args': {'value': 42}}

        """
        # It is important to clear the context here, because at this point
        # the previous context is stored in local.store.context
        if hasattr(local.store, 'context'):
            del local.store.context
        rpc_common._safe_log(LOG.debug, _('received %s'), message_data)
        ctxt = unpack_context(self.conf, message_data)
        method = message_data.get('method')
        args = message_data.get('args', {})
        version = message_data.get('version', None)
        if not method:
            LOG.warn(_('no method for message: %s') % message_data)
            ctxt.reply(_('No method for message: %s') % message_data,
                       connection_pool=self.connection_pool)
            return
        self.pool.spawn_n(self._process_data, ctxt, version, method, args)
Example #2
    def __init__(self, addr, zmq_type, bind=True, subscribe=None):
        self.sock = ZMQ_CTX.socket(zmq_type)
        self.addr = addr
        self.type = zmq_type
        self.subscriptions = []

        # Support failures on sending/receiving on wrong socket type.
        self.can_recv = zmq_type in (zmq.PULL, zmq.SUB)
        self.can_send = zmq_type in (zmq.PUSH, zmq.PUB)
        self.can_sub = zmq_type in (zmq.SUB, )

        # Support list, str, & None for subscribe arg (cast to list)
        do_sub = {
            list: subscribe,
            str: [subscribe],
            type(None): []
        }[type(subscribe)]

        for f in do_sub:
            self.subscribe(f)

        str_data = {'addr': addr, 'type': self.socket_s(),
                    'subscribe': subscribe, 'bind': bind}

        LOG.debug(_("Connecting to %(addr)s with %(type)s"), str_data)
        LOG.debug(_("-> Subscribed to %(subscribe)s"), str_data)
        LOG.debug(_("-> bind: %(bind)s"), str_data)

        try:
            if bind:
                self.sock.bind(addr)
            else:
                self.sock.connect(addr)
        except Exception:
            raise RPCException(_("Could not open socket."))
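A minimal usage sketch of the wrapper above; the address and topic filter are made up for illustration.

# Hypothetical usage of the ZmqSocket wrapper above; address and filter
# are illustrative only.
sub_sock = ZmqSocket("tcp://127.0.0.1:9501", zmq.SUB,
                     bind=False, subscribe="my_topic")
# For a SUB socket the capability flags computed in __init__ are:
assert sub_sock.can_recv and sub_sock.can_sub and not sub_sock.can_send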
Example #3
def _multi_send(method, context, topic, msg, timeout=None):
    """
    Wraps the sending of messages,
    dispatches to the matchmaker and sends
    message to all relevant hosts.
    """
    conf = CONF
    LOG.debug(_("%(msg)s") % {'msg': ' '.join(map(pformat, (topic, msg)))})

    queues = matchmaker.queues(topic)
    LOG.debug(_("Sending message(s) to: %s"), queues)

    # Don't stack if we have no matchmaker results
    if len(queues) == 0:
        LOG.warn(_("No matchmaker results. Not casting."))
        # While not strictly a timeout, callers know how to handle
        # this exception and a timeout isn't too big a lie.
        raise rpc_common.Timeout("No match from matchmaker.")

    # This supports brokerless fanout (addresses > 1)
    for queue in queues:
        (_topic, ip_addr) = queue
        _addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port)

        if method.__name__ == '_cast':
            eventlet.spawn_n(method, _addr, context,
                             _topic, _topic, msg, timeout)
            return
        return method(_addr, context, _topic, _topic, msg, timeout)
Example #4
    def register(self, proxy, in_addr, zmq_type_in, out_addr=None,
                 zmq_type_out=None, in_bind=True, out_bind=True,
                 subscribe=None):

        LOG.info(_("Registering reactor"))

        if zmq_type_in not in (zmq.PULL, zmq.SUB):
            raise RPCException("Bad input socktype")

        # Items push in.
        inq = ZmqSocket(in_addr, zmq_type_in, bind=in_bind,
                        subscribe=subscribe)

        self.proxies[inq] = proxy
        self.sockets.append(inq)

        LOG.info(_("In reactor registered"))

        if not out_addr:
            return

        if zmq_type_out not in (zmq.PUSH, zmq.PUB):
            raise RPCException("Bad output socktype")

        # Items push out.
        outq = ZmqSocket(out_addr, zmq_type_out, bind=out_bind)

        self.mapping[inq] = outq
        self.mapping[outq] = inq
        self.sockets.append(outq)

        LOG.info(_("Out reactor registered"))
Example #5
    def _connect(self):
        """Connect to rabbit.  Re-establish any queues that may have
        been declared before if we are reconnecting.  Exceptions should
        be handled by the caller.
        """
        if self.connection:
            LOG.info(_("Reconnecting to AMQP server on "
                       "%(hostname)s:%(port)d") % self.params)
            try:
                self.connection.close()
            except self.connection_errors:
                pass
            # Setting this in case the next statement fails, though
            # it shouldn't be doing any network operations, yet.
            self.connection = None
        self.connection = kombu.connection.BrokerConnection(**self.params)
        self.connection_errors = self.connection.connection_errors
        if self.memory_transport:
            # Kludge to speed up tests.
            self.connection.transport.polling_interval = 0.0
        self.consumer_num = itertools.count(1)
        self.connection.connect()
        self.channel = self.connection.channel()
        # work around 'memory' transport bug in 1.1.3
        if self.memory_transport:
            self.channel._new_queue('ae.undeliver')
        for consumer in self.consumers:
            consumer.reconnect(self.channel)
        LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d'),
                 self.params)
Example #6
def register_opts(conf):
    """Registration of options for this driver."""
    #NOTE(ewindisch): ZMQ_CTX and matchmaker
    # are initialized here as this is as good
    # an initialization method as any.

    # We memoize through these globals
    global ZMQ_CTX
    global matchmaker
    global CONF

    if not CONF:
        conf.register_opts(zmq_opts)
        CONF = conf
    # Don't re-set, if this method is called twice.
    if not ZMQ_CTX:
        ZMQ_CTX = zmq.Context(conf.rpc_zmq_contexts)
    if not matchmaker:
        # rpc_zmq_matchmaker should be set to a 'module.Class'
        mm_path = conf.rpc_zmq_matchmaker.split('.')
        mm_module = '.'.join(mm_path[:-1])
        mm_class = mm_path[-1]

        # Only initialize a class.
        if mm_path[-1][0] not in string.ascii_uppercase:
            LOG.error(_("Matchmaker could not be loaded.\n"
                      "rpc_zmq_matchmaker is not a class."))
            raise RPCException(_("Error loading Matchmaker."))

        mm_impl = importutils.import_module(mm_module)
        mm_constructor = getattr(mm_impl, mm_class)
        matchmaker = mm_constructor()
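For reference, a short sketch of how the dotted rpc_zmq_matchmaker value is split above; the class path used here is illustrative.

# Illustration only: a hypothetical 'module.Class' matchmaker setting.
mm_path = "openstack.common.rpc.matchmaker.MatchMakerLocalhost".split('.')
mm_module = '.'.join(mm_path[:-1])  # 'openstack.common.rpc.matchmaker'
mm_class = mm_path[-1]              # 'MatchMakerLocalhost'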
Example #7
def _error_callback(exc):
    if isinstance(exc, socket.timeout):
        LOG.exception(_('Timed out waiting for RPC response: %s') %
                      str(exc))
        raise rpc_common.Timeout()
    else:
        LOG.exception(_('Failed to consume message from queue: %s') %
                      str(exc))
        info['do_consume'] = True
Example #8
def notify(context, publisher_id, event_type, priority, payload):
    """Sends a notification using the specified driver

    :param publisher_id: the source worker_type.host of the message
    :param event_type:   the literal type of event (ex. Instance Creation)
    :param priority:     patterned after the enumeration of Python logging
                         levels in the set (DEBUG, WARN, INFO, ERROR, CRITICAL)
    :param payload:       A python dictionary of attributes

    Outgoing message format includes the above parameters, and appends the
    following:

    message_id
      a UUID representing the id for this notification

    timestamp
      the GMT timestamp the notification was sent at

    The composite message will be constructed as a dictionary of the above
    attributes, which will then be sent via the transport mechanism defined
    by the driver.

    Message example::

        {'message_id': str(uuid.uuid4()),
         'publisher_id': 'compute.host1',
         'timestamp': timeutils.utcnow(),
         'priority': 'WARN',
         'event_type': 'compute.create_instance',
         'payload': {'instance_id': 12, ... }}

    """
    if priority not in log_levels:
        raise BadPriorityException(
            _('%s not in valid priorities') % priority)

    # Ensure everything is JSON serializable.
    payload = jsonutils.to_primitive(payload, convert_instances=True)

    msg = dict(message_id=str(uuid.uuid4()),
               publisher_id=publisher_id,
               event_type=event_type,
               priority=priority,
               payload=payload,
               timestamp=str(timeutils.utcnow()))

    for driver in _get_drivers():
        try:
            driver.notify(context, msg)
        except Exception as e:
            LOG.exception(_("Problem '%(e)s' attempting to "
                            "send to notification system. "
                            "Payload=%(payload)s") % locals())
Example #9
def serialize_remote_exception(failure_info):
    """Prepares exception data to be sent over rpc.

    Failure_info should be a sys.exc_info() tuple.

    """
    tb = traceback.format_exception(*failure_info)
    failure = failure_info[1]
    LOG.error(_("Returning exception %s to caller"), unicode(failure))
    LOG.error(tb)

    kwargs = {}
    if hasattr(failure, 'kwargs'):
        kwargs = failure.kwargs

    data = {
        'class': str(failure.__class__.__name__),
        'module': str(failure.__class__.__module__),
        'message': unicode(failure),
        'tb': tb,
        'args': failure.args,
        'kwargs': kwargs
    }

    json_data = jsonutils.dumps(data)

    return json_data
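A minimal sketch of the typical call site (Example #29 below uses the same pattern): capture the active exception via sys.exc_info() and serialize it for the reply.

import sys

try:
    raise ValueError("illustrative failure")
except Exception:
    # json_data carries class, module, message, traceback, args and kwargs.
    json_data = serialize_remote_exception(sys.exc_info())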
Example #10
def _callback(raw_message):
    message = self.channel.message_to_python(raw_message)
    try:
        callback(message.payload)
        message.ack()
    except Exception:
        LOG.exception(_("Failed to process message... skipping it."))
Example #11
def notify(conf, context, topic, msg, connection_pool):
    """Sends a notification event on a topic."""
    event_type = msg.get('event_type')
    LOG.debug(_('Sending %(event_type)s on %(topic)s'), locals())
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as conn:
        conn.notify_send(topic, msg)
Example #12
    def run(self, key):
        # Assume starts with "fanout~", strip it for lookup.
        nkey = key.split("fanout~")[1:][0]
        if not self._ring_has(nkey):
            LOG.warn(_("No key defining hosts for topic '%s', "
                       "see ringfile") % (nkey,))
            return []
        return map(lambda x: (key + "." + x, x),
                   self.ring[nkey] + ["localhost"])
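A worked illustration of the key handling above, assuming a made-up ring with two hosts for the 'scheduler' topic.

# Illustration only; ring contents are hypothetical.
key = "fanout~scheduler"
nkey = key.split("fanout~")[1:][0]           # -> 'scheduler'
ring = {'scheduler': ['host1', 'host2']}
result = map(lambda x: (key + "." + x, x), ring[nkey] + ["localhost"])
# -> [('fanout~scheduler.host1', 'host1'),
#     ('fanout~scheduler.host2', 'host2'),
#     ('fanout~scheduler.localhost', 'localhost')]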
Example #13
def multicall(conf, context, topic, msg, timeout, connection_pool):
    """Make a call that returns multiple times."""
    # Can't use 'with' for multicall, as it returns an iterator
    # that will continue to use the connection.  When it's done,
    # connection.close() will get called which will put it back into
    # the pool
    LOG.debug(_('Making asynchronous call on %s ...'), topic)
    msg_id = uuid.uuid4().hex
    msg.update({'_msg_id': msg_id})
    LOG.debug(_('MSG_ID is %s') % (msg_id))
    pack_context(msg, context)

    conn = ConnectionContext(conf, connection_pool)
    wait_msg = MulticallWaiter(conf, conn, timeout)
    conn.declare_direct_consumer(msg_id, wait_msg)
    conn.topic_send(topic, msg)
    return wait_msg
Example #14
def notify(context, message):
    """Passes notification to multiple notifiers in a list."""
    for driver in _get_drivers():
        try:
            driver.notify(context, message)
        except Exception as e:
            LOG.exception(_("Problem '%(e)s' attempting to send to "
                            "notification driver %(driver)s."), locals())
Example #15
    def _from_xml(self, datastring):
        plurals = set(self.metadata.get('plurals', {}))

        try:
            node = minidom.parseString(datastring).childNodes[0]
            return {node.nodeName: self._from_xml_node(node, plurals)}
        except expat.ExpatError:
            msg = _("cannot understand XML")
            raise exception.MalformedRequestBody(reason=msg)
Example #16
    def consume(self):
        """Fetch the message and pass it to the callback object"""
        message = self.receiver.fetch()
        try:
            self.callback(message.content)
        except Exception:
            LOG.exception(_("Failed to process message... skipping it."))
        finally:
            self.session.acknowledge(message)
Example #17
    def __call__(self, request):
        """WSGI method that controls (de)serialization and method dispatch."""

        try:
            action, action_args, accept = self.deserialize_request(request)
        except exception.InvalidContentType:
            msg = _("Unsupported Content-Type")
            return webob.exc.HTTPUnsupportedMediaType(explanation=msg)
        except exception.MalformedRequestBody:
            msg = _("Malformed request body")
            return webob.exc.HTTPBadRequest(explanation=msg)

        action_result = self.execute_action(action, request, **action_args)
        try:
            return self.serialize_response(action, action_result, accept)
        # return unserializable result (typically a webob exc)
        except Exception:
            return action_result
Example #18
    def consume(self, sock):
        #TODO(ewindisch): use zero-copy (i.e. references, not copying)
        data = sock.recv()
        LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data)
        if sock in self.mapping:
            LOG.debug(_("ROUTER RELAY-OUT %(data)s") % {
                'data': data})
            self.mapping[sock].send(data)
            return

        topic, msg_id, style, in_msg = data

        ctx, request = _deserialize(in_msg)
        ctx = RpcContext.unmarshal(ctx)

        proxy = self.proxies[sock]

        self.pool.spawn_n(self.process, style, topic,
                          proxy, ctx, request)
Example #19
def _serialize(data):
    """
    Serialization wrapper
    We prefer using JSON, but it cannot encode all types.
    Error if a developer passes us bad data.
    """
    try:
        return str(jsonutils.dumps(data, ensure_ascii=True))
    except TypeError:
        LOG.error(_("JSON serialization failed."))
        raise
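A minimal sketch of the wrapper's behaviour; the payload is illustrative.

# JSON-friendly payloads come back as a plain str; anything jsonutils
# cannot encode raises TypeError, which is logged and re-raised above.
wire_data = _serialize({'method': 'echo', 'args': {'value': 42}})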
Example #20
def notify(context, message):
    """Sends a notification to the RabbitMQ"""
    if not context:
        context = req_context.get_admin_context()
    priority = message.get("priority", CONF.default_notification_level)
    priority = priority.lower()
    for topic in CONF.notification_topics:
        topic = "%s.%s" % (topic, priority)
        try:
            rpc.notify(context, topic, message)
        except Exception as e:
            LOG.exception(_("Could not send notification to %(topic)s. "
                            "Payload=%(message)s"), locals())
Example #21
    def subscribe(self, msg_filter):
        """Subscribe."""
        if not self.can_sub:
            raise RPCException("Cannot subscribe on this socket.")
        LOG.debug(_("Subscribing to %s"), msg_filter)

        try:
            self.sock.setsockopt(zmq.SUBSCRIBE, msg_filter)
        except Exception:
            return

        self.subscriptions.append(msg_filter)
Example #22
def _call(addr, context, msg_id, topic, msg, timeout=None):
    # timeout_response is how long we wait for a response
    timeout = timeout or CONF.rpc_response_timeout

    # The msg_id is used to track replies.
    msg_id = str(uuid.uuid4().hex)

    # Replies always come into the reply service.
    reply_topic = "zmq_replies.%s" % CONF.rpc_zmq_host

    LOG.debug(_("Creating payload"))
    # Curry the original request into a reply method.
    mcontext = RpcContext.marshal(context)
    payload = {
        'method': '-reply',
        'args': {
            'msg_id': msg_id,
            'context': mcontext,
            'topic': reply_topic,
            'msg': [mcontext, msg]
        }
    }

    LOG.debug(_("Creating queue socket for reply waiter"))

    # Messages arriving async.
    # TODO(ewindisch): have reply consumer with dynamic subscription mgmt
    with Timeout(timeout, exception=rpc_common.Timeout):
        try:
            msg_waiter = ZmqSocket(
                "ipc://%s/zmq_topic_zmq_replies" % CONF.rpc_zmq_ipc_dir,
                zmq.SUB, subscribe=msg_id, bind=False
            )

            LOG.debug(_("Sending cast"))
            _cast(addr, context, msg_id, topic, payload)

            LOG.debug(_("Cast sent; Waiting reply"))
            # Blocks until receives reply
            msg = msg_waiter.recv()
            LOG.debug(_("Received message: %s"), msg)
            LOG.debug(_("Unpacking response"))
            responses = _deserialize(msg[-1])
        # ZMQError trumps the Timeout error.
        except zmq.ZMQError:
            raise RPCException("ZMQ Socket Error")
        finally:
            if 'msg_waiter' in vars():
                msg_waiter.close()

    # It seems we don't need to do all of the following,
    # but perhaps it would be useful for multicall?
    # One effect of this is that we're checking all
    # responses for Exceptions.
    for resp in responses:
        if isinstance(resp, types.DictType) and 'exc' in resp:
            raise rpc_common.deserialize_remote_exception(CONF, resp['exc'])

    return responses[-1]
Example #23
    def deserialize_body(self, request, action):
        if not request.body:
            LOG.debug(_("Empty body provided in request"))
            return {}

        try:
            content_type = request.get_content_type()
        except exception.InvalidContentType:
            LOG.debug(_("Unrecognized Content-Type provided in request"))
            raise

        if content_type is None:
            LOG.debug(_("No Content-Type provided in request"))
            return {}

        try:
            deserializer = self.get_body_deserializer(content_type)
        except exception.InvalidContentType:
            LOG.debug(_("Unable to deserialize body as provided Content-Type"))
            raise

        return deserializer.deserialize(request.body, action)
Example #24
    def consume(self, sock):
        ipc_dir = CONF.rpc_zmq_ipc_dir

        #TODO(ewindisch): use zero-copy (i.e. references, not copying)
        data = sock.recv()
        topic, msg_id, style, in_msg = data
        topic = topic.split('.', 1)[0]

        LOG.debug(_("CONSUMER GOT %s"), ' '.join(map(pformat, data)))

        # Handle zmq_replies magic
        if topic.startswith('fanout~'):
            sock_type = zmq.PUB

            # This doesn't change what is in the message,
            # it only specifies that these messages go to
            # the generic fanout topic.
            topic = 'fanout~'
        elif topic.startswith('zmq_replies'):
            sock_type = zmq.PUB
            inside = _deserialize(in_msg)
            msg_id = inside[-1]['args']['msg_id']
            response = inside[-1]['args']['response']
            LOG.debug(_("->response->%s"), response)
            data = [str(msg_id), _serialize(response)]
        else:
            sock_type = zmq.PUSH

            if topic not in self.topic_proxy:
                outq = ZmqSocket("ipc://%s/zmq_topic_%s" % (ipc_dir, topic),
                                 sock_type, bind=True)
                self.topic_proxy[topic] = outq
                self.sockets.append(outq)
                LOG.info(_("Created topic proxy: %s"), topic)

        LOG.debug(_("ROUTER RELAY-OUT START %(data)s") % {'data': data})
        self.topic_proxy[topic].send(data)
        LOG.debug(_("ROUTER RELAY-OUT SUCCEEDED %(data)s") % {'data': data})
Example #25
    def create_consumer(self, topic, proxy, fanout=False):
        # Only consume on the base topic name.
        topic = topic.split('.', 1)[0]

        LOG.info(_("Create Consumer for topic (%(topic)s)") %
                 {'topic': topic})

        # Consume direct-push fanout messages (relay to local consumers)
        if fanout:
            # If we're not in here, we can't receive direct fanout messages
            if CONF.rpc_zmq_host in matchmaker.queues(topic):
                # Consume from all remote publishers.
                self._consume_fanout(self.reactor, topic, proxy)
            else:
                LOG.warn("This service cannot receive direct PUSH fanout "
                         "messages without being known by the matchmaker.")
                return

            # Configure consumer for direct pushes.
            subscribe = (topic, fanout)[type(fanout) == str]
            sock_type = zmq.SUB
            topic = 'fanout~' + topic

            inaddr = "tcp://127.0.0.1:%s" % (CONF.rpc_zmq_port_pub, )
        else:
            sock_type = zmq.PULL
            subscribe = None

            # Receive messages from (local) proxy
            inaddr = "ipc://%s/zmq_topic_%s" % \
                (CONF.rpc_zmq_ipc_dir, topic)

        LOG.debug(_("Consumer is a zmq.%s"),
                  ['PULL', 'SUB'][sock_type == zmq.SUB])

        # Consume messages from local rpc-zmq-receiver daemon.
        self.reactor.register(proxy, inaddr, sock_type,
                              subscribe=subscribe, in_bind=False)
Example #26
def unpack_context(conf, msg):
    """Unpack context from msg."""
    context_dict = {}
    for key in list(msg.keys()):
        # NOTE(vish): Some versions of python don't like unicode keys
        #             in kwargs.
        key = str(key)
        if key.startswith('_context_'):
            value = msg.pop(key)
            context_dict[key[9:]] = value
    context_dict['msg_id'] = msg.pop('_msg_id', None)
    context_dict['conf'] = conf
    ctx = RpcContext.from_dict(context_dict)
    rpc_common._safe_log(LOG.debug, _('unpacked context: %s'), ctx.to_dict())
    return ctx
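A worked illustration of the key handling above: pack_context stores context values under '_context_'-prefixed keys, which unpack_context strips back off. The values here are made up.

# Illustration only; the context values are hypothetical.
msg = {'_context_user_id': 'u1', '_context_project_id': 'p1',
       '_msg_id': 'abc123', 'method': 'echo', 'args': {}}
# After unpack_context(conf, msg), context_dict holds
# {'user_id': 'u1', 'project_id': 'p1', 'msg_id': 'abc123', 'conf': conf}
# and msg is reduced to {'method': 'echo', 'args': {}}.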
Example #27
def add_driver(notification_driver):
    """Add a notification driver at runtime."""
    # Make sure the driver list is initialized.
    _get_drivers()
    if isinstance(notification_driver, basestring):
        # Load and add
        try:
            driver = importutils.import_module(notification_driver)
            _drivers[notification_driver] = driver
        except ImportError as e:
            LOG.exception(_("Failed to load notifier %s. "
                            "These notifications will not be sent.") %
                          notification_driver)
    else:
        # Driver is already loaded; just add the object.
        _drivers[notification_driver] = notification_driver
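A hedged usage sketch of add_driver() above; the module path is illustrative.

# Drivers may be registered by dotted module path (imported above via
# importutils) or by passing an already-imported object.
add_driver('openstack.common.notifier.log_notifier')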
Example #28
    def reconnect(self):
        """Handles reconnecting and re-establishing sessions and queues"""
        if self.connection.opened():
            try:
                self.connection.close()
            except qpid.messaging.exceptions.ConnectionError:
                pass

        while True:
            try:
                self.connection.open()
            except qpid.messaging.exceptions.ConnectionError as e:
                LOG.error(_("Unable to connect to AMQP server: %s"), e)
                time.sleep(self.conf.qpid_reconnect_interval or 1)
            else:
                break
Example #29
    def _get_response(self, ctx, proxy, topic, data):
        """Process a curried message and cast the result to topic."""
        LOG.debug(_("Running func with context: %s"), ctx.to_dict())
        data.setdefault('version', None)
        data.setdefault('args', [])

        try:
            result = proxy.dispatch(
                ctx, data['version'], data['method'], **data['args'])
            return ConsumerBase.normalize_reply(result, ctx.replies)
        except greenlet.GreenletExit:
            # ignore these since they are just from shutdowns
            pass
        except Exception:
            return {'exc':
                    rpc_common.serialize_remote_exception(sys.exc_info())}
Example #30
    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs

        if not message:
            try:
                message = self.message % kwargs

            except Exception as e:
                # kwargs doesn't match a variable in the message
                # log the issue and the kwargs
                LOG.exception(_('Exception in string format operation'))
                for name, value in kwargs.iteritems():
                    LOG.error("%s: %s" % (name, value))
                # at least get the core message out if something happened
                message = self.message

        super(RPCException, self).__init__(message)
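A hedged sketch of how such an exception base class is typically subclassed: a class-level 'message' template is filled from the keyword arguments, and a missing kwarg triggers the fallback path above. The subclass and template below are hypothetical.

# Hypothetical subclass for illustration only.
class HypotheticalTimeout(RPCException):
    message = _("Timed out waiting for a reply to message %(msg_id)s")

# HypotheticalTimeout(msg_id='abc123') renders the template; with a
# missing or mismatched kwarg the except branch logs the problem and
# falls back to the raw template.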