Example 1
def multicall(conf, context, topic, msg, timeout, connection_pool):
    """Make a call that returns multiple times."""
    # TODO(pekowski): Remove all these comments in Havana.
    # For amqp_rpc_single_reply_queue = False,
    # Can't use 'with' for multicall, as it returns an iterator
    # that will continue to use the connection.  When it's done,
    # connection.close() will get called which will put it back into
    # the pool
    # For amqp_rpc_single_reply_queue = True,
    # The 'with' statement is mandatory for closing the connection
    LOG.debug(_('Making synchronous call on %s ...'), topic)
    msg_id = uuid.uuid4().hex
    msg.update({'_msg_id': msg_id})
    LOG.debug(_('MSG_ID is %s') % (msg_id))
    _add_unique_id(msg)
    pack_context(msg, context)

    # TODO(pekowski): Remove this flag and the code under the if clause
    #                 in Havana.
    if not conf.amqp_rpc_single_reply_queue:
        conn = ConnectionContext(conf, connection_pool)
        wait_msg = MulticallWaiter(conf, conn, timeout)
        conn.declare_direct_consumer(msg_id, wait_msg)
        conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout)
    else:
        with _reply_proxy_create_sem:
            if not connection_pool.reply_proxy:
                connection_pool.reply_proxy = ReplyProxy(conf, connection_pool)
        msg.update({'_reply_q': connection_pool.reply_proxy.get_reply_q()})
        wait_msg = MulticallProxyWaiter(conf, msg_id, timeout, connection_pool)
        with ConnectionContext(conf, connection_pool) as conn:
            conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout)
    return wait_msg
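For orientation: the waiter returned above is an iterator that yields each reply as it arrives. Below is a minimal sketch of how it is typically consumed, modeled on the call() helper that usually sits next to multicall() in these modules (the helper name and surrounding module context are assumptions, not taken verbatim from this project):

def call(conf, context, topic, msg, timeout, connection_pool):
    """Send a message on a topic and wait for a single response."""
    # Drain the multicall waiter; the last yielded value is the final
    # result of the remote method.
    rv = multicall(conf, context, topic, msg, timeout, connection_pool)
    rv = list(rv)
    if not rv:
        return
    return rv[-1]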
Example 2
def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None,
              failure=None, ending=False, log_failure=True):
    """Sends a reply or an error on the channel signified by msg_id.

    Failure should be a sys.exc_info() tuple.

    """
    with ConnectionContext(conf, connection_pool) as conn:
        if failure:
            failure = rpc_common.serialize_remote_exception(failure,
                                                            log_failure)

        try:
            msg = {'result': reply, 'failure': failure}
        except TypeError:
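            # Fall back to repr()-ing the reply's attributes.  Note that
            # building the dict literal above never actually raises
            # TypeError, so this branch is effectively unreachable; the
            # next example shows a later revision of this helper with the
            # try/except removed.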
            msg = {'result': dict((k, repr(v))
                   for k, v in reply.__dict__.iteritems()),
                   'failure': failure}
        if ending:
            msg['ending'] = True
        _add_unique_id(msg)
        # If a reply_q exists, add the msg_id to the reply and pass the
        # reply_q to direct_send() to use it as the response queue.
        # Otherwise use the msg_id for backward compatibility.
        if reply_q:
            msg['_msg_id'] = msg_id
            conn.direct_send(reply_q, rpc_common.serialize_msg(msg))
        else:
            conn.direct_send(msg_id, rpc_common.serialize_msg(msg))
Example 3
def msg_reply(conf,
              msg_id,
              reply_q,
              connection_pool,
              reply=None,
              failure=None,
              ending=False,
              log_failure=True):
    """Sends a reply or an error on the channel signified by msg_id.

    Failure should be a sys.exc_info() tuple.

    """
    with ConnectionContext(conf, connection_pool) as conn:
        if failure:
            failure = rpc_common.serialize_remote_exception(
                failure, log_failure)

        msg = {'result': reply, 'failure': failure}
        if ending:
            msg['ending'] = True
        _add_unique_id(msg)
        # If a reply_q exists, add the msg_id to the reply and pass the
        # reply_q to direct_send() to use it as the response queue.
        # Otherwise use the msg_id for backward compatibility.
        if reply_q:
            msg['_msg_id'] = msg_id
            conn.direct_send(reply_q, rpc_common.serialize_msg(msg))
        else:
            conn.direct_send(msg_id, rpc_common.serialize_msg(msg))
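A hedged sketch of how a dispatcher might drive msg_reply(): report the handler's return value on success, or pass the sys.exc_info() tuple on failure so it can be serialized as a remote exception. The _dispatch_and_reply helper and its arguments are hypothetical, not part of the code above:

import sys

def _dispatch_and_reply(conf, msg_id, reply_q, connection_pool, func, **kwargs):
    # Hypothetical dispatcher: run the handler, then answer on the reply
    # channel identified by msg_id / reply_q.
    try:
        result = func(**kwargs)
        msg_reply(conf, msg_id, reply_q, connection_pool, reply=result,
                  ending=True)
    except Exception:
        # msg_reply expects failure to be a sys.exc_info() tuple.
        msg_reply(conf, msg_id, reply_q, connection_pool,
                  failure=sys.exc_info(), log_failure=True)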
Example 4
def cast(conf, context, topic, msg, connection_pool):
    """Sends a message on a topic without waiting for a response."""
    LOG.debug(_('Making asynchronous cast on %s...'), topic)
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as conn:
        conn.topic_send(topic, rpc_common.serialize_msg(msg))
Example 5
def cast_to_server(conf, context, server_params, topic, msg, connection_pool):
    """Sends a message on a topic to a specific server."""
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool, pooled=False,
                           server_params=server_params) as conn:
        conn.topic_send(topic, rpc_common.serialize_msg(msg))
Example 6
def fanout_cast(conf, context, topic, msg, connection_pool):
    """Sends a message on a fanout exchange without waiting for a response."""
    LOG.debug(_('Making asynchronous fanout cast...'))
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as conn:
        conn.fanout_send(topic, rpc_common.serialize_msg(msg))
Example 7
def cast_to_server(conf, context, server_params, topic, msg, connection_pool):
    """Sends a message on a topic to a specific server."""
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf,
                           connection_pool,
                           pooled=False,
                           server_params=server_params) as conn:
        conn.topic_send(topic, rpc_common.serialize_msg(msg))
Example 8
def notify(conf, context, topic, msg, connection_pool, envelope):
    """Sends a notification event on a topic."""
    LOG.debug(_('Sending %(event_type)s on %(topic)s'),
              dict(event_type=msg.get('event_type'), topic=topic))
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as conn:
        if envelope:
            msg = rpc_common.serialize_msg(msg, force_envelope=True)
        conn.notify_send(topic, msg)
Example 9
def notify(conf, context, topic, msg, connection_pool, envelope):
    """Sends a notification event on a topic."""
    LOG.debug(_('Sending %(event_type)s on %(topic)s'),
              dict(event_type=msg.get('event_type'),
                   topic=topic))
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as conn:
        if envelope:
            msg = rpc_common.serialize_msg(msg)
        conn.notify_send(topic, msg)
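A hedged usage sketch for notify(): the message dict carries the 'event_type' key that the LOG.debug call above reads, plus whatever payload the notification needs. The topic and payload values here are made up, and conf, context and connection_pool are assumed to already exist in the calling module:

notification = {
    'event_type': 'compute.instance.create.end',  # example value
    'payload': {'instance_id': 'abc123'},          # example value
}
notify(conf, context, 'notifications.info', notification,
       connection_pool, envelope=True)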
Example 10
    def cast(self, msg_id, topic, data, envelope=False):
        msg_id = msg_id or 0

        if not (envelope or rpc_common._SEND_RPC_ENVELOPE):
            self.outq.send(
                map(bytes, (msg_id, topic, 'cast', _serialize(data))))
            return

        rpc_envelope = rpc_common.serialize_msg(data[1], envelope)
        zmq_msg = reduce(lambda x, y: x + y, rpc_envelope.items())
        self.outq.send(
            map(bytes, (msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg))
Example 11
    def cast(self, msg_id, topic, data, envelope=False):
        msg_id = msg_id or 0

        if not (envelope or rpc_common._SEND_RPC_ENVELOPE):
            self.outq.send(map(bytes,
                           (msg_id, topic, 'cast', _serialize(data))))
            return

        rpc_envelope = rpc_common.serialize_msg(data[1], envelope)
        zmq_msg = reduce(lambda x, y: x + y, rpc_envelope.items())
        self.outq.send(map(bytes,
                       (msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg))
Example 12
    def cast(self, msg_id, topic, data, envelope):
        msg_id = msg_id or 0

        if not envelope:
            self.outq.send(map(bytes,
                           (msg_id, topic, 'cast', _serialize(data))))
            return

        rpc_envelope = rpc_common.serialize_msg(data[1], envelope)
        zmq_msg = moves.reduce(lambda x, y: x + y, rpc_envelope.items())
        self.outq.send(map(bytes,
                       (msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg))
Example 13
    def cast(self, msg_id, topic, data, envelope):
        msg_id = msg_id or 0

        if not envelope:
            self.outq.send(
                map(bytes, (msg_id, topic, 'cast', _serialize(data))))
            return

        rpc_envelope = rpc_common.serialize_msg(data[1], envelope)
        zmq_msg = moves.reduce(lambda x, y: x + y, rpc_envelope.items())
        self.outq.send(
            map(bytes, (msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg))
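The reduce() call above flattens the envelope dict into an alternating key/value sequence that can be appended to the outgoing multipart message. Here is a standalone illustration of just that step (the envelope keys and values are made up for the example; the real ones come from rpc_common.serialize_msg()):

from functools import reduce

# Hypothetical envelope contents, purely to show the flattening.
rpc_envelope = {'oslo.version': '2.0', 'oslo.message': '{"args": {}}'}

# Each item is a (key, value) tuple; summing them concatenates the tuples
# into one flat tuple of alternating keys and values.
zmq_msg = reduce(lambda x, y: x + y, rpc_envelope.items())
print(zmq_msg)
# e.g. ('oslo.version', '2.0', 'oslo.message', '{"args": {}}')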
Example 14
def multicall(conf, context, topic, msg, timeout, connection_pool):
    """Make a call that returns multiple times."""
    LOG.debug(_('Making synchronous call on %s ...'), topic)
    msg_id = uuid.uuid4().hex
    msg.update({'_msg_id': msg_id})
    LOG.debug(_('MSG_ID is %s') % (msg_id))
    _add_unique_id(msg)
    pack_context(msg, context)

    with _reply_proxy_create_sem:
        if not connection_pool.reply_proxy:
            connection_pool.reply_proxy = ReplyProxy(conf, connection_pool)
    msg.update({'_reply_q': connection_pool.reply_proxy.get_reply_q()})
    wait_msg = MulticallProxyWaiter(conf, msg_id, timeout, connection_pool)
    with ConnectionContext(conf, connection_pool) as conn:
        conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout)
    return wait_msg