Example #1
 def publish(self, topic, data):
     LOG.debug("Publishing message on topic %s" % topic)
     with self.get_connection() as conn:
         channel = conn.channel()
         exchange = self.declare_exchange(channel, topic, 'fanout')
         producer = Producer(channel, exchange=exchange, auto_declare=False)
         producer.publish(data)
Example #2
class MqClient(object):
    """
    exchange='E_X7_W2S', queue='Q_X7_W2S',routing_key = 'RK_X7_W2S'
    """

    def __init__(self, kwargs):
        if kwargs:
            self.kwargs = kwargs
        else:
            self.kwargs = MqDict

    def connect(self, hostname="localhost", userid="guest", password="******", virtual_host="/"):
        conn = BrokerConnection(hostname, userid, password, virtual_host)
        # define Web2Server exchange
        exchange = Exchange(self.kwargs["X7_E"], type="direct")
        # queue = Queue(self.kwargs["X7_Q"], exchange, routing_key=self.kwargs["X7_RK"])
        channel = conn.channel()

        self.producer = Producer(channel, exchange, routing_key=self.kwargs["X7_RK"])

    def send(self, msg):
        self.producer.publish(msg, serializer="json", compression="zlib")

    def close(self):
        pass
Example #3
File: amqp.py Project: bernytt/alerta-ui
class FanoutPublisher(PluginBase):

    def __init__(self):

        if app.debug:
            setup_logging(loglevel='DEBUG', loggers=[''])

        self.connection = BrokerConnection(app.config['AMQP_URL'])
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s', app.config['AMQP_URL'], e)
            raise RuntimeError

        self.channel = self.connection.channel()
        self.exchange_name = app.config['AMQP_TOPIC']

        self.exchange = Exchange(name=self.exchange_name, type='fanout', channel=self.channel)
        self.producer = Producer(exchange=self.exchange, channel=self.channel)

        LOG.info('Configured fanout publisher on topic "%s"', app.config['AMQP_TOPIC'])

    def pre_receive(self, alert):

        return alert

    def post_receive(self, alert):

        LOG.info('Sending message %s to AMQP topic "%s"', alert.get_id(), app.config['AMQP_TOPIC'])
        LOG.debug('Message: %s', alert.get_body())

        self.producer.publish(alert.get_body(), declare=[self.exchange], retry=True)
Example #4
def insert_push_notification(request):
    if request.method != "POST":
        return HttpResponse(status=405)

    try:
        content = json.loads(request.body)

        if 'user_id' in content and 'message' in content:
            # get a connection to RabbitMQ broker, create a channel and create a
            # producer for pushing the message to the appropriate CAMI event exchange
            with Connection(settings.BROKER_URL) as conn:
                channel = conn.channel()

                inserter = Producer(
                    exchange=settings.PUSH_NOTIFICATIONS_EXCHANGE,
                    channel=channel,
                    routing_key="push_notification"
                )
                inserter.publish(request.body)

                logger.debug("[insertion] New push notification was enqueued: %s", str(content))

                return HttpResponse(status=201)
    except Exception as e:
        logger.debug("[insertion] ERROR! Exception caught in insert_push_notification method: %s", e.message)

    return HttpResponse(status=400)
Example #5
    def publish(self, message):
        """Publishes a pulse message to the proper exchange."""

        if not self.exchange:
            raise InvalidExchange(self.exchange)

        if not message:
            raise MalformedMessage(message)

        message._prepare()

        if not self.connection:
            self.connect()

        producer = Producer(channel=self.connection,
                            exchange=Exchange(self.exchange, type='topic'),
                            routing_key=message.routing_key)

        # The message is actually a simple envelope format with a payload and
        # some metadata.
        final_data = {}
        final_data['payload'] = message.data
        final_data['_meta'] = message.metadata.copy()
        final_data['_meta'].update({
            'exchange': self.exchange,
            'routing_key': message.routing_key,
            'serializer': self.config.serializer,
            'sent': time_to_string(datetime.now(timezone(self.config.broker_timezone)))
        })

        producer.publish(final_data, serializer=self.config.serializer)
Example #6
def insert_measurement(request):
    if request.method != "POST":
        return HttpResponse(status=405)

    try:
        content = json.loads(request.body)
        if 'measurement_type' in content:
            # get a connection to RabbitMQ broker, create a channel and create a
            # producer for pushing the message to the measurements exchange
            with Connection(settings.BROKER_URL) as conn:
                channel = conn.channel()

                inserter = Producer(
                    exchange=settings.MEASUREMENTS_EXCHANGE,
                    channel=channel,
                    routing_key="measurement." + content['measurement_type']
                )
                inserter.publish(request.body)

                logger.debug("[insertion] New measurement was enqueued: %s", str(content))

                return HttpResponse(status=201)
    except Exception as e:
        logger.debug("[insertion] ERROR! Exception caught in insert_measurement method: %s", e.message)

    return HttpResponse(status=400)
Example #7
    def test_search(self):
        self.harvest_path = "/sfm-data/collection_set/test_collection/test_1"
        harvest_msg = {
            "id": "test:1",
            "type": "twitter_search",
            "path": self.harvest_path,
            "seeds": [
                {
                    "id": "seed_id3",
                    "token": "gwu"
                }
            ],
            "credentials": {
                "consumer_key": tests.TWITTER_CONSUMER_KEY,
                "consumer_secret": tests.TWITTER_CONSUMER_SECRET,
                "access_token": tests.TWITTER_ACCESS_TOKEN,
                "access_token_secret": tests.TWITTER_ACCESS_TOKEN_SECRET
            },
            "collection_set": {
                "id": "test_collection_set"
            },
            "collection": {
                "id": "test_collection"
            },
            "options": {
                "web_resources": True,
                "media": True,
                "tweets": True
            }
        }
        with self._create_connection() as connection:
            bound_exchange = self.exchange(connection)
            producer = Producer(connection, exchange=bound_exchange)
            producer.publish(harvest_msg, routing_key="harvest.start.twitter.twitter_search")

            status_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:1", status_msg["id"])
            # Running
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            # Another running message
            status_msg = self._wait_for_message(self.result_queue, connection)
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            # Now wait for result message.
            result_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:1", result_msg["id"])
            # Success
            self.assertEqual(STATUS_SUCCESS, result_msg["status"])
            # Some tweets
            self.assertTrue(result_msg["stats"][date.today().isoformat()]["tweets"])

            # Web harvest message.
            web_harvest_msg = self._wait_for_message(self.web_harvest_queue, connection)
            self.assertTrue(len(web_harvest_msg["seeds"]))

            # Warc created message.
            self.assertTrue(self._wait_for_message(self.warc_created_queue, connection))
Example #8
def send_message(a, b):
    with Connection(broker_url) as conn:
        channel = conn.channel()
        exchange = Exchange(custom_exchange, type='direct')
        producer = Producer(channel, exchange=exchange, serializer='json')
        producer.maybe_declare(exchange)
        producer.publish(routing_key=custom_rk, body={'a': a, 'b': b})
Example #9
 def test_publish_retry_calls_ensure(self):
     p = Producer(Mock())
     p._connection = Mock()
     p._connection.declared_entities = set()
     ensure = p.connection.ensure = Mock()
     p.publish('foo', exchange='foo', retry=True)
     ensure.assert_called()
Example #10
File: emit.py Project: pcreech/pulp
def send(document, routing_key=None):
    """
    Attempt to send a message to the AMQP broker.

    If we cannot obtain a new connection then the message will be dropped. Note
    that we do not block when waiting for a connection.

    :param document: the taskstatus Document we want to send
    :type  document: mongoengine.Document
    :param routing_key: The routing key for the message
    :type  routing_key: str
    """

    # if the user has not enabled notifications, just bail
    event_notifications_enabled = config.getboolean("messaging", "event_notifications_enabled")
    if not event_notifications_enabled:
        return

    try:
        payload = document.to_json()
    except TypeError:
        _logger.warn("unable to convert document to JSON; event message not sent")
        return

    broker_url = config.get("messaging", "event_notification_url")

    notification_topic = Exchange(name=DEFAULT_EXCHANGE_NAME, type="topic")

    with Connection(broker_url) as connection:
        producer = Producer(connection)
        producer.maybe_declare(notification_topic)
        producer.publish(payload, exchange=notification_topic, routing_key=routing_key)
Example #11
    def send(self, topic, message):
        """Publishes a pulse message to the proper exchange."""

        if not message:
            Log.error("Expecting a message")

        message._prepare()

        if not self.connection:
            self.connect()

        producer = Producer(
            channel=self.connection,
            exchange=Exchange(self.settings.exchange, type='topic'),
            routing_key=topic
        )

        # The message is actually a simple envelope format with a payload and
        # some metadata.
        final_data = Dict(
            payload=message.data,
            _meta=set_default({
                'exchange': self.settings.exchange,
                'routing_key': message.routing_key,
                'serializer': self.settings.serializer,
                'sent': time_to_string(datetime.datetime.now(timezone(self.settings.broker_timezone))),
                'count': self.count
            }, message.metadata)
        )

        producer.publish(jsons.scrub(final_data), serializer=self.settings.serializer)
        self.count += 1
Example #12
 def test_helper_sync_send_msg(self, exchange, ex_rk, send_rk, payload):
     ex = Exchange(exchange, 'topic')
     queue = Queue(exchange=ex, routing_key=ex_rk + '.*', exclusive=True, channel=self.__connection)
     queue.declare()
     prod = Producer(self.__connection, exchange=ex, routing_key=send_rk)
     prod.publish(payload)
     return queue
Example #13
    def test_filter(self):
        harvest_msg = {
            "id": "test:2",
            "type": "twitter_filter",
            "seeds": [
                {
                    "token": "obama"
                }
            ],
            "credentials": {
                "consumer_key": tests.TWITTER_CONSUMER_KEY,
                "consumer_secret": tests.TWITTER_CONSUMER_SECRET,
                "access_token": tests.TWITTER_ACCESS_TOKEN,
                "access_token_secret": tests.TWITTER_ACCESS_TOKEN_SECRET
            },
            "collection": {
                "id": "test_collection",
                "path": self.collection_path

            }
        }
        with self._create_connection() as connection:
            bound_exchange = self.exchange(connection)
            producer = Producer(connection, exchange=bound_exchange)
            producer.publish(harvest_msg, routing_key="harvest.start.twitter.twitter_filter")

            # Wait 30 seconds
            time.sleep(30)

            # Send stop message
            harvest_stop_msg = {
                "id": "test:2",
            }
            producer.publish(harvest_stop_msg, routing_key="harvest.stop.twitter.twitter_filter")

            # Now wait for result message.
            counter = 0
            message_obj = None
            bound_result_queue = self.result_queue(connection)
            while counter < 180 and not message_obj:
                time.sleep(.5)
                message_obj = bound_result_queue.get(no_ack=True)
                counter += 1
            self.assertIsNotNone(message_obj, "Timed out waiting for result at {}.".format(datetime.now()))
            result_msg = message_obj.payload
            # Matching ids
            self.assertEqual("test:2", result_msg["id"])
            # Success
            self.assertEqual("completed success", result_msg["status"])
            # Some tweets
            self.assertTrue(result_msg["summary"]["tweet"])

            # Web harvest message.
            bound_web_harvest_queue = self.web_harvest_queue(connection)
            message_obj = bound_web_harvest_queue.get(no_ack=True)
            self.assertIsNotNone(message_obj, "No web harvest message.")
            web_harvest_msg = message_obj.payload
            # Some seeds
            self.assertTrue(len(web_harvest_msg["seeds"]))
Example #14
def main():
    filename = "meta"
    fptr = open(filename, "r")
    amqpurl = fptr.readline().strip()
    exchange_name = fptr.readline().strip()

    exchange = Exchange(exchange_name, type="direct")
    D_queue = Queue(exchange_name, exchange, routing_key=exchange_name, auto_delete=False, exclusive=False)


    connection = BrokerConnection(amqpurl)
    print amqpurl
    channel = connection.channel()

    queue = D_queue(channel)
    queue.declare()
    producer = Producer(channel, exchange, routing_key=exchange_name)

    message_count = int(sys.argv[1])
    imgsize = int(sys.argv[2])
    name = sys.argv[3]

    s3url = ""
    if 'S3_URL' in os.environ:
        s3url = os.environ['S3_URL']
    s3id = os.environ['EC2_ACCESS_KEY']
    s3pw = os.environ['EC2_SECRET_KEY']

    n = datetime.now()
    print "XXX starting %s" % (str(n))

    msg_list = []
    dashi_name = str(uuid.uuid4()).split('-')[0]
    for i in range(0, message_count):
        msg = {'program': 'python node2.py %d %d %d' % (i, message_count, imgsize),
                'rank': i,
                's3url': s3url,
                's3id': s3id,
                's3pw': s3pw,
                'testname': name,
                'dashiname': dashi_name}
        msg_list.append(msg)
    random.shuffle(msg_list)

    print "Add the messages to the queue..."
    for msg in msg_list:
        print "%s %d of %d" % (msg['testname'], msg['rank'], message_count)
        sys.stdout.flush()
        producer.publish(msg,
                     exchange=exchange,
                     routing_key=exchange_name,
                     serializer="json")

    dashi = get_dashi_connection(amqpurl, dashi_name)
    p_con = get_phantom_con(s3id, s3pw)
    wait_till_done(dashi, message_count, p_con, name)

    n = datetime.now()
    print "XXX done %s" % (str(n))
Example #15
    def send_msg(self, msg, exch, routing_key):
        if not self.connection:
            raise MissingConfiguration("Missing connection!")

        from kombu import Exchange, Producer
        exch = Exchange(exch, type='topic')
        prod = Producer(self.connection, exchange=exch)
        prod.publish(msg, routing_key=routing_key)
Example #16
File: messaging.py Project: iiilx/bjj
def send_increment_upvotes(for_post_id):
    """Send a message for incrementing the click count for an URL."""
    exchange = Exchange("test", type="direct")
    queue = Queue("test", exchange, routing_key="test")
    connection = establish_connection()
    channel = connection.channel()
    producer = Producer(channel, exchange, routing_key="test")
    producer.publish(str(for_post_id))
    connection.close()
Example #17
    def test_produce_consume(self):
        channel = self.c.channel()
        producer = Producer(channel, self.e)
        consumer1 = Consumer(channel, self.q)
        consumer2 = Consumer(channel, self.q2)
        self.q2(channel).declare()

        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory')
        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory2')

        _received1 = []
        _received2 = []

        def callback1(message_data, message):
            _received1.append(message)
            message.ack()

        def callback2(message_data, message):
            _received2.append(message)
            message.ack()

        consumer1.register_callback(callback1)
        consumer2.register_callback(callback2)

        consumer1.consume()
        consumer2.consume()

        while 1:
            if len(_received1) + len(_received2) == 20:
                break
            self.c.drain_events()

        self.assertEqual(len(_received1) + len(_received2), 20)

        # compression
        producer.publish({'compressed': True},
                         routing_key='test_transport_memory',
                         compression='zlib')
        m = self.q(channel).get()
        self.assertDictEqual(m.payload, {'compressed': True})

        # queue.delete
        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory')
        self.assertTrue(self.q(channel).get())
        self.q(channel).delete()
        self.q(channel).declare()
        self.assertIsNone(self.q(channel).get())

        # queue.purge
        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory2')
        self.assertTrue(self.q2(channel).get())
        self.q2(channel).purge()
        self.assertIsNone(self.q2(channel).get())
Example #18
 def _publish(sender: Producer) -> None:
     logger.debug("Send message {body} to broker {amqpuri} with routing key {routing_key}"
                  .format(body=message, amqpuri=self._amqp_uri, routing_key=message.header.topic))
     sender.publish(message.body.value,
                    headers=_build_message_header(message),
                    exchange=self._exchange,
                    serializer='json',   # todo: fix this for the mime type of the message
                    routing_key=message.header.topic,
                    declare=[self._exchange])
Example #19
    def test_search(self):
        harvest_msg = {
            "id": "test:1",
            "type": "twitter_search",
            "seeds": [
                {
                    "token": "gwu"
                }
            ],
            "credentials": {
                "consumer_key": tests.TWITTER_CONSUMER_KEY,
                "consumer_secret": tests.TWITTER_CONSUMER_SECRET,
                "access_token": tests.TWITTER_ACCESS_TOKEN,
                "access_token_secret": tests.TWITTER_ACCESS_TOKEN_SECRET
            },
            "collection": {
                "id": "test_collection",
                "path": self.collection_path

            }
        }
        with self._create_connection() as connection:
            bound_exchange = self.exchange(connection)
            producer = Producer(connection, exchange=bound_exchange)
            producer.publish(harvest_msg, routing_key="harvest.start.twitter.twitter_search")

            # Now wait for result message.
            counter = 0
            bound_result_queue = self.result_queue(connection)
            message_obj = None
            while counter < 240 and not message_obj:
                time.sleep(.5)
                message_obj = bound_result_queue.get(no_ack=True)
                counter += 1
            self.assertTrue(message_obj, "Timed out waiting for result at {}.".format(datetime.now()))
            result_msg = message_obj.payload
            # Matching ids
            self.assertEqual("test:1", result_msg["id"])
            # Success
            self.assertEqual("completed success", result_msg["status"])
            # Some tweets
            self.assertTrue(result_msg["summary"]["tweet"])

            # Web harvest message.
            bound_web_harvest_queue = self.web_harvest_queue(connection)
            message_obj = bound_web_harvest_queue.get(no_ack=True)
            # method_frame, header_frame, web_harvest_body = self.channel.basic_get(self.web_harvest_queue)
            self.assertIsNotNone(message_obj, "No web harvest message.")
            web_harvest_msg = message_obj.payload
            # Some seeds
            self.assertTrue(len(web_harvest_msg["seeds"]))

            # Warc created message.
            # method_frame, header_frame, warc_created_body = self.channel.basic_get(self.warc_created_queue)
            bound_warc_created_queue = self.warc_created_queue(connection)
            message_obj = bound_warc_created_queue.get(no_ack=True)
            self.assertIsNotNone(message_obj, "No warc created message.")
Example #20
class KombuLogger(object):
    def __init__(self, host="localhost", user="******", password="******", vhost="/", exchange="analytics"):
        self.connection = BrokerConnection(host, user, password, vhost)
        self.channel = self.connection.channel()
        self.exchange = Exchange(exchange, "topic", durable=True, auto_delete=False)
        self.producer = Producer(self.channel, exchange=self.exchange, serializer="json")
    
    def write(self, event, timestamp, attributes):
        self.producer.publish({"event": event, "ts": timestamp, "attr": attributes}, routing_key=event)
Example #21
def kpublish(url, queue_name, refdes, particles):
    from kombu import Connection, Exchange, Producer, Queue
    headers = {'sensor': refdes, 'deliveryType': 'streamed'}
    with Connection(url) as conn:
        exchange = Exchange('amq.direct', type='direct')
        queue = Queue(name=queue_name, exchange=exchange, routing_key=queue_name)
        producer = Producer(conn, exchange=exchange, routing_key=queue_name)
        producer.publish(json.dumps(particles), content_encoding='ascii', content_type='text/plain',
                         headers=headers, declare=[queue], user_id='guest')
Example #22
File: views.py Project: sp00/kral
def exchange_send(data,exchange):
    try:
        connection = BrokerConnection()
        channel = connection.channel()
        producer = Producer(channel, Exchange(exchange, type="fanout"))
        producer.publish(data)
        channel.close()
        connection.close()
    except Exception, error:
        print(error)
Example #23
 def is_up(cls):
     try:
         bus_exchange = Exchange(BUS_EXCHANGE_NAME, type=BUS_EXCHANGE_TYPE)
         with Connection(BUS_URL) as connection:
             producer = Producer(connection, exchange=bus_exchange, auto_declare=True)
             producer.publish('', routing_key='test')
     except IOError:
         return False
     else:
         return True
Example #24
    def test_purge(self):
        channel = self.connection.channel()
        producer = Producer(channel, self.exchange, routing_key='test_Redis')
        self.queue(channel).declare()

        for i in range(10):
            producer.publish({'hello': 'world-%s' % (i, )})

        assert channel._size('test_Redis') == 10
        assert self.queue(channel).purge() == 10
        channel.close()
Example #25
    def test_publish__get_redispyv3(self):
        channel = self.connection.channel()
        producer = Producer(channel, self.exchange, routing_key='test_Redis')
        self.queue(channel).declare()

        producer.publish({'hello': 'world'})

        assert self.queue(channel).get().payload == {'hello': 'world'}
        assert self.queue(channel).get() is None
        assert self.queue(channel).get() is None
        assert self.queue(channel).get() is None
Example #26
    def __send_push_notification(self, user_id, message):
        payload = {"user_id": user_id, "message": message}

        with Connection(settings.BROKER_URL) as conn:
            channel = conn.channel()

            inserter = Producer(exchange=Exchange('push_notifications',
                                                  type='topic'),
                                channel=channel,
                                routing_key="push_notification")
            inserter.publish(json.dumps(payload))
Example #27
File: test_redis.py Project: Scalr/kombu
    def test_publish__get(self):
        channel = self.connection.channel()
        producer = Producer(channel, self.exchange, routing_key='test_Redis')
        self.queue(channel).declare()

        producer.publish({'hello': 'world'})

        assert self.queue(channel).get().payload == {'hello': 'world'}
        assert self.queue(channel).get() is None
        assert self.queue(channel).get() is None
        assert self.queue(channel).get() is None
Example #28
 def send(self, message):
     print("Send called", message)
     with self._create_connection() as connection:
         producer = Producer(connection,
                             exchange=self._exchange,
                             routing_key=Config.rabbit_mq_routing_key)
         payload = message.payload()
         producer.declare()
         producer.publish(payload,
                          exchange=self._exchange,
                          routing_key=Config.rabbit_mq_routing_key)
Example #29
    def test_auto_declare(self):
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, auto_declare=True)
        # creates Exchange clone at bind
        assert p.exchange is not self.exchange
        assert p.exchange.is_bound
        # auto_declare declares exchange'
        assert 'exchange_declare' not in channel

        p.publish('foo')
        assert 'exchange_declare' in channel
Example #30
    def test_purge(self):
        channel = self.connection.channel()
        producer = Producer(channel, self.exchange, routing_key='test_Redis')
        self.queue(channel).declare()

        for i in range(10):
            producer.publish({'hello': 'world-%s' % (i, )})

        self.assertEqual(channel._size('test_Redis'), 10)
        self.assertEqual(self.queue(channel).purge(), 10)
        channel.close()
Example #31
    def send_message(self, message, topic, routing_key=None):
        with self.connection as _conn:
            _conn.connect()
            channel = _conn.channel()
            producer = Producer(channel)

            logger.info(f"Insert data on TOPIC: {topic}")

            producer.publish(body=message, exchange=topic, routing_key=routing_key)

            logger.info(f"Message {message} sent to topic {topic}!")
Example #32
 def publish(self):
     cn = connection.Connection(host='localhost:5672',
                                userid='test',
                                password='******',
                                confirm_publish=True)
     cn.connect()
     channel = cn.channel()
     producer = Producer(channel)
     for i in range(1, 300):
         producer.publish("hello", "aa_1")
         time.sleep(2)
Example #33
def main():
    """Main program function"""
    with Connection(rabbitmq_url) as conn:
        producer = Producer(conn)
        exchange = Exchange(name='producer_consumer_exchange', type='direct')
        while True:
            logger.info('Sent a message: {}'.format('hello world'))
            producer.publish(body={'hello': 'world'},
                             routing_key='producer_key',
                             exchange=exchange)
            time.sleep(sending_period)
Example #34
    def test_auto_declare(self):
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, auto_declare=True)
        # creates Exchange clone at bind
        assert p.exchange is not self.exchange
        assert p.exchange.is_bound
        # auto_declare declares exchange'
        assert 'exchange_declare' not in channel

        p.publish('foo')
        assert 'exchange_declare' in channel
Example #35
    def test_publish__get(self):
        channel = self.connection.channel()
        producer = Producer(channel, self.exchange, routing_key="test_Redis")
        self.queue(channel).declare()

        producer.publish({"hello": "world"})

        self.assertDictEqual(self.queue(channel).get().payload, {"hello": "world"})
        self.assertIsNone(self.queue(channel).get())
        self.assertIsNone(self.queue(channel).get())
        self.assertIsNone(self.queue(channel).get())
Example #36
 def is_up(self):
     try:
         with Connection(self._url) as connection:
             producer = Producer(connection,
                                 exchange=self._default_exchange,
                                 auto_declare=True)
             producer.publish('', routing_key='test')
     except IOError:
         return False
     else:
         return True
Example #37
 def test_produce__consume(self):
     if not self.verify_alive():
         return
     chan1 = self.connection.channel()
     consumer = Consumer(chan1, self.queue)
     consumer.queues[0].purge()
     producer = Producer(chan1, self.exchange)
     producer.publish({"foo": "bar"}, routing_key=self.prefix)
     message = consumeN(self.connection, consumer)
     self.assertDictEqual(message[0], {"foo": "bar"})
     chan1.close()
     self.purge([self.queue.name])
Example #38
 def test_produce__consume(self):
     if not self.verify_alive():
         return
     chan1 = self.connection.channel()
     consumer = Consumer(chan1, self.queue)
     consumer.queues[0].purge()
     producer = Producer(chan1, self.exchange)
     producer.publish({"foo": "bar"}, routing_key=self.prefix)
     message = consumeN(self.connection, consumer)
     self.assertDictEqual(message[0], {"foo": "bar"})
     chan1.close()
     self.purge([self.queue.name])
Example #39
 def _publish(sender: Producer) -> None:
     logger.debug(
         "Send message {body} to broker {amqpuri} with routing key {routing_key}"
         .format(body=message,
                 amqpuri=self._amqp_uri,
                 routing_key=message.header.topic))
     sender.publish(message.body.bytes,
                    headers=_build_message_header(message),
                    exchange=self._exchange,
                    content_type="text/plain",
                    routing_key=message.header.topic,
                    declare=[self._exchange])
Example #40
    def test_blog_posts(self):
        self.harvest_path = "/sfm-data/collection_set/test_collection/test_2"
        harvest_msg = {
            "id": "test:2",
            "type": "tumblr_blog_posts",
            "path": self.harvest_path,
            "seeds": [
                {
                    "uid": "gwuscrc",
                    "id": "seed1"
                }
            ],
            "credentials": {
                "api_key": tests.TUMBLR_API_KEY
            },
            "collection_set": {
                "id": "test_collection_set"
            },
            "collection": {
                "id": "test_collection"
            },
            "options": {
            }
        }
        with self._create_connection() as connection:
            bound_exchange = self.exchange(connection)
            producer = Producer(connection, exchange=bound_exchange)
            producer.publish(harvest_msg, routing_key="harvest.start.tumblr.tumblr_blog_posts")

            # Now wait for status message.
            status_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:2", status_msg["id"])
            # Running
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            # Another running message
            status_msg = self._wait_for_message(self.result_queue, connection)
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            # Now wait for result message.
            result_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:2", result_msg["id"])
            # Success
            self.assertEqual(STATUS_SUCCESS, result_msg["status"])
            # Some posts
            self.assertTrue(result_msg["stats"][date.today().isoformat()]["tumblr posts"])

            # Warc created message.
            # check path exist
            warc_msg = self._wait_for_message(self.warc_created_queue, connection)
            self.assertTrue(os.path.isfile(warc_msg["warc"]["path"]))
Example #41
    def test_publish__get(self):
        channel = self.connection.channel()
        producer = Producer(channel, self.exchange, routing_key='test_Redis')
        self.queue(channel).declare()

        producer.publish({'hello': 'world'})

        self.assertDictEqual(
            self.queue(channel).get().payload, {'hello': 'world'})
        self.assertIsNone(self.queue(channel).get())
        self.assertIsNone(self.queue(channel).get())
        self.assertIsNone(self.queue(channel).get())
Example #42
    def test_harvest(self):
        self.harvest_path = "/sfm-data/collection_set/test_collection/test_1"
        harvest_msg = {
            "id": "test:1",
            "parent_id": "sfmui:45",
            "type": "web",
            "seeds": [
                {
                    "token": "http://gwu-libraries.github.io/sfm-ui/"
                },
            ],
            "path": self.harvest_path,
            "collection_set": {
                "id": "test_collection_set"
            },
            "collection": {
                "id": "test_collection"
            }
        }

        with self._create_connection() as connection:
            bound_exchange = self.exchange(connection)
            producer = Producer(connection, exchange=bound_exchange)
            producer.publish(harvest_msg, routing_key="harvest.start.web")

            # Now wait for status message.
            status_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:1", status_msg["id"])
            # Running
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            time.sleep(30)

            # Another running message
            status_msg = self._wait_for_message(self.result_queue, connection)
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            # Now wait for result message.
            result_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:1", result_msg["id"])
            # Success
            self.assertEqual(STATUS_SUCCESS, result_msg["status"])
            # Some web resources
            self.assertTrue(
                result_msg["stats"][date.today().isoformat()]["web resources"])

            # Warc created message.
            bound_warc_created_queue = self.warc_created_queue(connection)
            message_obj = bound_warc_created_queue.get(no_ack=True)
            self.assertIsNotNone(message_obj, "No warc created message.")
Example #43
    def test_harvest(self):
        self.harvest_path = "/sfm-data/collection_set/test_collection/test_1"
        harvest_msg = {
            "id": "test:1",
            "parent_id": "sfmui:45",
            "type": "web",
            "seeds": [
                {
                    "token": "http://gwu-libraries.github.io/sfm-ui/"
                },
            ],
            "path": self.harvest_path,
            "collection_set": {
                "id": "test_collection_set"

            },
            "collection": {
                "id": "test_collection"
            }
        }

        with self._create_connection() as connection:
            bound_exchange = self.exchange(connection)
            producer = Producer(connection, exchange=bound_exchange)
            producer.publish(harvest_msg, routing_key="harvest.start.web")

            # Now wait for status message.
            status_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:1", status_msg["id"])
            # Running
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            time.sleep(30)

            # Another running message
            status_msg = self._wait_for_message(self.result_queue, connection)
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            # Now wait for result message.
            result_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:1", result_msg["id"])
            # Success
            self.assertEqual(STATUS_SUCCESS, result_msg["status"])
            # Some web resources
            self.assertTrue(result_msg["stats"][date.today().isoformat()]["web resources"])

            # Warc created message.
            bound_warc_created_queue = self.warc_created_queue(connection)
            message_obj = bound_warc_created_queue.get(no_ack=True)
            self.assertIsNotNone(message_obj, "No warc created message.")
Example #44
 def produce(self, message, exchange, routing_key):
     """
     As a producer, you need to know three things:
         1. the exchange to publish to
         2. the routing_key used for dispatch
         3. the message to send
     """
     producer = Producer(self._conn)
     producer.publish(
         body=message,
         exchange=exchange,
         routing_key=routing_key,
     )
Example #45
def main(out):

    while True:
        for task in TASKS:
            if task['last'] + task['timedelta'] < datetime.datetime.now():
                with Connection() as connection:
                    with connection.channel() as channel:
                        producer = Producer(channel)
                        producer.publish(task['message'], exchange=out)

                task['last'] = datetime.datetime.now()
            continue
        sleep(10)
Example #46
 def enqueue(self, message: dict, routing_key: str) -> None:
     """Enqueue message"""
     producer = Producer(exchange=self.exchange, channel=self.channel)
     correlation_id = uuid()
     producer.publish(
         body=message,
         serializer="json",
         routing_key=routing_key,
         correlation_id=correlation_id,
         retry=self.retry,
         retry_policy=self.retry_policy,
     )
     self.logger.debug("Published %s: %s", routing_key, message)
Example #47
 def _produce_message(cls, channel, exchange, payload):
     routing_key = cls._get_produced_message_routing_key(payload)
     producer = Producer(
         channel,
         exchange=exchange,
         auto_declare=True,
     )
     producer.publish(
         ujson.dumps(payload.to_dict()),
         routing_key=routing_key,
         mandatory=True,
         content_type='text/plain',
         delivery_mode=2,
     )
Example #48
def producer(msg=None):
    print("------- in producer")
    rabbit_url = "amqp://localhost:5672/"
    conn = Connection(rabbit_url)
    channel = conn.channel()
    exchange = Exchange("scrapy", type="direct")
    producer = Producer(exchange=exchange,
                        channel=channel,
                        routing_key="quotes")
    queue = Queue(name="quotation", exchange=exchange, routing_key="quotes")
    queue.maybe_bind(conn)
    queue.declare()
    producer.publish(msg)
    print("published ->")
Example #49
    def test_search_timeline(self):
        self.path = "/sfm-collection-set-data/collection_set/test_collection/test_3"
        harvest_msg = {
            "id": "test:3",
            "type": "weibo_timeline",
            "path": self.path,
            "credentials": {
                "access_token": tests.WEIBO_ACCESS_TOKEN
            },
            "collection_set": {
                "id": "test_collection_set"
            },
            "collection": {
                "id": "test_collection"
            },
            "options": {}
        }
        with self._create_connection() as connection:
            bound_exchange = self.exchange(connection)
            producer = Producer(connection, exchange=bound_exchange)
            producer.publish(harvest_msg,
                             routing_key="harvest.start.weibo.weibo_timeline")

            # Now wait for status message.
            status_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:3", status_msg["id"])
            # Running
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            # Another running message
            status_msg = self._wait_for_message(self.result_queue, connection)
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            # Now wait for result message.
            result_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:3", result_msg["id"])
            # Success
            self.assertEqual(STATUS_SUCCESS, result_msg["status"])

            # Some weibo posts
            self.assertTrue(
                result_msg["stats"][date.today().isoformat()]["weibos"])

            # Warc created message.
            warc_msg = self._wait_for_message(self.warc_created_queue,
                                              connection)
            # check path exist
            self.assertTrue(os.path.isfile(warc_msg["warc"]["path"]))
Example #50
    def test_basic_get__accept_allowed(self):
        conn = Connection('memory://')
        q = Queue('foo', exchange=self.exchange)
        p = Producer(conn)
        p.publish(
            {'complex': object()},
            declare=[q], exchange=self.exchange, serializer='pickle',
        )

        message = q(conn).get(accept=['pickle'], no_ack=True)
        self.assertIsNotNone(message)

        payload = message.decode()
        self.assertTrue(payload['complex'])
Example #51
    def test_basic_get__accept_disallowed(self):
        conn = Connection('memory://')
        q = Queue('foo', exchange=self.exchange)
        p = Producer(conn)
        p.publish(
            {'complex': object()},
            declare=[q], exchange=self.exchange, serializer='pickle',
        )

        message = q(conn).get(no_ack=True)
        self.assertIsNotNone(message)

        with self.assertRaises(q.ContentDisallowed):
            message.decode()
Example #52
 def _publish(sender: Producer) -> None:
     logger.debug(
         "Send message {body} to broker {amqpuri} with routing key {routing_key}"
         .format(body=message,
                 amqpuri=self._amqp_uri,
                 routing_key=message.header.topic))
     sender.publish(
         message.body.value,
         headers=_build_message_header(message),
         exchange=self._exchange,
         serializer=
         'json',  # todo: fix this for the mime type of the message
         routing_key=message.header.topic,
         declare=[self._exchange])
Example #53
class BaseRPC:
    def __init__(self,
                 connection: Connection,
                 consumer_connection: Optional[Connection] = None):
        self.connection = connection
        self.consumer_connection = consumer_connection or connection
        self.producer = Producer(self.connection)
        self.consumer = Consumer(
            self.consumer_connection,
            queues=[Queue(exclusive=True, auto_delete=True)],
            on_message=self._on_message)
        self.consumer.consume()
        self.queue: Queue = self.consumer.queues[0]
        self.callbacks = list()

        self._awaiting_results: Set[CorrelationID] = set()

    def call(self,
             message: str,
             routing_key: str,
             properties: MutableMapping[str, str] = None) -> CorrelationID:
        properties = properties or {}
        correlation_id = str(uuid.uuid4())
        properties['correlation_id'] = correlation_id
        properties['reply_to'] = self.queue.name

        self._awaiting_results.add(correlation_id)
        self.producer.publish(message, routing_key, **properties)

        return correlation_id

    def add_callback(self, cb: Callable[[CorrelationID, str], None]):
        self.callbacks.append(cb)

    def _on_message(self, message: Message):
        body = message.body
        correlation_id = message.properties['correlation_id']

        try:
            self._awaiting_results.remove(correlation_id)
        except KeyError:
            raise Exception("UNEXPECTED MESSAGE")

        for callback in self.callbacks:
            callback(correlation_id, body)

    def drain_message(self, timeout=None):
        """ Drain a single event from the connection. """
        self.connection.drain_events(timeout=timeout)
示例#54
0
    def test_producer_on_return(self):
        def on_return(_exception, _exchange, _routing_key, _message):
            pass
        channel = self.c.channel()
        producer = Producer(channel, on_return=on_return)
        consumer = self.c.Consumer([self.q3])

        producer.publish(
            {'hello': 'on return'},
            declare=consumer.queues,
            exchange=self.fanout,
        )

        assert self.q3(self.c).get().payload == {'hello': 'on return'}
        assert self.q3(self.c).get() is None
Example #55
def publish_message(body, routing_key):
    try:
        with Connection(os.environ['MESSAGE_BROKER_URL']) as conn:
            with conn.channel() as channel:
                exchange = Exchange(os.environ['MESSAGE_BROKER_EXCHANGE'],
                                    type='topic', channel=channel, durable=True)
                exchange.declare()
                producer = Producer(channel=channel,
                                    exchange=exchange,
                                    routing_key=routing_key,
                                    serializer='json')
                producer.publish(body=body, retry=True)
    except Exception as ex:
        # TODO: Handle this better
        logging.error(f"Something went wrong in publish_message - {ex.__class__} - {ex}")
Example #56
class FanoutPublisher(PluginBase):
    def __init__(self, name=None):
        if app.config['DEBUG']:
            setup_logging(loglevel='DEBUG', loggers=[''])

        self.connection = BrokerConnection(AMQP_URL)
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s', AMQP_URL,
                      e)
            raise RuntimeError

        self.channel = self.connection.channel()
        self.exchange_name = AMQP_TOPIC

        self.exchange = Exchange(name=self.exchange_name,
                                 type='fanout',
                                 channel=self.channel)
        self.producer = Producer(exchange=self.exchange, channel=self.channel)

        super(FanoutPublisher, self).__init__(name)

        LOG.info('Configured fanout publisher on topic "%s"', AMQP_TOPIC)

    def pre_receive(self, alert):
        return alert

    def post_receive(self, alert):
        LOG.info('Sending message %s to AMQP topic "%s"', alert.get_id(),
                 AMQP_TOPIC)

        try:
            body = alert.serialize  # alerta >= 5.0

            # update body's datetime-related fields  with utc-aware values
            body.update({
                key: body[key].replace(tzinfo=pytz.utc)
                for key in ['createTime', 'lastReceiveTime', 'receiveTime']
            })
        except Exception:
            body = alert.get_body()  # alerta < 5.0

        LOG.debug('Message: %s', body)
        self.producer.publish(body, declare=[self.exchange], retry=True)

    def status_change(self, alert, status, text):
        return
Example #57
    def send_message(self, message, topic):
        # with self.send_connection as _conn:
        _conn = self.send_connection
        _conn.connect()
        # channel = _conn.channel()
        with _conn.channel() as channel:
            producer = Producer(channel)

            logger.debug(f"Insert data on TOPIC: {topic}")

            if not topic.startswith(self.config.get_event_name_prefix()):
                topic = f"{self.config.get_event_name_prefix()}{topic}"

            producer.publish(body=message, exchange=topic, routing_key=None)

            logger.debug(f"Message {message} sent to topic {topic}!")
Example #58
def publish_msg_and_get_from_other_queue(amqp_connection,
                                         proto_obj,
                                         src_queue,
                                         dst_queue,
                                         headers,
                                         expected_number_of_output_msgs=1):
    received_msgs = []

    def on_message(self, message):
        received_msgs.append(message)

    worker = Worker(amqp_connection)
    with amqp_connection.channel() as channel:
        producer = Producer(channel)
        result = producer.publish(
            proto_obj.SerializeToString(),
            exchange=queue_configs.EXTERNAL_EXCHANGE,
            routing_key=src_queue.routing_key,
            content_type='application/vnd.google.protobuf',
            content_encoding='binary',
            headers=headers,
            delivery_mode=2,
            declare=[queue_configs.EXTERNAL_EXCHANGE, src_queue])

    [_ for _ in worker.consume(limit=expected_number_of_output_msgs)]
    with Consumer(amqp_connection, dst_queue, callbacks=[on_message]):
        amqp_connection.drain_events(timeout=1)

    return received_msgs
Example #59
def publish(start=False, startfs=False, stop=False, close=False):
    body = json.dumps(close_json)
    if start == True:
      body = json.dumps(start_json)
    elif stop == True:
      body = json.dumps(stop_json)
    elif startfs == True:
      body = json.dumps(startfs_json)

    conn = BrokerConnection(hostname=rabbit_host, port=5672, userid=rabbit_user, password=rabbit_password, virtual_host=rabbit_vhost, heartbeat=4)
    channel = conn.channel()

    exchange = Exchange(rabbit_exchange, type='topic', durable=False)
    producer = Producer(exchange=exchange, channel=channel, routing_key=rabbit_routingkey)

    producer.publish(body)
Example #60
    def response_greenthread():
        with get_connection() as conn:
            with conn.channel() as chan:
                queue = nova.get_topic_queue('test_rpc', 'test', channel=chan)
                queue.declare()
                queue_declared.send(True)
                msg = ifirst(queue_iterator(queue, no_ack=True, timeout=2))
                msgid, _, _, args = nova.parse_message(msg.payload)

                exchange = nova.get_reply_exchange(msgid)
                producer = Producer(chan, exchange=exchange, routing_key=msgid)

                msg = {'result': args, 'failure': None, 'ending': False}
                producer.publish(msg)
                msg = {'result': None, 'failure': None, 'ending': True}
                producer.publish(msg)