Example #1
 def test_publish_retry_calls_ensure(self):
     p = Producer(Mock())
     p._connection = Mock()
     p._connection.declared_entities = set()
     ensure = p.connection.ensure = Mock()
     p.publish('foo', exchange='foo', retry=True)
     ensure.assert_called()
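The test above only checks that retry=True routes the publish through connection.ensure. As a usage-side sketch (the broker URL, exchange, and routing key are placeholders, not from the original), the retry knobs look roughly like this:

from kombu import Connection, Exchange, Producer

with Connection('amqp://guest:guest@localhost:5672//') as conn:
    exchange = Exchange('demo', type='direct')
    producer = Producer(conn.channel(), exchange=exchange)
    # retry=True wraps the publish in connection.ensure(); retry_policy
    # uses kombu's documented keys.
    producer.publish(
        {'hello': 'world'},
        routing_key='demo',
        retry=True,
        retry_policy={'interval_start': 0, 'interval_step': 2,
                      'interval_max': 30, 'max_retries': 3},
    )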
Example #2
class MqClient(object):
    """
    exchange='E_X7_W2S', queue='Q_X7_W2S', routing_key='RK_X7_W2S'
    """

    def __init__(self, kwargs):
        if kwargs:
            self.kwargs = kwargs
        else:
            self.kwargs = MqDict

    def connect(self, hostname="localhost", userid="guest", password="******", virtual_host="/"):
        conn = BrokerConnection(hostname, userid, password, virtual_host)
        # define Web2Server exchange
        exchange = Exchange(self.kwargs["X7_E"], type="direct")
        # queue = Queue(self.kwargs["X7_Q"], exchange, routing_key=self.kwargs["X7_RK"])
        channel = conn.channel()

        self.producer = Producer(channel, exchange, routing_key=self.kwargs["X7_RK"])

    def send(self, msg):
        self.producer.publish(msg, serializer="json", compression="zlib")

    def close(self):
        pass
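A hypothetical usage of MqClient, assuming MqDict is defined elsewhere with the "X7_E", "X7_Q" and "X7_RK" keys the class reads:

# Sketch only; the key values mirror the docstring above.
client = MqClient({"X7_E": "E_X7_W2S", "X7_Q": "Q_X7_W2S", "X7_RK": "RK_X7_W2S"})
client.connect(hostname="localhost")
client.send({"event": "ping"})
client.close()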
Example #3
 def test_revive(self):
     chan = self.connection.channel()
     p = Producer(chan)
     chan2 = self.connection.channel()
     p.revive(chan2)
     self.assertIs(p.channel, chan2)
     self.assertIs(p.exchange.channel, chan2)
Example #4
    def test_search(self):
        self.harvest_path = "/sfm-data/collection_set/test_collection/test_1"
        harvest_msg = {
            "id": "test:1",
            "type": "twitter_search",
            "path": self.harvest_path,
            "seeds": [
                {
                    "id": "seed_id3",
                    "token": "gwu"
                }
            ],
            "credentials": {
                "consumer_key": tests.TWITTER_CONSUMER_KEY,
                "consumer_secret": tests.TWITTER_CONSUMER_SECRET,
                "access_token": tests.TWITTER_ACCESS_TOKEN,
                "access_token_secret": tests.TWITTER_ACCESS_TOKEN_SECRET
            },
            "collection_set": {
                "id": "test_collection_set"
            },
            "collection": {
                "id": "test_collection"
            },
            "options": {
                "web_resources": True,
                "media": True,
                "tweets": True
            }
        }
        with self._create_connection() as connection:
            bound_exchange = self.exchange(connection)
            producer = Producer(connection, exchange=bound_exchange)
            producer.publish(harvest_msg, routing_key="harvest.start.twitter.twitter_search")

            status_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:1", status_msg["id"])
            # Running
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            # Another running message
            status_msg = self._wait_for_message(self.result_queue, connection)
            self.assertEqual(STATUS_RUNNING, status_msg["status"])

            # Now wait for result message.
            result_msg = self._wait_for_message(self.result_queue, connection)
            # Matching ids
            self.assertEqual("test:1", result_msg["id"])
            # Success
            self.assertEqual(STATUS_SUCCESS, result_msg["status"])
            # Some tweets
            self.assertTrue(result_msg["stats"][date.today().isoformat()]["tweets"])

            # Web harvest message.
            web_harvest_msg = self._wait_for_message(self.web_harvest_queue, connection)
            self.assertTrue(len(web_harvest_msg["seeds"]))

            # Warc created message.
            self.assertTrue(self._wait_for_message(self.warc_created_queue, connection))
Example #5
 def publish(self, topic, data):
     LOG.debug("Publishing message on topic %s" % topic)
     with self.get_connection() as conn:
         channel = conn.channel()
         exchange = self.declare_exchange(channel, topic, 'fanout')
         producer = Producer(channel, exchange=exchange, auto_declare=False)
         producer.publish(data)
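get_connection and declare_exchange are helpers of the surrounding class and are not shown. A minimal sketch of what the declare step might look like (the helper name and durability flag are assumptions):

from kombu import Exchange

def declare_exchange(channel, topic, type_):
    # Bind the exchange to the channel and declare it on the broker.
    exchange = Exchange(topic, type=type_, durable=False)
    bound = exchange(channel)
    bound.declare()
    return bound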
Example #6
def insert_measurement(request):
    if request.method != "POST":
        return HttpResponse(status=405)

    try:
        content = json.loads(request.body)
        if 'measurement_type' in content:
            # get a connection to RabbitMQ broker, create a channel and create a
            # producer for pushing the message to the measurements exchange
            with Connection(settings.BROKER_URL) as conn:
                channel = conn.channel()

                inserter = Producer(
                    exchange=settings.MEASUREMENTS_EXCHANGE,
                    channel=channel,
                    routing_key="measurement." + content['measurement_type']
                )
                inserter.publish(request.body)

                logger.debug("[insertion] New measurement was enqueued: %s", str(content))

                return HttpResponse(status=201)
    except Exception as e:
        logger.debug("[insertion] ERROR! Exception caught in insert_measurement method: %s", e.message)

    return HttpResponse(status=400)
Example #7
def insert_push_notification(request):
    if request.method != "POST":
        return HttpResponse(status=405)

    try:
        content = json.loads(request.body)

        if 'user_id' in content and 'message' in content:
            # get a connection to RabbitMQ broker, create a channel and create a producer for pushing the message to the appropriate CAMI event exchange
            with Connection(settings.BROKER_URL) as conn:
                channel = conn.channel()

                inserter = Producer(
                    exchange=settings.PUSH_NOTIFICATIONS_EXCHANGE,
                    channel=channel,
                    routing_key="push_notification"
                )
                inserter.publish(request.body)

                logger.debug("[insertion] New push notification was enqueued: %s", str(content))

                return HttpResponse(status=201)
    except Exception as e:
        logger.debug("[insertion] ERROR! Exception caught in insert_push_notification method: %s", e.message)

    return HttpResponse(status=400)
Example #8
def send_message(a, b):
    with Connection(broker_url) as conn:
        channel = conn.channel()
        exchange = Exchange(custom_exchange, type='direct')
        producer = Producer(channel, exchange=exchange, serializer='json')
        producer.maybe_declare(exchange)
        producer.publish(routing_key=custom_rk, body={'a': a, 'b': b})
Example #9
class FanoutPublisher(PluginBase):

    def __init__(self):

        if app.debug:
            setup_logging(loglevel='DEBUG', loggers=[''])

        self.connection = BrokerConnection(app.config['AMQP_URL'])
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s', app.config['AMQP_URL'], e)
            raise RuntimeError

        self.channel = self.connection.channel()
        self.exchange_name = app.config['AMQP_TOPIC']

        self.exchange = Exchange(name=self.exchange_name, type='fanout', channel=self.channel)
        self.producer = Producer(exchange=self.exchange, channel=self.channel)

        LOG.info('Configured fanout publisher on topic "%s"', app.config['AMQP_TOPIC'])

    def pre_receive(self, alert):

        return alert

    def post_receive(self, alert):

        LOG.info('Sending message %s to AMQP topic "%s"', alert.get_id(), app.config['AMQP_TOPIC'])
        LOG.debug('Message: %s', alert.get_body())

        self.producer.publish(alert.get_body(), declare=[self.exchange], retry=True)
Example #10
 def test_manual_declare(self):
     channel = self.connection.channel()
     p = Producer(channel, self.exchange, auto_declare=False)
     self.assertTrue(p.exchange.is_bound)
     self.assertNotIn("exchange_declare", channel, "auto_declare=False does not declare exchange")
     p.declare()
     self.assertIn("exchange_declare", channel, "p.declare() declares exchange")
Example #11
 def test_helper_sync_send_msg(self, exchange, ex_rk, send_rk, payload):
     ex = Exchange(exchange, 'topic')
     queue = Queue(exchange=ex, routing_key=ex_rk + '.*', exclusive=True, channel=self.__connection)
     queue.declare()
     prod = Producer(self.__connection, exchange=ex, routing_key=send_rk)
     prod.publish(payload)
     return queue
Example #12
 def test_revive(self):
     chan = self.connection.channel()
     p = Producer(chan)
     chan2 = self.connection.channel()
     p.revive(chan2)
     assert p.channel is chan2
     assert p.exchange.channel is chan2
Example #13
    def publish(self, message):
        """Publishes a pulse message to the proper exchange."""

        if not self.exchange:
            raise InvalidExchange(self.exchange)

        if not message:
            raise MalformedMessage(message)

        message._prepare()

        if not self.connection:
            self.connect()

        producer = Producer(channel=self.connection,
                            exchange=Exchange(self.exchange, type='topic'),
                            routing_key=message.routing_key)

        # The message is actually a simple envelope format with a payload and
        # some metadata.
        final_data = {}
        final_data['payload'] = message.data
        final_data['_meta'] = message.metadata.copy()
        final_data['_meta'].update({
            'exchange': self.exchange,
            'routing_key': message.routing_key,
            'serializer': self.config.serializer,
            'sent': time_to_string(datetime.now(timezone(self.config.broker_timezone)))
        })

        producer.publish(final_data, serializer=self.config.serializer)
Example #14
  def __init__(self, cmd_q=None, reply_q=None):
    threading.Thread.__init__(self)
    self.settings = ConfigParser.ConfigParser()
    self.settings.read('../config/site.ini')
    self.rabbitmqUsername = self.settings.get('rabbitmq', 'username')
    self.rabbitmqPassword = self.settings.get('rabbitmq', 'password')
    self.rabbitmqHost = self.settings.get('rabbitmq', 'host')
    self.conn = Connection('amqp://'+self.rabbitmqUsername+':'+self.rabbitmqPassword+'@'+self.rabbitmqHost+':5672//')
    self.producer = Producer(self.conn.channel(), exchange = Exchange('eyezon.status', type='fanout'), serializer="json")
    self.rpcProducer= Producer(self.conn.channel(), serializer="json")

    self.cmd_q = cmd_q or Queue.Queue()
    self.reply_q = reply_q or Queue.Queue()

    queue = kombu.Queue(
        name="eyezon.cmd",
        exchange=Exchange('eyezon.cmd'),
        channel=self.conn.channel(),
        durable=False,
        exclusive=False,
        auto_delete=True)
    self.consumer = Consumer(self.conn.channel(), queues = queue, auto_declare=True, callbacks=[self.send_cmd])
    self.consumer.consume(no_ack=True)

    self.alarmCache = {
      "zoneTimerDump": None,
      "keypadUpdate": None,
      "zoneStateChange": None,
      "partitionStateChange": None,
      "realtimeCIDEvent": None
    }
Example #15
File: emit.py Project: pcreech/pulp
def send(document, routing_key=None):
    """
    Attempt to send a message to the AMQP broker.

    If we cannot obtain a new connection then the message will be dropped. Note
    that we do not block when waiting for a connection.

    :param document: the taskstatus Document we want to send
    :type  document: mongoengine.Document
    :param routing_key: The routing key for the message
    :type  routing_key: str
    """

    # if the user has not enabled notifications, just bail
    event_notifications_enabled = config.getboolean("messaging", "event_notifications_enabled")
    if not event_notifications_enabled:
        return

    try:
        payload = document.to_json()
    except TypeError:
        _logger.warn("unable to convert document to JSON; event message not sent")
        return

    broker_url = config.get("messaging", "event_notification_url")

    notification_topic = Exchange(name=DEFAULT_EXCHANGE_NAME, type="topic")

    with Connection(broker_url) as connection:
        producer = Producer(connection)
        producer.maybe_declare(notification_topic)
        producer.publish(payload, exchange=notification_topic, routing_key=routing_key)
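The docstring promises a non-blocking connection attempt, but the snippet itself does not show how that is configured. One hedged way to bound the wait (the URL and timeout value are assumptions, not Pulp's settings) is a connect timeout on the Connection:

from kombu import Connection

# connect_timeout caps how long establishing the connection may block,
# so a dead broker surfaces as an error quickly instead of hanging.
with Connection('amqp://guest:guest@localhost:5672//',
                connect_timeout=1) as connection:
    connection.connect()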
Example #16
    def send(self, topic, message):
        """Publishes a pulse message to the proper exchange."""

        if not message:
            Log.error("Expecting a message")

        message._prepare()

        if not self.connection:
            self.connect()

        producer = Producer(
            channel=self.connection,
            exchange=Exchange(self.settings.exchange, type='topic'),
            routing_key=topic
        )

        # The message is actually a simple envelope format with a payload and
        # some metadata.
        final_data = Dict(
            payload=message.data,
            _meta=set_default({
                'exchange': self.settings.exchange,
                'routing_key': message.routing_key,
                'serializer': self.settings.serializer,
                'sent': time_to_string(datetime.datetime.now(timezone(self.settings.broker_timezone))),
                'count': self.count
            }, message.metadata)
        )

        producer.publish(jsons.scrub(final_data), serializer=self.settings.serializer)
        self.count += 1
Example #17
    def test_filter(self):
        harvest_msg = {
            "id": "test:2",
            "type": "twitter_filter",
            "seeds": [
                {
                    "token": "obama"
                }
            ],
            "credentials": {
                "consumer_key": tests.TWITTER_CONSUMER_KEY,
                "consumer_secret": tests.TWITTER_CONSUMER_SECRET,
                "access_token": tests.TWITTER_ACCESS_TOKEN,
                "access_token_secret": tests.TWITTER_ACCESS_TOKEN_SECRET
            },
            "collection": {
                "id": "test_collection",
                "path": self.collection_path

            }
        }
        with self._create_connection() as connection:
            bound_exchange = self.exchange(connection)
            producer = Producer(connection, exchange=bound_exchange)
            producer.publish(harvest_msg, routing_key="harvest.start.twitter.twitter_filter")

            # Wait 30 seconds
            time.sleep(30)

            # Send stop message
            harvest_stop_msg = {
                "id": "test:2",
            }
            producer.publish(harvest_stop_msg, routing_key="harvest.stop.twitter.twitter_filter")

            # Now wait for result message.
            counter = 0
            message_obj = None
            bound_result_queue = self.result_queue(connection)
            while counter < 180 and not message_obj:
                time.sleep(.5)
                message_obj = bound_result_queue.get(no_ack=True)
                counter += 1
            self.assertIsNotNone(message_obj, "Timed out waiting for result at {}.".format(datetime.now()))
            result_msg = message_obj.payload
            # Matching ids
            self.assertEqual("test:2", result_msg["id"])
            # Success
            self.assertEqual("completed success", result_msg["status"])
            # Some tweets
            self.assertTrue(result_msg["summary"]["tweet"])

            # Web harvest message.
            bound_web_harvest_queue = self.web_harvest_queue(connection)
            message_obj = bound_web_harvest_queue.get(no_ack=True)
            self.assertIsNotNone(message_obj, "No web harvest message.")
            web_harvest_msg = message_obj.payload
            # Some seeds
            self.assertTrue(len(web_harvest_msg["seeds"]))
Example #18
def main():
    filename = "meta"
    fptr = open(filename, "r")
    amqpurl = fptr.readline().strip()
    exchange_name = fptr.readline().strip()

    exchange = Exchange(exchange_name, type="direct")
    D_queue = Queue(exchange_name, exchange, routing_key=exchange_name, auto_delete=False, exclusive=False)


    connection = BrokerConnection(amqpurl)
    print(amqpurl)
    channel = connection.channel()

    queue = D_queue(channel)
    queue.declare()
    producer = Producer(channel, exchange, routing_key=exchange_name)

    message_count = int(sys.argv[1])
    imgsize = int(sys.argv[2])
    name = sys.argv[3]

    s3url = ""
    if 'S3_URL' in os.environ:
        s3url = os.environ['S3_URL']
    s3id = os.environ['EC2_ACCESS_KEY']
    s3pw = os.environ['EC2_SECRET_KEY']

    n = datetime.now()
    print "XXX starting %s" % (str(n))

    msg_list = []
    dashi_name = str(uuid.uuid4()).split('-')[0]
    for i in range(0, message_count):
        msg = {'program': 'python node2.py %d %d %d' % (i, message_count, imgsize),
                'rank': i,
                's3url': s3url,
                's3id': s3id,
                's3pw': s3pw,
                'testname': name,
                'dashiname': dashi_name}
        msg_list.append(msg)
    random.shuffle(msg_list)

    print "Add the messages to the queue..."
    for msg in msg_list:
        print "%s %d of %d" % (msg['testname'], msg['rank'], message_count)
        sys.stdout.flush()
        producer.publish(msg,
                         exchange=exchange,
                         routing_key=exchange_name,
                         serializer="json")

    dashi = get_dashi_connection(amqpurl, dashi_name)
    p_con = get_phantom_con(s3id, s3pw)
    wait_till_done(dashi, message_count, p_con, name)

    n = datetime.now()
    print "XXX done %s" % (str(n))
Example #19
 def test_prepare(self):
     message = {u'the quick brown fox': u'jumps over the lazy dog'}
     channel = self.connection.channel()
     p = Producer(channel, self.exchange, serializer='json')
     m, ctype, cencoding = p._prepare(message, headers={})
     self.assertDictEqual(message, anyjson.loads(m))
     self.assertEqual(ctype, 'application/json')
     self.assertEqual(cencoding, 'utf-8')
Example #20
 def test_prepare(self):
     message = {"the quick brown fox": "jumps over the lazy dog"}
     channel = self.connection.channel()
     p = Producer(channel, self.exchange, serializer="json")
     m, ctype, cencoding = p._prepare(message, headers={})
     self.assertDictEqual(message, anyjson.loads(m))
     self.assertEqual(ctype, "application/json")
     self.assertEqual(cencoding, "utf-8")
Example #21
 def test_prepare(self):
     message = {'the quick brown fox': 'jumps over the lazy dog'}
     channel = self.connection.channel()
     p = Producer(channel, self.exchange, serializer='json')
     m, ctype, cencoding = p._prepare(message, headers={})
     assert json.loads(m) == message
     assert ctype == 'application/json'
     assert cencoding == 'utf-8'
Example #22
    def send_msg(self, msg, exch, routing_key):
        if not self.connection:
            raise MissingConfiguration("Missing connection!")

        from kombu import Exchange, Producer
        exch = Exchange(exch, type='topic')
        prod = Producer(self.connection, exchange=exch)
        prod.publish(msg, routing_key=routing_key)
Example #23
def kpublish(url, queue_name, refdes, particles):
    from kombu import Connection, Exchange, Producer, Queue
    headers = {'sensor': refdes, 'deliveryType': 'streamed'}
    with Connection(url) as conn:
        exchange = Exchange('amq.direct', type='direct')
        queue = Queue(name=queue_name, exchange=exchange, routing_key=queue_name)
        producer = Producer(conn, exchange=exchange, routing_key=queue_name)
        producer.publish(json.dumps(particles), content_encoding='ascii', content_type='text/plain',
                         headers=headers, declare=[queue], user_id='guest')
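Passing declare=[queue] makes publish() declare and bind the queue before the message is sent, so a consumer can attach later without losing it. A reduced sketch of the same pattern (the exchange and queue names are made up for illustration):

from kombu import Connection, Exchange, Producer, Queue

with Connection('amqp://guest:guest@localhost:5672//') as conn:
    exchange = Exchange('particle_stream', type='direct')
    queue = Queue(name='particles', exchange=exchange, routing_key='particles')
    producer = Producer(conn, exchange=exchange, routing_key='particles')
    # declare=[queue] declares the queue (and its binding) at publish time.
    producer.publish('payload', declare=[queue])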
Example #24
def send_increment_upvotes(for_post_id):
    """Send a message for incrementing the click count for an URL."""
    exchange = Exchange("test", type="direct")
    queue = Queue("test", exchange, routing_key="test")
    connection = establish_connection()
    channel = connection.channel()
    queue(channel).declare()  # ensure the queue exists and is bound
    producer = Producer(channel, exchange, routing_key="test")
    producer.publish(str(for_post_id))
    connection.close()
Example #25
class KombuLogger(object):
    def __init__(self, host="localhost", user="******", password="******", vhost="/", exchange="analytics"):
        self.connection = BrokerConnection(host, user, password, vhost)
        self.channel = self.connection.channel()
        self.exchange = Exchange(exchange, "topic", durable=True, auto_delete=False)
        self.producer = Producer(self.channel, exchange=self.exchange, serializer="json")
    
    def write(self, event, timestamp, attributes):
        self.producer.publish({"event": event, "ts": timestamp, "attr": attributes}, routing_key=event)
Example #26
 def test_manual_declare(self):
     channel = self.connection.channel()
     p = Producer(channel, self.exchange, auto_declare=False)
     assert p.exchange.is_bound
     # auto_declare=False does not declare exchange
     assert 'exchange_declare' not in channel
     # p.declare() declares the exchange
     p.declare()
     assert 'exchange_declare' in channel
Example #27
    def test_search(self):
        harvest_msg = {
            "id": "test:1",
            "type": "twitter_search",
            "seeds": [
                {
                    "token": "gwu"
                }
            ],
            "credentials": {
                "consumer_key": tests.TWITTER_CONSUMER_KEY,
                "consumer_secret": tests.TWITTER_CONSUMER_SECRET,
                "access_token": tests.TWITTER_ACCESS_TOKEN,
                "access_token_secret": tests.TWITTER_ACCESS_TOKEN_SECRET
            },
            "collection": {
                "id": "test_collection",
                "path": self.collection_path

            }
        }
        with self._create_connection() as connection:
            bound_exchange = self.exchange(connection)
            producer = Producer(connection, exchange=bound_exchange)
            producer.publish(harvest_msg, routing_key="harvest.start.twitter.twitter_search")

            # Now wait for result message.
            counter = 0
            bound_result_queue = self.result_queue(connection)
            message_obj = None
            while counter < 240 and not message_obj:
                time.sleep(.5)
                message_obj = bound_result_queue.get(no_ack=True)
                counter += 1
            self.assertTrue(message_obj, "Timed out waiting for result at {}.".format(datetime.now()))
            result_msg = message_obj.payload
            # Matching ids
            self.assertEqual("test:1", result_msg["id"])
            # Success
            self.assertEqual("completed success", result_msg["status"])
            # Some tweets
            self.assertTrue(result_msg["summary"]["tweet"])

            # Web harvest message.
            bound_web_harvest_queue = self.web_harvest_queue(connection)
            message_obj = bound_web_harvest_queue.get(no_ack=True)
            # method_frame, header_frame, web_harvest_body = self.channel.basic_get(self.web_harvest_queue)
            self.assertIsNotNone(message_obj, "No web harvest message.")
            web_harvest_msg = message_obj.payload
            # Some seeds
            self.assertTrue(len(web_harvest_msg["seeds"]))

            # Warc created message.
            # method_frame, header_frame, warc_created_body = self.channel.basic_get(self.warc_created_queue)
            bound_warc_created_queue = self.warc_created_queue(connection)
            message_obj = bound_warc_created_queue.get(no_ack=True)
            self.assertIsNotNone(message_obj, "No warc created message.")
Example #28
 def _publish(sender: Producer) -> None:
     logger.debug("Send message {body} to broker {amqpuri} with routing key {routing_key}"
                  .format(body=message, amqpuri=self._amqp_uri, routing_key=message.header.topic))
     sender.publish(message.body.value,
                    headers=_build_message_header(message),
                    exchange=self._exchange,
                    serializer='json',   # todo: fix this for the mime type of the message
                    routing_key=message.header.topic,
                    declare=[self._exchange])
Example #29
File: views.py Project: sp00/kral
def exchange_send(data,exchange):
    try:
        connection = BrokerConnection()
        channel = connection.channel()
        producer = Producer(channel, Exchange(exchange, type="fanout"))
        producer.publish(data)
        channel.close()
        connection.close()
    except Exception as error:
        print(error)
Example #30
def __send_message_to_conductor(node_id, command, connection, exchange, routing_key):
    '''
    return should be None unless you are Unit Testing with Connection which uses Mock Transport
    '''
    message = {}
    message[Constants.MESSAGE_NODE_ID] = node_id
    message[Constants.MESSAGE_ACK_COMMAND] = command
    logger.debug('Publishing Message to routing_key[' + routing_key + '] command[' + command + ']')
    producer = Producer(connection, serializer='json')
    return producer.publish(message, exchange=exchange, routing_key=routing_key)
Example #31
    def test_produce_consume(self):
        producer_channel = self.p.channel()
        consumer_channel = self.c.channel()
        producer = Producer(producer_channel, self.e)
        consumer1 = Consumer(consumer_channel, self.q)
        consumer2 = Consumer(consumer_channel, self.q2)
        self.q2(consumer_channel).declare()

        for i in range(10):
            producer.publish({'foo': i},
                             routing_key='test_transport_filesystem')
        for i in range(10):
            producer.publish({'foo': i},
                             routing_key='test_transport_filesystem2')

        _received1 = []
        _received2 = []

        def callback1(message_data, message):
            _received1.append(message)
            message.ack()

        def callback2(message_data, message):
            _received2.append(message)
            message.ack()

        consumer1.register_callback(callback1)
        consumer2.register_callback(callback2)

        consumer1.consume()
        consumer2.consume()

        while 1:
            if len(_received1) + len(_received2) == 20:
                break
            self.c.drain_events()

        self.assertEqual(len(_received1) + len(_received2), 20)

        # compression
        producer.publish({'compressed': True},
                         routing_key='test_transport_filesystem',
                         compression='zlib')
        m = self.q(consumer_channel).get()
        self.assertDictEqual(m.payload, {'compressed': True})

        # queue.delete
        for i in range(10):
            producer.publish({'foo': i},
                             routing_key='test_transport_filesystem')
        self.assertTrue(self.q(consumer_channel).get())
        self.q(consumer_channel).delete()
        self.q(consumer_channel).declare()
        self.assertIsNone(self.q(consumer_channel).get())

        # queue.purge
        for i in range(10):
            producer.publish({'foo': i},
                             routing_key='test_transport_filesystem2')
        self.assertTrue(self.q2(consumer_channel).get())
        self.q2(consumer_channel).purge()
        self.assertIsNone(self.q2(consumer_channel).get())
Example #32
from kombu import Connection, Exchange, Producer, Queue, Consumer
rabbit_url = "redis://localhost:6379/"
print("rabbit_url: ", rabbit_url)
conn = Connection(rabbit_url)
print("conn: ", conn)

channel = conn.channel()
print("channel: ", channel)

exchange = Exchange("example-exchange", type="direct")
print("exchange: ", exchange)

producer = Producer(exchange=exchange, channel=channel, routing_key='BOB')
print("producer: ", producer)

queue = Queue(name="example-queue", exchange=exchange, routing_key='BOB')
print("queue: ", queue, "\n", queue.maybe_bind(conn), queue.declare())
queue.maybe_bind(conn)
queue.declare()

producer.publish("Hello there")
print("msg: ", producer.publish("Hello there"))

def process_body(body, message):
    print("Message: ", body)
    message.ack()


with Consumer(conn, queues=queue, callbacks=[process_body], accept=["text/plain"]):
    print("consumer: ", conn)
    conn.drain_events(timeout=2)
Example #33
def send_message(conn):
    producer = Producer(conn)
    producer.publish('hello world', exchange=exchange, routing_key='asynt')
    print('MESSAGE SENT')
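The snippet relies on a module-level exchange that is not shown. A self-contained version might look like this (the exchange name is an assumption):

from kombu import Connection, Exchange, Producer

exchange = Exchange('asynt_exchange', type='direct')

def send_message(conn):
    producer = Producer(conn)
    # declare=[exchange] ensures the exchange exists before publishing.
    producer.publish('hello world', exchange=exchange, routing_key='asynt',
                     declare=[exchange])
    print('MESSAGE SENT')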
Example #34
 def test_no_exchange(self):
     chan = self.connection.channel()
     p = Producer(chan)
     self.assertFalse(p.exchange.name)
Example #35
            def handle_exabgp_msg(bgp_message):
                redis.set("exabgp_seen_bgp_update", "1", ex=MON_TIMEOUT_LAST_BGP_UPDATE)
                msg = {
                    "type": bgp_message["type"],
                    "communities": bgp_message.get("communities", []),
                    "timestamp": float(bgp_message["timestamp"]),
                    "path": bgp_message.get("path", []),
                    "service": "exabgp|{}".format(host),
                    "prefix": bgp_message["prefix"],
                    "peer_asn": int(bgp_message["peer_asn"]),
                }

                this_prefix = msg["prefix"]
                ip_version = get_ip_version(this_prefix)
                if this_prefix in prefix_tree[ip_version]:
                    try:
                        if validator.validate(msg):
                            msgs = normalize_msg_path(msg)
                            for msg in msgs:
                                key_generator(msg)
                                log.debug(msg)
                                if autoconf:
                                    try:
                                        if learn_neighbors:
                                            msg["learn_neighbors"] = True
                                        shared_memory_locks[
                                            "autoconf_updates"
                                        ].acquire()
                                        autoconf_updates = (
                                            self.shared_memory_manager_dict[
                                                "autoconf_updates"
                                            ]
                                        )
                                        autoconf_updates[msg["key"]] = msg
                                        self.shared_memory_manager_dict[
                                            "autoconf_updates"
                                        ] = autoconf_updates
                                        # mark the autoconf BGP updates for configuration
                                        # processing in redis
                                        redis_pipeline = redis.pipeline()
                                        redis_pipeline.sadd(
                                            "autoconf-update-keys-to-process",
                                            msg["key"],
                                        )
                                        redis_pipeline.execute()
                                    except Exception:
                                        log.exception("exception")
                                    finally:
                                        shared_memory_locks[
                                            "autoconf_updates"
                                        ].release()
                                else:
                                    with Producer(self.connection) as producer:
                                        producer.publish(
                                            msg,
                                            exchange=self.update_exchange,
                                            routing_key="update",
                                            serializer="ujson",
                                        )
                        else:
                            log.debug("Invalid format message: {}".format(msg))
                    except BaseException:
                        log.exception(
                            "Error when normalizing BGP message: {}".format(msg)
                        )
Example #36
 def test_repr(self):
     p = Producer(self.connection)
     self.assertTrue(repr(p))
Example #37
        item_state = item['state']
        item_type = item['type']
        item_name = item['name']
        item_id = item_name


        message = {
            'item_id' : item_id,
            'item_name' : item_name,
            'item_type' : item_type,
            'item_state' : item_state
        }

        producer_connection.ensure_connection()
        with Producer(producer_connection) as producer:
            producer.publish(
                json.dumps(message),
                exchange=exchange.name,
                routing_key='data_source.to.event_generator_2',
                retry=True
            )

        print ("Send event to Rule Engine: " + 'data_source.to.event_generator_2')

    print ("\n\n")

    time.sleep(5)

# clientMQTT = mqtt.Client()
# clientMQTT.connect("192.168.60.197")
Example #38
def parse_ripe_ris(connection, prefixes_file, hosts):
    exchange = Exchange("bgp-update",
                        channel=connection,
                        type="direct",
                        durable=False)
    exchange.declare()

    prefixes = load_json(prefixes_file)
    assert prefixes is not None
    prefix_tree = radix.Radix()
    for prefix in prefixes:
        prefix_tree.add(prefix)

    ris_suffix = os.getenv("RIS_ID", "my_as")

    validator = mformat_validator()
    with Producer(connection) as producer:
        while True:
            try:
                events = requests.get(
                    "https://ris-live.ripe.net/v1/stream/?format=json&client=artemis-{}"
                    .format(ris_suffix),
                    stream=True,
                )
                # http://docs.python-requests.org/en/latest/user/advanced/#streaming-requests
                iterator = events.iter_lines()
                next(iterator)
                for data in iterator:
                    try:
                        parsed = json.loads(data)
                        msg = parsed["data"]
                        if "type" in parsed and parsed["type"] == "ris_error":
                            log.error(msg)
                        # also check if ris host is in the configuration
                        elif ("type" in msg and msg["type"] == "UPDATE"
                              and (not hosts or msg["host"] in hosts)):
                            norm_ris_msgs = normalize_ripe_ris(
                                msg, prefix_tree)
                            for norm_ris_msg in norm_ris_msgs:
                                if validator.validate(norm_ris_msg):
                                    norm_path_msgs = normalize_msg_path(
                                        norm_ris_msg)
                                    for norm_path_msg in norm_path_msgs:
                                        key_generator(norm_path_msg)
                                        log.debug(norm_path_msg)
                                        producer.publish(
                                            norm_path_msg,
                                            exchange=exchange,
                                            routing_key="update",
                                            serializer="json",
                                        )
                                else:
                                    log.warning(
                                        "Invalid format message: {}".format(
                                            msg))
                    except Exception:
                        log.exception("exception")
                log.warning(
                    "Iterator ran out of data; the connection will be retried")
            except Exception:
                log.exception("server closed connection")
                time.sleep(5)
Example #39
def run_bgpstream(
    prefixes_file=None,
    kafka_host=None,
    kafka_port=None,
    kafka_topic="openbmp.bmp_raw",
    start=0,
    end=0,
):
    """
    Retrieve all records related to a list of prefixes
    https://bgpstream.caida.org/docs/api/pybgpstream/_pybgpstream.html

    :param prefixes_file: <str> input prefix json
    :param kafka_host: <str> kafka host
    :param kafka_port: <int> kafka_port
    :param kafka_topic: <str> kafka topic
    :param start: <int> start timestamp in UNIX epochs
    :param end: <int> end timestamp in UNIX epochs (if 0 --> "live mode")

    :return: -
    """

    prefixes = load_json(prefixes_file)
    assert prefixes is not None

    # create a new bgpstream instance and a reusable bgprecord instance
    stream = _pybgpstream.BGPStream()

    # set kafka data interface
    stream.set_data_interface("kafka")

    # set host connection details
    stream.set_data_interface_option("kafka", "brokers",
                                     "{}:{}".format(kafka_host, kafka_port))

    # set topic
    stream.set_data_interface_option("kafka", "topic", kafka_topic)

    # filter prefixes
    for prefix in prefixes:
        stream.add_filter("prefix", prefix)

    # filter record type
    stream.add_filter("record-type", "updates")

    # filter based on timing (if end=0 --> live mode)
    stream.add_interval_filter(start, end)

    # set live mode
    stream.set_live_mode()

    # start the stream
    stream.start()

    with Connection(RABBITMQ_URI) as connection:
        exchange = Exchange("bgp-update",
                            channel=connection,
                            type="direct",
                            durable=False)
        exchange.declare()
        producer = Producer(connection)
        validator = mformat_validator()
        while True:
            # get next record
            try:
                rec = stream.get_next_record()
            except BaseException:
                continue
            if (rec.status != "valid") or (rec.type != "update"):
                continue

            # get next element
            try:
                elem = rec.get_next_elem()
            except BaseException:
                continue

            while elem:
                if elem.type in {"A", "W"}:
                    redis.set(
                        "bgpstreamkafka_seen_bgp_update",
                        "1",
                        ex=int(
                            os.getenv(
                                "MON_TIMEOUT_LAST_BGP_UPDATE",
                                DEFAULT_MON_TIMEOUT_LAST_BGP_UPDATE,
                            )),
                    )
                    this_prefix = str(elem.fields["prefix"])
                    service = "bgpstreamkafka|{}".format(str(rec.collector))
                    type_ = elem.type
                    if type_ == "A":
                        as_path = elem.fields["as-path"].split(" ")
                        communities = [{
                            "asn": int(comm.split(":")[0]),
                            "value": int(comm.split(":")[1]),
                        } for comm in elem.fields["communities"]]
                    else:
                        as_path = []
                        communities = []
                    timestamp = float(rec.time)
                    peer_asn = elem.peer_asn

                    for prefix in prefixes:
                        base_ip, mask_length = this_prefix.split("/")
                        our_prefix = IPNetwork(prefix)
                        if (IPAddress(base_ip) in our_prefix
                                and int(mask_length) >= our_prefix.prefixlen):
                            msg = {
                                "type": type_,
                                "timestamp": timestamp,
                                "path": as_path,
                                "service": service,
                                "communities": communities,
                                "prefix": this_prefix,
                                "peer_asn": peer_asn,
                            }
                            if validator.validate(msg):
                                msgs = normalize_msg_path(msg)
                                for msg in msgs:
                                    key_generator(msg)
                                    log.debug(msg)
                                    producer.publish(
                                        msg,
                                        exchange=exchange,
                                        routing_key="update",
                                        serializer="ujson",
                                    )
                            else:
                                log.warning(
                                    "Invalid format message: {}".format(msg))
                            break
                try:
                    elem = rec.get_next_elem()
                except BaseException:
                    continue
Example #40
 def test_prepare_not_callable(self):
     x = Producer(Mock)
     self.pool.prepare(x)
Example #41
 def test_publish_retry_calls_ensure(self):
     p = Producer(Mock())
     p._connection = Mock()
     ensure = p.connection.ensure = Mock()
     p.publish('foo', exchange='foo', retry=True)
     self.assertTrue(ensure.called)
Example #42
 def test_pickle(self):
     chan = Mock()
     producer = Producer(chan, serializer='pickle')
     p2 = pickle.loads(pickle.dumps(producer))
     self.assertEqual(p2.serializer, producer.serializer)
Example #43
    def test_produce_consume(self):
        channel = self.c.channel()
        producer = Producer(channel, self.e)
        consumer1 = Consumer(channel, self.q)
        consumer2 = Consumer(channel, self.q2)
        self.q2(channel).declare()

        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory')
        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory2')

        _received1 = []
        _received2 = []

        def callback1(message_data, message):
            _received1.append(message)
            message.ack()

        def callback2(message_data, message):
            _received2.append(message)
            message.ack()

        consumer1.register_callback(callback1)
        consumer2.register_callback(callback2)

        consumer1.consume()
        consumer2.consume()

        while 1:
            if len(_received1) + len(_received2) == 20:
                break
            self.c.drain_events()

        assert len(_received1) + len(_received2) == 20

        # compression
        producer.publish({'compressed': True},
                         routing_key='test_transport_memory',
                         compression='zlib')
        m = self.q(channel).get()
        assert m.payload == {'compressed': True}

        # queue.delete
        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory')
        assert self.q(channel).get()
        self.q(channel).delete()
        self.q(channel).declare()
        assert self.q(channel).get() is None

        # queue.purge
        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory2')
        assert self.q2(channel).get()
        self.q2(channel).purge()
        assert self.q2(channel).get() is None
Example #44
 def test_no_channel(self):
     p = Producer(None)
     self.assertFalse(p._channel)
Example #45
 def producer(self):
     return Producer(self.connection)
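Constructing a Producer per call works because Producer accepts a Connection and resolves the default channel lazily. kombu also ships a producer pool that reuses channels; a hedged alternative sketch (placeholder URL and routing key):

from kombu import Connection
from kombu.pools import producers

connection = Connection('amqp://guest:guest@localhost:5672//')

# Acquire a pooled producer instead of constructing one per publish.
with producers[connection].acquire(block=True) as producer:
    producer.publish({'ping': 1}, routing_key='demo')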
Example #46
from kombu import Connection, Exchange, Queue, Producer

rabbit_url = "amqp://localhost:5672/"

conn = Connection(rabbit_url)

channel = conn.channel()

exchange = Exchange("example-exchange", type="direct")

producer = Producer(exchange=exchange, channel=channel, routing_key="BOB")

queue = Queue(name="example-queue", exchange=exchange, routing_key="BOB")
queue.maybe_bind(conn)
queue.declare()

producer.publish("Hello there!")
Example #47
'''
connection = Connection('amqp://*****:*****@192.168.3.82:5672//')
channel = connection.channel()

message=Message(channel,body='Hello Kombu')

# produce
producer = Producer(channel,exchange=task_exchange)
producer.publish(message.body,routing_key='suo_piao')
'''
# submit_task
exchange = Exchange('submit_task')
queue = Queue('submit_task', exchange, routing_key='submit_task')

#f =  open("input_seed.json")
f = open("input_test1.json")
with Connection('amqp://*****:*****@192.168.3.82:5672//') as connection, f:
    producer = Producer(connection)
    message = json.loads(f.read())
    for i in range(1):
        hash_id = str(uuid.uuid1())
        print(hash_id)
        message['params']['url']['hash'] = "url_hash#" + hash_id
        message['params']['thunder_hash'] = "thunder_hash#" + hash_id
        message = json.dumps(message)
        producer.publish(message,
                         exchange=exchange,
                         declare=[queue],
                         routing_key='submit_task',
                         serializer='json')
        message = json.loads(message)
Example #48
class ReservationWorker(ConsumerMixin):
    """This class represents the background worker that listens for tasks in RabbitMQ queue called
        conf['RESERVATION_WORKER_QUEUE_NAME'].
        It should be run by background.py file.
    """
    class ExecutionException(Exception):
        """Exception raised during execution of function"""
        def __init__(self, message=''):
            super(ReservationWorker.ExecutionException, self).__init__(message)

    def __init__(self, connection, logger):
        from gooutsafe.comm import conf
        self.logger = logger
        self.connection = connection
        self.queues = [Queue(conf['RESERVATION_WORKER_QUEUE_NAME'])]
        self.producer = Producer(Connection(conf['RABBIT_MQ_URL']))

    def get_consumers(self, consumer, channel):
        return [consumer(queues=self.queues, callbacks=[self.on_message])]

    def stop(self):
        self.should_stop = True

    def on_message(self, body, message):
        self.logger.info(
            'Received new message in the queue for reservations worker')

        response_object = None
        try:
            message_object = json.loads(body)
        except ValueError as ve:
            self.logger.error('Cannot parse JSON object, %s' % ve)
            response_object = dict(status='Cannot parse JSON object')
        except TypeError as te:
            self.logger.error('Cannot parse JSON object, %s' % te)
            response_object = dict(status='Cannot parse JSON object')

        if response_object is None:
            try:
                response_object = self.__function_dispatcher(message_object)
            except NotImplementedError:
                self.logger.error('Received invalid operation, message=%s' %
                                  body)
                response_object = dict(status='Invalid Operation Received')
            except self.ExecutionException as ex:
                self.logger.error(ex)
                # exception already logged
                response_object = dict(status='Internal Server Error')

        response = ''
        try:
            response = json.dumps(response_object)
        except ValueError:
            self.logger.error(
                'Cannot dump response_object into a JSON, response_object=%s' %
                response_object)

        # checking if the sender has set the reply_to
        if 'reply_to' not in message.properties:
            self.logger.error(
                'Sender has not set the reply_to, so.. Where should I push the reply?'
            )
            self.logger.error('BTW, the reply is this: %s' % response)
        else:
            self.producer.publish(
                body=response,
                exchange='',
                routing_key=message.properties['reply_to'],
                correlation_id=message.properties['correlation_id'])
            self.logger.info(
                'Published message with routing key=%s and correlation_id=%s' %
                (message.properties['reply_to'],
                 message.properties['correlation_id']))

        message.ack()

    @staticmethod
    def __retrieve_by_customer_id(message):
        from gooutsafe.dao.reservation_manager import ReservationManager
        reservations = ReservationManager.retrieve_by_customer_id(
            message['customer_id'])
        reservations = [
            reservation.serialize() for reservation in reservations
        ]

        return reservations

    @staticmethod
    def __retrieve_all_contact_reservation_by_id(message):
        from gooutsafe.dao.reservation_manager import ReservationManager
        reservations = ReservationManager.retrieve_all_contact_reservation_by_id(
            message['customer_id'])
        reservations = [
            reservation.serialize() for reservation in reservations
        ]

        return reservations

    @staticmethod
    def __retrieve_by_customer_id_in_future(message):
        from gooutsafe.dao.reservation_manager import ReservationManager
        reservations = ReservationManager.retrieve_by_customer_id_in_future(
            message['customer_id'])
        reservations = [
            reservation.serialize() for reservation in reservations
        ]

        return reservations

    @staticmethod
    def __retrieve_by_customer_id_in_last_14_days(message):
        from gooutsafe.dao.reservation_manager import ReservationManager
        reservations = ReservationManager.retrieve_by_customer_id_in_last_14_days(
            message['customer_id'])
        reservations = [
            reservation.serialize() for reservation in reservations
        ]

        return reservations

    def __function_dispatcher(self, message):
        if 'func' not in message or 'customer_id' not in message:
            raise ValueError(
                'Message object does not contain \'func\' or \'customer_id\'')

        try:
            if message['func'] == 'retrieve_by_customer_id':
                return self.__retrieve_by_customer_id(message)
            elif message['func'] == 'retrieve_all_contact_reservation_by_id':
                return self.__retrieve_all_contact_reservation_by_id(message)
            elif message['func'] == 'retrieve_by_customer_id_in_future':
                return self.__retrieve_by_customer_id_in_future(message)
            elif message['func'] == 'retrieve_by_customer_id_in_last_14_days':
                return self.__retrieve_by_customer_id_in_last_14_days(message)
            else:
                raise NotImplementedError('This operation is not implemented')
        except RuntimeError as re:
            self.logger.error(re)
            raise self.ExecutionException(
                'A RuntimeError was raised during execution of function dispatcher'
            )
        except Exception as ex:
            self.logger.error(ex)
            raise self.ExecutionException(
                'An exception was raised during execution of function dispatcher!'
            )
Example #49
iris_path = 'iris/Iris.csv'
queue_list = { "mnist": "mnist-queue", "iris":"iris-queue"}
exchange_list = { "mnist": "exchange-mnist", "iris":"exchange-iris"}

type_of_model = argument_list[1]

url = argument_list[2]
rabbit_url = "amqp://*****:*****@" + url + ":5672/"

queue_name = queue_list[type_of_model]
exchange_name = exchange_list[type_of_model]

conn = Connection(rabbit_url)
channel = conn.channel()
exchange = Exchange(exchange_name, type="direct")
producer = Producer(exchange=exchange, channel=channel, routing_key="BOB")
queue = Queue(name=queue_name, exchange=exchange, routing_key="BOB")
queue.maybe_bind(conn)
queue.declare()
counter = 0
def publish_example(producer):
    producer.publish("Hello there: " + str(counter))
def publish_iris_data(producer):
    with open(iris_path) as f:
        lines = f.readlines()
        r = random.randrange(0, len(lines))
        line = lines[r]
        data = line.split(',')
        body = '{ "signature_name":"predict", "instances": [{"sepal_length":[' + data[0] + '], "sepal_width":[' + data[1] + '], "petal_length":[' + data[2] + '], "petal_width":[' + data[3] + '] }]}'

    producer.publish(body)
Example #50
 def test_set_on_return(self):
     chan = Mock()
     chan.events = defaultdict(Mock)
     p = Producer(ChannelPromise(lambda: chan), on_return='on_return')
      p.channel  # accessing the property resolves the ChannelPromise
     chan.events['basic_return'].add.assert_called_with('on_return')
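The on_return callback fires when the broker returns a message that was published with mandatory=True but could not be routed. A hedged sketch against a real AMQP broker (the names are placeholders, and the callback signature follows py-amqp's basic_return dispatch):

import socket
from kombu import Connection, Exchange, Producer

def on_return(exception, exchange, routing_key, message):
    # Invoked with the error and the returned message.
    print("returned:", exchange, routing_key, exception)

with Connection('amqp://guest:guest@localhost:5672//') as conn:
    exchange = Exchange('demo', type='direct')
    producer = Producer(conn.channel(), on_return=on_return)
    producer.publish({'x': 1}, exchange=exchange, declare=[exchange],
                     routing_key='no.such.binding', mandatory=True)
    try:
        conn.drain_events(timeout=1)  # let the basic.return frame arrive
    except socket.timeout:
        pass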
Example #51
def task_publish_to_core(self=None, publish_node=None):
    """task_publish_to_core

    :param self: parent task object for bind=True
    :param publish_node: dictionary to send to the AntiNex Core Worker
    """
    if settings.ANTINEX_WORKER_ENABLED:

        conn = None
        dataset = publish_node["body"].get("dataset", None)
        predict_rows = publish_node["body"].get("predict_rows", None)

        if not dataset and not predict_rows:
            log.info(
                ("skipping antinex core publish body={} - "
                 "is missing dataset and predict_rows").format(publish_node))
            return None
        # end of checking for supported requests to the core

        log.info(("task_publish_to_core - start req={}").format(
            str(publish_node)[0:32]))

        if not predict_rows:
            log.info(("building predict_rows from dataset={}").format(dataset))
            predict_rows = []
            predict_rows_df = pd.read_csv(dataset)
            for idx, org_row in predict_rows_df.iterrows():
                new_row = json.loads(org_row.to_json())
                new_row["idx"] = len(predict_rows) + 1
                predict_rows.append(new_row)
            # end of building predict rows

            publish_node["body"]["apply_scaler"] = True
            publish_node["body"]["predict_rows"] = pd.DataFrame(
                predict_rows).to_json()
        # end of validating

        publish_node["body"]["ml_type"] = \
            publish_node["body"]["manifest"]["ml_type"]

        log.debug(("NEXCORE - ssl={} exchange={} routing_key={}").format(
            settings.ANTINEX_SSL_OPTIONS, settings.ANTINEX_EXCHANGE_NAME,
            settings.ANTINEX_ROUTING_KEY))

        try:
            if settings.ANTINEX_WORKER_SSL_ENABLED:
                log.debug("connecting with ssl")
                conn = Connection(settings.ANTINEX_AUTH_URL,
                                  login_method="EXTERNAL",
                                  ssl=settings.ANTINEX_SSL_OPTIONS)
            else:
                log.debug("connecting without ssl")
                conn = Connection(settings.ANTINEX_AUTH_URL)
            # end of connecting

            conn.connect()

            log.debug("getting channel")
            channel = conn.channel()

            core_exchange = Exchange(settings.ANTINEX_EXCHANGE_NAME,
                                     type=settings.ANTINEX_EXCHANGE_TYPE,
                                     durable=True)

            log.debug("creating producer")
            producer = Producer(channel=channel,
                                auto_declare=True,
                                serializer="json")

            try:
                log.debug("declaring exchange")
                producer.declare()
            except Exception as k:
                log.error(("declare exchange failed with ex={}").format(k))
            # end of try to declare exchange which can fail if it exists

            core_queue = Queue(settings.ANTINEX_QUEUE_NAME,
                               core_exchange,
                               routing_key=settings.ANTINEX_ROUTING_KEY,
                               durable=True)

            try:
                log.debug("declaring queue")
                core_queue.maybe_bind(conn)
                core_queue.declare()
            except Exception as k:
                log.error(("declare queue={} routing_key={} failed with ex={}"
                           ).format(settings.ANTINEX_QUEUE_NAME,
                                    settings.ANTINEX_ROUTING_KEY, k))
            # end of try to declare queue which can fail if it exists

            log.info(
                ("publishing exchange={} routing_key={} persist={}").format(
                    core_exchange.name, settings.ANTINEX_ROUTING_KEY,
                    settings.ANTINEX_DELIVERY_MODE))

            producer.publish(body=publish_node["body"],
                             exchange=core_exchange.name,
                             routing_key=settings.ANTINEX_ROUTING_KEY,
                             serializer="json",
                             delivery_mode=settings.ANTINEX_DELIVERY_MODE)

        except Exception as e:
            log.error(("Failed to publish to core req={} with ex={}").format(
                publish_node, e))
        # try/ex

        if conn:
            conn.release()

        log.info("task_publish_to_core - done")
    else:
        log.debug("core - disabled")
    # publish to the core if enabled

    return None
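
A side note on the declare calls in the example above: kombu's Producer.publish can declare the exchange and queue and retry on connection errors by itself, which makes both try/except declare blocks unnecessary. A minimal sketch reusing the names from the example:

producer.publish(
    publish_node["body"],
    exchange=core_exchange,
    routing_key=settings.ANTINEX_ROUTING_KEY,
    serializer="json",
    delivery_mode=settings.ANTINEX_DELIVERY_MODE,
    declare=[core_queue],  # declares the queue, its exchange, and the binding
    retry=True,
    retry_policy={"max_retries": 3},
)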
Example #52
0
from flask import Flask
from flask_socketio import SocketIO
from flask_cors import CORS

from kombu import Connection, Exchange, Producer, Queue
from kombu.asynchronous import Hub

app = Flask(__name__)
app.config['SECRET_KEY'] = 'super-secret'
CORS(app, resources={r"/*": {"origins": "*"}})
socketio = SocketIO(app)

hub = Hub()
exchange = Exchange('jobs')
queue = Queue('task:complete', exchange, 'task:complete')
conn = Connection('amqp://')
conn.register_with_event_loop(hub)
producer = Producer(conn)


@socketio.on("task:start")
def task_start(data):
    print('in socket:task:start')
    socketio.emit('task:started', "Hang tight, working on it.")
    producer.publish('Process this.',
                     exchange=exchange,
                     routing_key='task:start')


# If I use threading instead of eventlet, then socketio.emit is tied to a
# different thread and the client never receives the event
def task_complete(message):
    print('in rabbit:task:complete')
    socketio.emit('task:complete', "All done, here you go: ___")
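
As written above, the connection is registered with the hub but no consumer is attached and the hub is never started, so task_complete is never called. A minimal sketch of the missing wiring (the handler and the background-task startup are assumptions, not part of the original snippet):

from kombu import Consumer

def on_task_complete(body, message):
    task_complete(body)
    message.ack()

consumer = Consumer(conn, queues=[queue], callbacks=[on_task_complete])
consumer.consume()
socketio.start_background_task(hub.run_forever)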
Example #53
0
import socket
import sys
import time

from kombu import Connection, Exchange, Producer, Queue


def get_server_stats():
    # (reconstructed, assumed) the original snippet starts mid-function;
    # a UDP socket is used only to discover the local IP address
    result = {}
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.connect(('8.8.8.8', 1))  # connect() for UDP doesn't send packets
    local_ip_address = s.getsockname()[0]
    result['LocalIP'] = local_ip_address
    '''
    with open(config_file_name) as f:
        for line in f.readlines():
            key, value = line.split('=')
            result[key] = value
    '''
    return result


argument_list = sys.argv

url = argument_list[1]
exchange_name = 'exchange-server-stats'
queue_name = 'server-stats-queue'
#config_file_name = argument_list[4]
rabbit_url = "amqp://*****:*****@" + url + ":5672/"

conn = Connection(rabbit_url)
channel = conn.channel()
exchange = Exchange(exchange_name, type="direct")
producer = Producer(exchange=exchange, channel=channel, routing_key='Stats123')
queue = Queue(name=queue_name, exchange=exchange, routing_key='Stats123')
queue.maybe_bind(conn)
queue.declare()  # make sure the queue exists so published stats aren't dropped
while True:
    msg = get_server_stats()
    print(msg)
    producer.publish(msg)
    time.sleep(60)
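
Because this publisher loops indefinitely, a broker restart would kill it mid-run. kombu's Connection.ensure can wrap the publish with reconnection and retries; a sketch using the same objects as above:

publish = conn.ensure(producer, producer.publish, max_retries=3)
while True:
    publish(get_server_stats())
    time.sleep(60)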
Example #54
0
 def __init__(self, connection, logger):
     from gooutsafe.comm import conf
     self.logger = logger
     self.connection = connection
     self.queues = [Queue(conf['RESERVATION_WORKER_QUEUE_NAME'])]
     self.producer = Producer(Connection(conf['RABBIT_MQ_URL']))
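
Note that the producer in this __init__ is built on a brand-new Connection rather than the injected one. If sharing the injected connection is intended, the one-line alternative would be:

self.producer = Producer(self.connection)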
Example #55
0
 def parse_bgpstreamhist_csvs(self):
     with Connection(RABBITMQ_URI) as connection:
         self.update_exchange = create_exchange(
             "bgp-update", connection, declare=True
         )
         producer = Producer(connection)
         validator = mformat_validator()
         for csv_file in glob.glob("{}/*.csv".format(self.input_dir)):
             try:
                 with open(csv_file, "r") as f:
                     csv_reader = csv.reader(f, delimiter="|")
                     for row in csv_reader:
                         try:
                             if len(row) != 9:
                                 continue
                             if row[0].startswith("#"):
                                 continue
                             # example row:
                             # 139.91.0.0/16|8522|1403|1403 6461 2603 21320 5408 8522|routeviews|route-views2|A|"[{""asn"":1403,""value"":6461}]"|1517446677
                             this_prefix = row[0]
                             if row[6] == "A":
                                 as_path = row[3].split(" ")
                                 communities = json.loads(row[7])
                             else:
                                 as_path = []
                                 communities = []
                             service = "historical|{}|{}".format(row[4], row[5])
                             type_ = row[6]
                             timestamp = float(row[8])
                             peer_asn = int(row[2])
                             for prefix in self.prefixes:
                                 try:
                                     base_ip, mask_length = this_prefix.split("/")
                                     our_prefix = IPNetwork(prefix)
                                     if (
                                         IPAddress(base_ip) in our_prefix
                                         and int(mask_length) >= our_prefix.prefixlen
                                     ):
                                         msg = {
                                             "type": type_,
                                             "timestamp": timestamp,
                                             "path": as_path,
                                             "service": service,
                                             "communities": communities,
                                             "prefix": this_prefix,
                                             "peer_asn": peer_asn,
                                         }
                                         try:
                                             if validator.validate(msg):
                                                 msgs = normalize_msg_path(msg)
                                                 for msg in msgs:
                                                     key_generator(msg)
                                                     log.debug(msg)
                                                     producer.publish(
                                                         msg,
                                                         exchange=self.update_exchange,
                                                         routing_key="update",
                                                         serializer="ujson",
                                                     )
                                                     time.sleep(0.01)
                                             else:
                                                 log.warning(
                                                     "Invalid format message: {}".format(
                                                         msg
                                                     )
                                                 )
                                         except BaseException:
                                             log.exception(
                                                 "Error when normalizing BGP message: {}".format(
                                                     msg
                                                 )
                                             )
                                         break
                                 except Exception:
                                     log.exception("prefix")
                         except Exception:
                             log.exception("row")
             except Exception:
                 log.exception("exception")
Example #56
0
 def provide(self, conn: Connection) -> Producer:
     channel = conn.channel()
     producer = Producer(channel)
     return producer
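
A usage sketch for the factory above (the provider variable and broker URL are made up for illustration):

with Connection("amqp://localhost//") as conn:
    producer = provider.provide(conn)
    producer.publish({"hello": "world"}, routing_key="example", serializer="json")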
Example #57
0
import logging

from celery import Celery
from kombu import Connection, Producer

# Celery configuration
CELERY_BROKER_URL = 'amqp://*****:*****@rabbit:5672/'
CELERY_RESULT_BACKEND = 'rpc://'

# Initialize Celery
celery = Celery('workerB',
                broker=CELERY_BROKER_URL,
                backend=CELERY_RESULT_BACKEND)

# Kombu Connection to message broker
kombuConnection = Connection('amqp://*****:*****@rabbit:5672//')

# Initialization of Producer
producer = Producer(kombuConnection, auto_declare=True)


# Kombu error callback
def errback(exc, interval):
    logging.error('Error: %r', exc, exc_info=1)
    logging.error('Retry in %s seconds', interval)


# Ensuring broker Connection
publish = kombuConnection.ensure(producer,
                                 producer.publish,
                                 errback=errback,
                                 max_retries=3)

workerBjobs = WorkerBJobs(publish)
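
The wrapped publish returned by ensure takes the same arguments as producer.publish; a hypothetical call (routing key and payload are made up for illustration):

publish({"job": 42}, routing_key="workerB", serializer="json")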
Example #58
0
 def exabgp_msg(bgp_message):
     redis.set(
         "exabgp_seen_bgp_update",
         "1",
         ex=int(
             os.getenv(
                 "MON_TIMEOUT_LAST_BGP_UPDATE",
                 DEFAULT_MON_TIMEOUT_LAST_BGP_UPDATE,
             )
         ),
     )
     msg = {
         "type": bgp_message["type"],
         "communities": bgp_message.get("communities", []),
         "timestamp": float(bgp_message["timestamp"]),
         "path": bgp_message.get("path", []),
         "service": "exabgp|{}".format(self.host),
         "prefix": bgp_message["prefix"],
         "peer_asn": int(bgp_message["peer_asn"]),
     }
     for prefix in self.prefixes:
         try:
             base_ip, mask_length = bgp_message["prefix"].split("/")
             our_prefix = IPNetwork(prefix)
             if (IPAddress(base_ip) in our_prefix
                     and int(mask_length) >= our_prefix.prefixlen):
                 try:
                     if validator.validate(msg):
                         msgs = normalize_msg_path(msg)
                         for msg in msgs:
                             key_generator(msg)
                             log.debug(msg)
                             if self.autoconf:
                                 # thread-safe access to the updates dict
                                 lock.acquire()
                                 try:
                                     if self.learn_neighbors:
                                         msg["learn_neighbors"] = True
                                     self.autoconf_updates[msg["key"]] = msg
                                     # mark the autoconf BGP updates for
                                     # configuration processing in redis
                                     redis_pipeline = redis.pipeline()
                                     redis_pipeline.sadd(
                                         "autoconf-update-keys-to-process",
                                         msg["key"],
                                     )
                                     redis_pipeline.execute()
                                 except Exception:
                                     log.exception("exception")
                                 finally:
                                     lock.release()
                             else:
                                 with Producer(connection) as producer:
                                     producer.publish(
                                         msg,
                                         exchange=self.update_exchange,
                                         routing_key="update",
                                         serializer="ujson",
                                     )
                     else:
                         log.warning("Invalid format message: {}".format(msg))
                 except BaseException:
                     log.exception(
                         "Error when normalizing BGP message: {}".format(msg))
                 break
         except Exception:
             log.exception("exception")