def Connect():
    conn = Connection(host=hostname,
                      port=port,
                      userid=user,
                      password=password,
                      virtual_host=vhost)
    channel = conn.channel()
    channel.exchange_declare(exchange_name, 'topic')

    while True:
        s = r.grp['state']

        for x in s:
            arg_body = """Routing Key[%s] ==================================>
XKRX-CS-KR-000252,13:30:48.023942,7,290.9,123.19,90.82,79.62,937.15
XKRX-CS-KR-000253,13:30:48.024171,7,28.84,93.29,67.13,234.64,149.7
""" % x
            arg_rky = s[x]
            print arg_rky, arg_body
            channel.basic_publish(arg_body.replace("KR", x), exchange_name,
                                  arg_rky)

        time.sleep(5)

    channel.close()
    conn.close()
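
The Connect() publisher above leans on module-level names that the snippet never defines: the broker settings (hostname, port, user, password, vhost), the exchange_name, and a shared object r whose grp['state'] maps a market code to its routing key. A minimal sketch of what those globals might look like, assuming a local broker and an amqp client such as librabbitmq; every value here is illustrative, not taken from the original project:

# Hypothetical settings assumed by Connect(); adjust for your broker.
hostname = 'localhost'
port = 5672
user = 'guest'
password = 'guest'
vhost = '/'
exchange_name = 'cs.state'

class _FakeRegistry(object):
    # Stand-in for the undefined global ``r``: grp['state'] maps a market
    # code (substituted into the CSV body) to a routing key.
    grp = {'state': {'KR': 'state.kr', 'US': 'state.us'}}

r = _FakeRegistry()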
class test_Delete(unittest.TestCase):

    def setUp(self):
        self.connection = Connection(host="localhost:5672", userid="guest",
                                     password="******", virtual_host="/")
        self.channel = self.connection.channel()
        self.TEST_QUEUE = "pyrabbitmq.testq2"

    def test_delete(self):
        """Test that we can declare a channel delete it, and then declare with
        different properties"""

        res = self.channel.exchange_declare(self.TEST_QUEUE, "direct")
        res = self.channel.queue_declare(self.TEST_QUEUE)
        res = self.channel.queue_bind(self.TEST_QUEUE, self.TEST_QUEUE,
                                      self.TEST_QUEUE)

        # Delete the queue
        self.channel.queue_delete(self.TEST_QUEUE)

        # Declare it again
        x = self.channel.queue_declare(self.TEST_QUEUE, durable=True)
        self.assertIn("message_count", x)
        self.assertIn("consumer_count", x)
        self.assertEqual(x["queue"], self.TEST_QUEUE)

        self.channel.queue_delete(self.TEST_QUEUE)

    def test_delete_empty(self):
        """Test that the queue doesn't get deleted if it is not empty"""
        self.channel.exchange_declare(self.TEST_QUEUE, "direct")
        self.channel.queue_declare(self.TEST_QUEUE)
        self.channel.queue_bind(self.TEST_QUEUE, self.TEST_QUEUE,
                                self.TEST_QUEUE)

        message = Message("the quick brown fox jumps over the lazy dog",
                          properties=dict(content_type="application/json",
                                          content_encoding="utf-8"))

        self.channel.basic_publish(message, self.TEST_QUEUE, self.TEST_QUEUE)

        self.assertRaises(ChannelError, self.channel.queue_delete,
                          self.TEST_QUEUE, if_empty=True)
        # We need to make a new channel after a ChannelError
        self.channel = self.connection.channel()

        x = self.channel.basic_get(self.TEST_QUEUE)
        self.assertTrue(x.body)

        self.channel.queue_delete(self.TEST_QUEUE, if_empty=True)

    def tearDown(self):
        if self.channel:
            self.channel.queue_purge(self.TEST_QUEUE)
            self.channel.close()
        if self.connection:
            try:
                self.connection.close()
            except ConnectionError:
                pass
class StateService(object):
    def __init__(self, routingKey):
        self.routingKey = routingKey
        self.conn = None
        self.channel = None

    def run(self):
        self.conn = Connection(host=hostname, port=port, userid=user,
                               password=password, virtual_host=vhost)
        self.channel = self.conn.channel()
        self.channel.exchange_declare(exchange_name, 'topic')
        queueId = self.channel.queue_declare(exclusive=True).queue
        self.channel.queue_bind(queueId, exchange_name, self.routingKey)
        self.channel.basic_consume(queueId, callback=self.callback_rdm)
        try:
            # KeyboardInterrupt is raised while blocking in drain_events(),
            # so the handler must wrap the consume loop, not basic_consume().
            while True:
                self.conn.drain_events()
        except KeyboardInterrupt:
            self.channel.close()
            self.conn.close()

    def callback_rdm(self, message):
        # print("Body:'%s', Properties:'%s', DeliveryInfo:'%s'" % (
        #     message.body, message.properties, message.delivery_info))
        print message.body
        # message.ack()
        # channel.basic_publish(message.body, exchange_name, arg_rky)

    def close(self):
        self.conn.close()
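
A possible way to drive the consumer above, assuming the same module-level broker settings as the publisher; the topic pattern is illustrative:

svc = StateService('state.#')  # '#' matches every state routing key
svc.run()  # blocks in drain_events() until Ctrl-C closes the connection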
Example #4
def Connect():
    conn = Connection(host=hostname, port=port, userid=user,
                      password=password, virtual_host=vhost)
    channel = conn.channel()
    channel.exchange_declare(exchange_name, 'topic')

    while True:
        s = r.grp['state']

        for x in s:
            arg_body = """Routing Key[%s] ==================================>
XKRX-CS-KR-000252,13:30:48.023942,7,290.9,123.19,90.82,79.62,937.15
XKRX-CS-KR-000253,13:30:48.024171,7,28.84,93.29,67.13,234.64,149.7
XKRX-CS-KR-000254,13:30:48.024337,7,248.17,118.49,1489.54,118.45,117.42
XKRX-CS-KR-000255,13:30:48.024497,7,70.67,170.82,65.45,152.11,420.7
XKRX-CS-KR-000256,13:30:48.034801,7,160.74,82.36,260.87,104.42,384.35
XKRX-CS-KR-000257,13:30:48.034973,7,123.39,150.31,60.78,201.21,181.55
XKRX-CS-KR-000100,13:30:48.035137,8,166.66,87.45,252.83,82.03,44.02
XKRX-CS-KR-000101,13:30:48.045434,8,114.86,1023.0,37.92,65.76,61.82
XKRX-CS-KR-000102,13:30:48.045586,8,159.16,97.96,60.07,75.29,690.15
XKRX-CS-KR-000103,13:30:48.045730,8,23.52,133.91,44.0,107.83,533.96
XKRX-CS-KR-000104,13:30:48.045901,8,76.62,274.25,166.57,116.48,149.1
XKRX-CS-KR-000250,13:30:48.056203,8,105.32,254.87,158.97,21.0,59.72
XKRX-CS-KR-000251,13:30:48.056364,8,192.7,226.26,76.02,72.7,40.53
XKRX-CS-KR-000252,13:30:48.056520,8,138.58,138.76,89.68,41.78,175.83
XKRX-CS-KR-000253,13:30:48.066883,8,88.67,41.84,126.81,222.26,8.98
XKRX-CS-KR-000254,13:30:48.067103,8,156.14,126.11,46.24,24.03,57.94
XKRX-CS-KR-000255,13:30:48.067259,8,136.01,35.25,25.29,275.88,50.33
XKRX-CS-KR-000256,13:30:48.067416,8,136.89,10.51,197.03,200.62,238.65
XKRX-CS-KR-000257,13:30:48.077776,8,47.36,41.77,101.75,105.99,64.56
XKRX-CS-KR-000100,13:30:48.078006,9,26.76,231.9,104.19,117.87,24.69
XKRX-CS-KR-000101,13:30:48.078187,9,57.14,84.92,73.62,33.72,47.86
XKRX-CS-KR-000102,13:30:48.088561,9,21.85,120.6,538.69,58.24,1685.93
XKRX-CS-KR-000103,13:30:48.088819,9,450.32,417.01,210.68,121.41,27.18
XKRX-CS-KR-000104,13:30:48.088998,9,80.61,69.15,132.51,98.67,226.2
XKRX-CS-KR-000250,13:30:48.089161,9,107.44,11.22,80.1,85.93,125.1
XKRX-CS-KR-000251,13:30:48.099518,9,43.86,51.79,282.43,101.35,946.29
XKRX-CS-KR-000252,13:30:48.099705,9,170.75,242.6,74.15,323.43,28.48
XKRX-CS-KR-000253,13:30:48.099871,9,53.27,36.47,81.75,50.96,46.73
XKRX-CS-KR-000254,13:30:48.110195,9,136.93,17.66,77.64,253.57,66.8
XKRX-CS-KR-000255,13:30:48.110408,9,65.49,72.59,39.59,63.07,74.31
XKRX-CS-KR-000256,13:30:48.110575,9,63.16,44.29,36.04,119.36,21.78
XKRX-CS-KR-000257,13:30:48.110733,9,125.17,54.65,374.91,219.27,136.63
""" % x
			arg_rky = s[x]
			print arg_rky, arg_body
			channel.basic_publish(arg_body.replace("KR", x), exchange_name, arg_rky)
		
		#ime.sleep(0.1)

	channel.close()
	conn.close()
Example #5
class StateService(object):
    def __init__(self, csGroup, routingKey):
        self.csGroup = csGroup
        self.routingKey = routingKey
        self.conn = None
        self.channel = None

    def run(self):
        # arg_nam = sys.argv[1]
        # arg_rky = sys.argv[2]
        # arg_body = sys.argv[2]
        self.conn = Connection(host=hostname, port=port, userid=user,
                               password=password, virtual_host=vhost)
        self.channel = self.conn.channel()
        self.channel.exchange_declare(exchange_name, 'topic')
        queueId = self.channel.queue_declare(exclusive=True).queue
        self.channel.queue_bind(queueId, exchange_name, self.routingKey)
        self.channel.basic_consume(queueId, callback=self.callback)

        """
        try:
            self.channel.basic_consume(queueId, callback=self.callback)
        except KeyboardInterrupt:
            self.channel.close()
            self.conn.close()
        """
        while True:
            self.conn.drain_events()



    def callback(self, message):
        # print("Body:'%s', Properties:'%s', DeliveryInfo:'%s'" % (
        #     message.body, message.properties, message.delivery_info))
        print message.body
        self.channel.basic_publish(message.body, exchange_name, self.csGroup)
        #message.ack()
        #message.reject()


    def close(self):
        self.conn.close()
Example #6
class test_Channel(unittest.TestCase):

    def setUp(self):
        self.connection = Connection(host='localhost:5672', userid='guest',
                                     password='******', virtual_host='/')
        self.channel = self.connection.channel()
        self._queue_declare()

    def test_send_message(self):
        message = Message(
            'the quick brown fox jumps over the lazy dog',
            properties={
                'content_type': 'application/json',
                'content_encoding': 'utf-8',
            },
        )
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.assertGreater(self.channel.queue_purge(TEST_QUEUE), 2)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

    def _queue_declare(self):
        self.channel.exchange_declare(TEST_QUEUE, 'direct')
        x = self.channel.queue_declare(TEST_QUEUE)
        self.assertIn('message_count', x)
        self.assertIn('consumer_count', x)
        self.assertEqual(x['queue'], TEST_QUEUE)
        self.channel.queue_bind(TEST_QUEUE, TEST_QUEUE, TEST_QUEUE)

    def test_basic_get_ack(self):
        message = Message(
            'the quick brown fox jumps over the lazy dog',
            properties={
                'content_type': 'application/json',
                'content_encoding': 'utf-8',
            },
        )
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        while True:
            x = self.channel.basic_get(TEST_QUEUE)
            if x:
                break
        self.assertIs(self.channel, x.channel)
        self.assertIn('message_count', x.delivery_info)
        self.assertIn('redelivered', x.delivery_info)
        self.assertEqual(x.delivery_info['routing_key'], TEST_QUEUE)
        self.assertEqual(x.delivery_info['exchange'], TEST_QUEUE)
        self.assertTrue(x.delivery_info['delivery_tag'])
        self.assertTrue(x.properties['content_type'])
        self.assertTrue(x.body)
        x.ack()

    def test_timeout_burst(self):
        """Check that if we have a large burst of messages in our queue
        that we can fetch them with a timeout without needing to receive
        any more messages."""

        message = Message('the quick brown fox jumps over the lazy dog',
                          properties=dict(content_type='application/json',
                                          content_encoding='utf-8'))

        for i in xrange(100):
            self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

        messages = []

        def cb(x):
            messages.append(x)
            x.ack()

        self.channel.basic_consume(TEST_QUEUE, callback=cb)
        for i in xrange(100):
            self.connection.drain_events(timeout=0.2)

        self.assertEqual(len(messages), 100)

    def test_timeout(self):
        """Check that our ``drain_events`` call actually times out if
        there are no messages."""
        message = Message('the quick brown fox jumps over the lazy dog',
                          properties=dict(content_type='application/json',
                                          content_encoding='utf-8'))

        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

        messages = []

        def cb(x):
            messages.append(x)
            x.ack()

        self.channel.basic_consume(TEST_QUEUE, callback=cb)
        self.connection.drain_events(timeout=0.1)

        self.assertRaises(
            socket.timeout, self.connection.drain_events, timeout=0.1,
        )
        self.assertEqual(len(messages), 1)

    def tearDown(self):
        if self.channel:
            self.channel.queue_purge(TEST_QUEUE)
            self.channel.close()
        if self.connection:
            try:
                self.connection.close()
            except ConnectionError:
                pass
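
The test classes in these examples rely on names defined elsewhere in their modules. A hedged sketch of the missing scaffolding, assuming the librabbitmq-style API the snippets resemble; the queue name is illustrative:

import socket
import unittest

# Assumed imports; the snippets resemble the librabbitmq test suite.
from librabbitmq import Connection, Message, ChannelError, ConnectionError

TEST_QUEUE = 'pyrabbitmq.testq'  # assumed module-level queue/exchange name

if __name__ == '__main__':
    unittest.main()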
Example #7
class test_Delete(unittest.TestCase):

    def setUp(self):
        self.connection = Connection(host='localhost:5672', userid='guest',
                                     password='******', virtual_host='/')
        self.channel = self.connection.channel()
        self.TEST_QUEUE = 'pyrabbitmq.testq2'
        self.channel.queue_delete(self.TEST_QUEUE)

    def test_delete(self):
        """Test that we can declare a channel delete it, and then declare with
        different properties"""

        self.channel.exchange_declare(self.TEST_QUEUE, 'direct')
        self.channel.queue_declare(self.TEST_QUEUE)
        self.channel.queue_bind(
            self.TEST_QUEUE, self.TEST_QUEUE, self.TEST_QUEUE,
        )

        # Delete the queue
        self.channel.queue_delete(self.TEST_QUEUE)

        # Declare it again
        x = self.channel.queue_declare(self.TEST_QUEUE, durable=True)
        self.assertEqual(x.queue, self.TEST_QUEUE)

        self.channel.queue_delete(self.TEST_QUEUE)

    def test_delete_empty(self):
        """Test that the queue doesn't get deleted if it is not empty"""
        self.channel.exchange_declare(self.TEST_QUEUE, 'direct')
        self.channel.queue_declare(self.TEST_QUEUE)
        self.channel.queue_bind(self.TEST_QUEUE, self.TEST_QUEUE,
                                self.TEST_QUEUE)

        message = Message('the quick brown fox jumps over the lazy dog',
                          properties=dict(content_type='application/json',
                                          content_encoding='utf-8'))

        self.channel.basic_publish(message, self.TEST_QUEUE, self.TEST_QUEUE)

        with self.assertRaises(ChannelError):
            self.channel.queue_delete(self.TEST_QUEUE, if_empty=True)

        # We need to make a new channel after a ChannelError
        self.channel = self.connection.channel()

        x = self.channel.basic_get(self.TEST_QUEUE)
        self.assertTrue(x.body)

        self.channel.queue_delete(self.TEST_QUEUE, if_empty=True)

    def tearDown(self):
        if self.channel and self.connection.connected:
            self.channel.queue_purge(self.TEST_QUEUE)
            self.channel.close()
        if self.connection:
            try:
                self.connection.close()
            except ConnectionError:
                pass
Example #8
class RabbitHelper(object):
    def __init__(self, mq_server = None, virtual_host = cfg.CB_CLUSTER_TAG):

        if mq_server == None:
            mq_server = cfg.RABBITMQ_IP

        self.connection = Connection(host= mq_server, userid="guest", password="******", virtual_host = virtual_host)
        self.manager = Client(mq_server+":55672", "guest", "guest")


    def declare(self, queue = None, durable = True):
        res = None
        channel = self.connection.channel()
        if queue:
            if not isinstance(queue,str): queue = str(queue)
            res = channel.queue_declare(queue = queue, durable = durable, auto_delete = True)
        else:
            # tmp queue
            res = channel.queue_declare(exclusive = True)

        channel.close()
        return res


    def exchange_declare(self, exchange, type_='direct'):
        channel = self.connection.channel()
        channel.exchange_declare(exchange = exchange,
                                type=type_)
        channel.close()

    def bind(self, exchange, queue):
        channel = self.connection.channel()
        channel.queue_bind(exchange = exchange, queue = queue)
        channel.close()

    def delete(self, queue):
        channel = self.connection.channel()
        if not isinstance(queue,str): queue = str(queue)
        channel.queue_delete(queue=queue)
        channel.close()

    def purge(self, queue):
        channel = self.connection.channel()
        if not isinstance(queue,str): queue = str(queue)
        channel.queue_purge(queue=queue)
        channel.close()

    def channel(self):
        return  self.connection.channel(), self.connection


    def qsize(self, queue):
        size = 0
        if queue != None:

            if not isinstance(queue,str): queue = str(queue)

            response = self.declare(queue = queue)
            size = response[1]

        return size

    def broadcastMsg(self, routing_key, body):
        channel = self.connection.channel()
        rc = channel.basic_publish(exchange = '', routing_key = routing_key,  body = body)
        channel.close()

    def getExchange(self, vhost, exchange):
        return self.manager.get_exchange(vhost, exchange)

    def numExchangeQueues(self, vhost, exchange):

        try:
          ex = self.getExchange(vhost, exchange)
          return len(ex['outgoing'])
        except Exception:
          return 1 # todo: sometimes the broker doesn't return expected response


    def putMsg(self, routing_key, body, exchange = ''):

        channel = self.connection.channel()
        if not isinstance(routing_key, str): routing_key= str(routing_key)

        rc = channel.basic_publish(exchange = exchange,
                                   routing_key = routing_key,
                                   body = body)
        channel.close()


    def getMsg(self, queue, no_ack = False, requeue = False):

        channel = self.connection.channel()
        message = channel.basic_get(queue = queue)
        body = None

        if message is not None:
            body = message.body
            # Handle data receipt acknowledgement
            if no_ack == False:
                message.ack()

            if requeue:
                self.putMsg(queue, body)

        channel.close()
        return body

    def getJsonMsg(self, queue, no_ack = False, requeue = False):

        msg = self.getMsg(queue, no_ack, requeue)
        body = {}
        if msg is not None:
            try:
                body = json.loads(msg)
            except ValueError:
                pass

        return body

    def close(self):
        self.connection.close()

    def __del__(self):
        self.close()
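
A short round trip with the helper above, assuming a reachable broker and the cfg module it depends on; queue name and payload are illustrative:

import json

helper = RabbitHelper()
helper.declare('testq')                             # durable, auto-delete queue
helper.putMsg('testq', json.dumps({'op': 'ping'}))  # default exchange
print helper.getJsonMsg('testq')                    # -> {u'op': u'ping'}
helper.close()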
Example #9
class test_Delete(unittest.TestCase):
    def setUp(self):
        self.connection = Connection(host="localhost:5672",
                                     userid="guest",
                                     password="******",
                                     virtual_host="/")
        self.channel = self.connection.channel()
        self.TEST_QUEUE = "pyrabbitmq.testq2"

    def test_delete(self):
        """Test that we can declare a channel delete it, and then declare with
        different properties"""

        res = self.channel.exchange_declare(self.TEST_QUEUE, "direct")
        res = self.channel.queue_declare(self.TEST_QUEUE)
        res = self.channel.queue_bind(self.TEST_QUEUE, self.TEST_QUEUE,
                                      self.TEST_QUEUE)

        # Delete the queue
        self.channel.queue_delete(self.TEST_QUEUE)

        # Declare it again
        x = self.channel.queue_declare(self.TEST_QUEUE, durable=True)
        self.assertIn("message_count", x)
        self.assertIn("consumer_count", x)
        self.assertEqual(x["queue"], self.TEST_QUEUE)

        self.channel.queue_delete(self.TEST_QUEUE)

    def test_delete_empty(self):
        """Test that the queue doesn't get deleted if it is not empty"""
        self.channel.exchange_declare(self.TEST_QUEUE, "direct")
        self.channel.queue_declare(self.TEST_QUEUE)
        self.channel.queue_bind(self.TEST_QUEUE, self.TEST_QUEUE,
                                self.TEST_QUEUE)

        message = Message("the quick brown fox jumps over the lazy dog",
                          properties=dict(content_type="application/json",
                                          content_encoding="utf-8"))

        self.channel.basic_publish(message, self.TEST_QUEUE, self.TEST_QUEUE)

        self.assertRaises(ChannelError,
                          self.channel.queue_delete,
                          self.TEST_QUEUE,
                          if_empty=True)
        # We need to make a new channel after a ChannelError
        self.channel = self.connection.channel()

        x = self.channel.basic_get(self.TEST_QUEUE)
        self.assertTrue(x.body)

        self.channel.queue_delete(self.TEST_QUEUE, if_empty=True)

    def tearDown(self):
        if self.channel:
            self.channel.queue_purge(self.TEST_QUEUE)
            self.channel.close()
        if self.connection:
            try:
                self.connection.close()
            except ConnectionError:
                pass
Example #10
class test_Channel(unittest.TestCase):

    def setUp(self):
        self.connection = Connection(host='localhost:5672', userid='guest',
                                     password='******', virtual_host='/')
        self.channel = self.connection.channel()
        self.channel.queue_delete(TEST_QUEUE)
        self._queue_declare()

    def test_send_message(self):
        message = Message(
            'the quick brown fox jumps over the lazy dog',
            properties=dict(content_type='application/json',
                            content_encoding='utf-8'))
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.assertGreater(self.channel.queue_purge(TEST_QUEUE), 2)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

    def _queue_declare(self):
        self.channel.exchange_declare(TEST_QUEUE, 'direct')
        x = self.channel.queue_declare(TEST_QUEUE)
        self.assertEqual(x.message_count, x[1])
        self.assertEqual(x.consumer_count, x[2])
        self.assertEqual(x.queue, TEST_QUEUE)
        self.channel.queue_bind(TEST_QUEUE, TEST_QUEUE, TEST_QUEUE)

    def test_basic_get_ack(self):
        message = Message(
            'the quick brown fox jumps over the lazy dog',
            properties=dict(content_type='application/json',
                            content_encoding='utf-8'))
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        while True:
            x = self.channel.basic_get(TEST_QUEUE)
            if x:
                break
        self.assertIs(self.channel, x.channel)
        self.assertIn('message_count', x.delivery_info)
        self.assertIn('redelivered', x.delivery_info)
        self.assertEqual(x.delivery_info['routing_key'], TEST_QUEUE)
        self.assertEqual(x.delivery_info['exchange'], TEST_QUEUE)
        self.assertTrue(x.delivery_info['delivery_tag'])
        self.assertTrue(x.properties['content_type'])
        self.assertTrue(x.body)
        x.ack()

    def test_timeout_burst(self):
        """Check that if we have a large burst of messages in our queue
        that we can fetch them with a timeout without needing to receive
        any more messages."""

        message = Message('the quick brown fox jumps over the lazy dog',
                          properties=dict(content_type='application/json',
                                          content_encoding='utf-8'))

        for i in xrange(100):
            self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

        messages = []

        def cb(x):
            messages.append(x)
            x.ack()

        self.channel.basic_consume(TEST_QUEUE, callback=cb)
        for i in xrange(100):
            self.connection.drain_events(timeout=0.2)

        self.assertEqual(len(messages), 100)

    def test_timeout(self):
        """Check that our ``drain_events`` call actually times out if
        there are no messages."""
        message = Message('the quick brown fox jumps over the lazy dog',
                          properties=dict(content_type='application/json',
                                          content_encoding='utf-8'))

        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

        messages = []

        def cb(x):
            messages.append(x)
            x.ack()

        self.channel.basic_consume(TEST_QUEUE, callback=cb)
        self.connection.drain_events(timeout=0.1)

        with self.assertRaises(socket.timeout):
            self.connection.drain_events(timeout=0.1)
        self.assertEqual(len(messages), 1)

    def tearDown(self):
        if self.channel and self.connection.connected:
            self.channel.queue_purge(TEST_QUEUE)
            self.channel.close()
        if self.connection:
            try:
                self.connection.close()
            except ConnectionError:
                pass
class RabbitMQ(object):  # pylint: disable=r0902,r0903
    """Manages our queue connection"""

    __name__ = 'RabbitMQ'

    def __init__(self, config_parser, queue_name=None):
        # Get our configs
        if queue_name is None:
            self.queue = config_parser.get('rabbitmq', 'queue')
        else:
            self.queue = queue_name
        self.host = config_parser.get('rabbitmq', 'host')
        self.port = int(config_parser.get('rabbitmq', 'port'))
        self.delivery_mode = int(config_parser.get('rabbitmq',
                                                   'delivery_mode'))
        buffer_size = int(config_parser.get('rabbitmq', 'buffer'))
        if config_parser.get('rabbitmq', 'durable').lower() == 'true':
            self.durable = True
        else:
            self.durable = False
        if config_parser.get('rabbitmq', 'auto_delete').lower() == 'true':
            self.auto_delete = True
        else:
            self.auto_delete = False
        if config_parser.get('rabbitmq', 'refresh').lower() == 'none':
            self.refresh = int(time.time()) + 31536000
        else:
            self.refresh = int(config_parser.get('rabbitmq', 'refresh'))

        # This is a buffer to manage messages that come in while RabbitMQ may
        # be unavailable
        self.buffer = Queue.Queue(maxsize=buffer_size)

        # And set up our connection
        self.connection = None
        self.channel = None
        self.last_connect = 0
        self.processed_count = 0
        self._connect()
        self._declare()

    def __del__(self):
        self._close()

    def __unicode__(self):
        return self.__str__()

    def __str__(self):
        return self.__name__

    @timeout_decorator.timeout(5)
    def _connect(self):
        """Creates our AMQP connection"""
        try:
            self.connection = Connection(host=self.host)
            self.channel = self.connection.channel()
            logging.info('Connected to server: ' + self.host)
            self.last_connect = int(time.time())
            return True
        except ConnectionError:
            logging.error('Unable to connect to server')
            return None

    def _close(self):
        """Closes the AMQP connection"""
        if self.connection is not None:
            self.connection.close()
        logging.debug('Closed connection')

    @timeout_decorator.timeout(5)
    def _declare(self):
        """Declares the queue used for receiving logs"""
        logging.debug('Declaring queue: ' + self.queue + ', durable = ' +
                      repr(self.durable) + ', auto_delete = ' +
                      repr(self.auto_delete))
        try:
            self.channel.queue_declare(queue=self.queue,
                                       durable=self.durable,
                                       auto_delete=self.auto_delete)
            return True
        except AttributeError:
            # We raise here as failing to declare the queue is an immediate
            # show-stopper that things cannot neatly recover from.
            raise
        except ChannelError:
            # This can happen if we are attempting to redefine a queue with
            # settings that are not representative of those already present.
            logging.fatal('A ChannelError occurred. Make sure that you are ' +
                          'not attempting to change any existing queue '
                          'declarations.')
            logging.fatal(traceback.format_exc())
            raise

    def _refresh_connection(self):
        """
        This refreshes the AMQP connection after timeouts or DNS changes, which
        is an absolute must-have for load-balanced servers in an environment
        such as EC2 where the IP address of the host is not guaranteed to
        remain constant. RabbitMQ does not like network interruptions, so this
        is our attempt to handle them with a little bit of grace.
        """
        if int(time.time()) - self.last_connect > self.refresh:
            logging.info('Connection refresh threshold reached')
            self._close()
            result = self._connect()
            return result
        else:
            return True

    def publish(self, log):
        """Publishes a log entry into the queue."""
        logging.debug('Publishing to ' + self.queue + ', message: ' + str(log))
        connection_result = self._refresh_connection()
        logging.debug('connection_result: ' + str(connection_result))
        if connection_result is None:
            logging.info('Buffering log message in publish')
            self.buffer.put(log)
            return True
        try:
            # Exchanges are not implemented (yet)
            self.channel.basic_publish(exchange='',
                                       routing_key=self.queue,
                                       body=log,
                                       delivery_mode=self.delivery_mode)
            self.processed_count = self.processed_count + 1
            return True
        except ConnectionError:
            # This may happen if the backend server has gone away. We reset
            # last_connect so that the next publish() attempt forces a
            # _refresh_connection(). If that fails too, the message will be
            # dropped. In the future it would be nice to have an internal
            # buffer for keeping these messages until the queue comes back.
            logging.debug(
                'A ConnectionError was thrown when attempting to publish a ' +
                'message.')
            self.last_connect = 0
            raise
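
Everything the class reads comes from a [rabbitmq] section of a ConfigParser object. A minimal sketch of a matching config and construction, assuming the module's own imports (timeout_decorator, an amqp Connection) are in place; all option values are illustrative:

import ConfigParser
import StringIO

_INI = """
[rabbitmq]
queue = logs
host = localhost
port = 5672
delivery_mode = 2
buffer = 1000
durable = true
auto_delete = false
refresh = none
"""

parser = ConfigParser.ConfigParser()
parser.readfp(StringIO.StringIO(_INI))
mq = RabbitMQ(parser)
mq.publish('hello world')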
Example #12
class test_Channel(unittest.TestCase):
    def setUp(self):
        self.connection = Connection(host="localhost:5672",
                                     userid="guest",
                                     password="******",
                                     virtual_host="/")
        self.channel = self.connection.channel()
        self._queue_declare()

    def test_send_message(self):
        message = Message("the quick brown fox jumps over the lazy dog",
                          properties=dict(content_type="application/json",
                                          content_encoding="utf-8"))
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.assertGreater(self.channel.queue_purge(TEST_QUEUE), 2)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

    def _queue_declare(self):
        self.channel.exchange_declare(TEST_QUEUE, "direct")
        x = self.channel.queue_declare(TEST_QUEUE)
        self.assertIn("message_count", x)
        self.assertIn("consumer_count", x)
        self.assertEqual(x["queue"], TEST_QUEUE)
        self.channel.queue_bind(TEST_QUEUE, TEST_QUEUE, TEST_QUEUE)

    def test_basic_get_ack(self):
        message = Message("the quick brown fox jumps over the lazy dog",
                          properties=dict(content_type="application/json",
                                          content_encoding="utf-8"))
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        while True:
            x = self.channel.basic_get(TEST_QUEUE)
            if x:
                break
        self.assertIs(self.channel, x.channel)
        self.assertIn("message_count", x.delivery_info)
        self.assertIn("redelivered", x.delivery_info)
        self.assertEqual(x.delivery_info["routing_key"], TEST_QUEUE)
        self.assertEqual(x.delivery_info["exchange"], TEST_QUEUE)
        self.assertTrue(x.delivery_info["delivery_tag"])
        self.assertTrue(x.properties["content_type"])
        self.assertTrue(x.body)
        x.ack()

    def test_timeout_burst(self):
        """Check that if we have a large burst of messages in our queue
        that we can fetch them with a timeout without needing to receive
        any more messages."""

        message = Message("the quick brown fox jumps over the lazy dog",
                          properties=dict(content_type="application/json",
                                          content_encoding="utf-8"))

        for i in xrange(100):
            self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

        messages = []

        def cb(x):
            messages.append(x)
            x.ack()

        self.channel.basic_consume(TEST_QUEUE, callback=cb)
        for i in xrange(100):
            self.connection.drain_events(timeout=0.2)

        self.assertEqual(len(messages), 100)

    def test_timeout(self):
        """Check that our ``drain_events`` call actually times out if
        there are no messages."""
        message = Message("the quick brown fox jumps over the lazy dog",
                          properties=dict(content_type="application/json",
                                          content_encoding="utf-8"))

        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

        messages = []

        def cb(x):
            messages.append(x)
            x.ack()

        self.channel.basic_consume(TEST_QUEUE, callback=cb)
        self.connection.drain_events(timeout=0.1)

        self.assertRaises(socket.timeout,
                          self.connection.drain_events,
                          timeout=0.1)
        self.assertEqual(len(messages), 1)

    def tearDown(self):
        if self.channel:
            self.channel.queue_purge(TEST_QUEUE)
            self.channel.close()
        if self.connection:
            try:
                self.connection.close()
            except ConnectionError:
                pass
Example #13
class Collector(object):
    def __init__(self, index):
        self.index = index
        self.cancelled = False
        self.transport = self.protocol = None
        self.messages = []
        self.block_size = 1000
        self.exchange = "logstash"
        self.current_date = None
        self.logger = getLogger('alco.collector.%s' % self.index.name)
        self.amqp = self.redis = self.conn = self.vhost = self.rabbit = None
        self.insert_thread = None
        self.query_queue = queue.Queue()
        self.result_queue = queue.Queue()
        self.result_queue.put(None)
        self.query = self.values_stub = None
        self.existing = self.included = self.indexed = self.filtered = None

    def cancel(self):
        self.cancelled = True

    def inserter_loop(self):
        self.conn = connections[ALCO_SETTINGS['SPHINX_DATABASE_NAME']]
        while not self.cancelled:
            try:
                query, args = self.query_queue.get(block=True, timeout=1)
            except queue.Empty:
                continue
            result = self.insert_data(query, args)
            self.result_queue.put(result)

    def connect(self):
        connections['default'].close()
        rabbitmq = ALCO_SETTINGS['RABBITMQ']
        self.amqp = Connection(**rabbitmq)
        self.redis = redis.Redis(**ALCO_SETTINGS['REDIS'])
        self.insert_thread = Thread(target=self.inserter_loop)
        self.insert_thread.start()
        hostname = '%s:%s' % (rabbitmq['host'],
                              ALCO_SETTINGS['RABBITMQ_API_PORT'])
        self.rabbit = Client(hostname, rabbitmq['userid'],
                             rabbitmq['password'])
        self.vhost = rabbitmq['virtual_host']

    # noinspection PyUnusedLocal
    def process_sigint(self, signum, frame):
        self.logger.info("Got signal %s" % signum)
        self.cancel()
        self.logger.info("Futures cancelled, wait for thread")
        self.insert_thread.join()
        self.logger.info("Thread done")

    def __call__(self):
        signal.signal(signal.SIGINT, self.process_sigint)
        signal.signal(signal.SIGTERM, self.process_sigint)

        try:
            self.logger.debug("Connecting to RabbitMQ")
            self.connect()
            self.declare_queue()
            self.cleanup_bindings()
            channel = self.amqp.channel()
            channel.basic_qos(0, 1000, False)
            channel.basic_consume(self.index.queue_name,
                                  callback=self.process_message,
                                  no_ack=True)
            start = time.time()
            self.logger.debug("Start processing messages")
            while not self.cancelled:
                try:
                    self.amqp.drain_events(timeout=1)
                except (socket.timeout, OSError):
                    pass
                if time.time() - start > 1:
                    self.push_messages()
                    start = time.time()
        except KeyboardInterrupt:
            self.logger.warning("Got SIGINT, exit(0)")
        finally:
            self.amqp.close()
            sys.exit(0)

    def process_message(self, msg):
        data = json.loads(six.binary_type(msg.body))
        ts = data.pop('@timestamp')
        data.pop("@version")
        msg = data.pop('message')
        seq = data.pop('seq', 0)
        dt = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%fZ")
        result = {
            'ts': time.mktime(dt.timetuple()),
            'ms': dt.microsecond,
            'seq': seq,
            'message': msg,
            'data': data
        }
        self.messages.append(result)
        d = dt.date()
        if not self.current_date:
            self.current_date = d
        if d != self.current_date:
            self.current_date = d
            self.push_messages()
        if len(self.messages) >= self.block_size:
            self.push_messages()

    def declare_queue(self):
        channel = self.amqp.channel()
        """:type channel: amqp.channel.Channel"""
        durable = self.index.durable
        channel.exchange_declare(exchange=self.exchange,
                                 type='topic',
                                 durable=durable,
                                 auto_delete=False)
        channel.queue_declare(self.index.queue_name,
                              durable=durable,
                              auto_delete=False)
        for rk in self.get_routing_keys():
            channel.queue_bind(self.index.queue_name,
                               exchange=self.exchange,
                               routing_key=rk)

    def get_routing_keys(self):
        return map(lambda x: x.strip(), self.index.routing_key.split(','))

    def cleanup_bindings(self):
        self.logger.debug("Checking bindings")
        queue = self.index.queue_name
        exchange = self.exchange
        bindings = self.rabbit.get_queue_bindings(self.vhost, queue)
        bindings = [b for b in bindings if b['source'] == exchange]
        allowed = self.get_routing_keys()
        q = six.moves.urllib.parse.quote
        for b in bindings:
            rk = b['routing_key']
            if rk in allowed:
                continue
            self.logger.debug("Unbind %s with RK=%s" % (queue, rk))
            self.rabbit.delete_binding(self.vhost, exchange, q(queue), q(rk))

    def push_messages(self):
        try:
            request_started.send(None, environ=None)
            self._push_messages()
        except Exception as e:
            self.logger.exception(e)
            raise
        finally:
            request_finished.send(None)

    def _push_messages(self):
        messages, self.messages = self.messages, []
        if not messages:
            return
        message_count = len(messages)
        self.logger.info("Saving %s events" % message_count)
        columns = defaultdict(set)
        suffix = self.current_date.strftime("%Y%m%d")
        name = "%s_%s" % (self.index.name, suffix)
        args = []
        self.load_index_columns()
        self.prepare_query(name)

        pkeys = self.get_primary_keys(messages)
        seen = set()

        for pk, data in zip(pkeys, messages):
            # saving seen columns to LoggerColumn model, collecting unique
            # values for caching in redis

            js = data['data']
            self.process_js_columns(js, columns, self.included, seen)
            js_str = json.dumps(js)
            values = tuple(js.get(c) or '' for c in self.indexed)
            args.extend((pk, js_str, data['message']) + values)

        query = self.query + ','.join([self.values_stub] * message_count)

        self.save_column_values(columns)

        self.save_new_columns(seen)
        if self.result_queue.empty():
            self.logger.debug("Insert still running, waiting")
            while not self.cancelled:
                try:
                    self.result_queue.get(block=True, timeout=1)
                except queue.Empty:
                    continue

        self.query_queue.put((query, args))

    def insert_data(self, query, args):
        self.logger.debug("Inserting logs to searchd")
        result = None
        for _ in 1, 2, 3:
            try:
                c = self.conn.cursor()
                result = c.execute(query, args)
                self.logger.debug("%s rows inserted" % c.rowcount)
                c.close()
            except ProgrammingError:
                self.logger.exception("Can't insert values to index: %s" %
                                      query)
            except DatabaseError as e:
                self.logger.exception("Sphinx connection error: %s" % e)
                try:
                    close_old_connections()
                except Exception as e:
                    self.logger.exception("Can't reconnect: %s" % e)
                    os.kill(os.getpid(), signal.SIGKILL)
            except Exception:
                self.logger.exception("Unhandled error in insert_data")
            else:
                return result
        self.logger.error("Can't insert data in 3 tries, exit process")
        os.kill(os.getpid(), signal.SIGKILL)

    def save_new_columns(self, seen):
        self.logger.debug("Check for new columns")
        for column in seen - set(self.existing):
            self.logger.debug("Register column %s" % column)
            self.index.loggercolumn_set.create(name=column)

    def save_column_values(self, columns):
        self.logger.debug("Saving values for filtered columns")
        ts = time.time()
        for column in self.filtered:
            values = columns.get(column)
            if not values:
                continue
            key = keys.KEY_COLUMN_VALUES.format(index=self.index.name,
                                                column=column)
            values = {v: ts for v in values}
            self.redis.zadd(key, **values)

    def prepare_query(self, name):
        if self.indexed:
            self.query = "REPLACE INTO %s (id, js, logline, %s) VALUES " % (
                name, ', '.join(self.indexed))
        else:
            self.query = "REPLACE INTO %s (id, js, logline) VALUES " % name

        sql_col_count = len(self.indexed) + 3  # + id, js, logline
        self.values_stub = "(%s)" % ", ".join(["%s"] * sql_col_count)

    def load_index_columns(self):
        # all defined columns
        all_columns = list(self.index.loggercolumn_set.all())
        included_columns = [c for c in all_columns if not c.excluded]
        filtered_columns = [c for c in included_columns if c.filtered]
        indexed_columns = [c for c in included_columns if c.indexed]
        self.existing = [c.name for c in all_columns]
        self.included = [c.name for c in included_columns]
        self.filtered = [c.name for c in filtered_columns]
        self.indexed = [c.name for c in indexed_columns]

    @staticmethod
    def process_js_columns(js, columns, included, seen):
        for key, value in list(js.items()):
            if key in ('pk', 'id', 'ts', 'ms', 'seq', 'model'):
                # escape fields reserved by Django and ALCO
                js['%s_x' % key] = js.pop(key)
                key = '%s_x' % key
            # save seen columns set
            if key not in seen:
                seen.add(key)
            if key not in included:
                # discard fields excluded from indexing
                js.pop(key)
                continue
            # save column values set
            if type(value) not in (bool, int, float, six.text_type):
                continue
            if value not in columns[key]:
                columns[key].add(value)

    def get_primary_keys(self, messages):
        """ Generate PK sequence for a list of messages."""
        pkeys = []
        pk = None
        for msg in messages:
            # pk packs [timestamp][microseconds][randint] into one decimal integer
            pk = int((msg['ts'] * 10**6 + msg['ms']) * 1000) + randint(0, 1000)
            pkeys.append(pk)
        self.logger.debug("first pk is %s" % pk)
        return pkeys
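
The primary key packs the seconds timestamp, the microseconds, and a random salt into one decimal integer, so keys stay roughly time-ordered while two messages in the same microsecond still differ. A worked example of the arithmetic, with an illustrative timestamp:

from random import randint

# For ts = 1400000000.0 seconds and ms = 123456 microseconds:
#   (ts * 10**6 + ms) * 1000 ~= 1400000000123456000
# i.e. [seconds][microseconds][000]; randint(0, 1000) then salts the low
# digits. The product exceeds 2**53, so the float arithmetic can wobble
# in the last digits before int() truncates it.
pk = int((1400000000.0 * 10**6 + 123456) * 1000) + randint(0, 1000)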
Example #14
0
class Collector(object):

    def __init__(self, index):
        self.index = index
        self.cancelled = False
        self.transport = self.protocol = None
        self.messages = []
        self.block_size = 1000
        self.exchange = "logstash"
        self.current_date = None
        self.logger = getLogger('alco.collector.%s' % self.index.name)
        self.amqp = self.redis = self.conn = self.vhost = self.rabbit = None
        self.insert_thread = None
        self.query_queue = queue.Queue()
        self.result_queue = queue.Queue()
        self.result_queue.put(None)
        self.query = self.values_stub = None
        self.existing = self.included = self.indexed = self.filtered = None

    def cancel(self):
        self.cancelled = True

    def inserter_loop(self):
        self.conn = connections[ALCO_SETTINGS['SPHINX_DATABASE_NAME']]
        while not self.cancelled:
            try:
                query, args = self.query_queue.get(block=True, timeout=1)
            except queue.Empty:
                continue
            result = self.insert_data(query, args)
            self.result_queue.put(result)

    def connect(self):
        connections['default'].close()
        rabbitmq = ALCO_SETTINGS['RABBITMQ']
        self.amqp = Connection(**rabbitmq)
        self.redis = redis.Redis(**ALCO_SETTINGS['REDIS'])
        self.insert_thread = Thread(target=self.inserter_loop)
        self.insert_thread.start()
        hostname = '%s:%s' % (rabbitmq['host'],
                              ALCO_SETTINGS['RABBITMQ_API_PORT'])
        self.rabbit = Client(hostname, rabbitmq['userid'], rabbitmq['password'])
        self.vhost = rabbitmq['virtual_host']

    # noinspection PyUnusedLocal
    def process_sigint(self, signum, frame):
        self.logger.info("Got signal %s" % signum)
        self.cancel()
        self.logger.info("Futures cancelled, wait for thread")
        self.insert_thread.join()
        self.logger.info("Thread done")

    def __call__(self):
        signal.signal(signal.SIGINT, self.process_sigint)
        signal.signal(signal.SIGTERM, self.process_sigint)

        try:
            self.logger.debug("Connecting to RabbitMQ")
            self.connect()
            self.declare_queue()
            self.cleanup_bindings()
            channel = self.amqp.channel()
            channel.basic_qos(0, 1000, False)
            channel.basic_consume(self.index.queue_name,
                                  callback=self.process_message, no_ack=True)
            start = time.time()
            self.logger.debug("Start processing messages")
            while not self.cancelled:
                try:
                    self.amqp.drain_events(timeout=1)
                except (socket.timeout, OSError):
                    pass
                if time.time() - start > 1:
                    self.push_messages()
                    start = time.time()
        except KeyboardInterrupt:
            self.logger.warning("Got SIGINT, exit(0)")
        finally:
            self.amqp.close()
            sys.exit(0)

    def process_message(self, msg):
        data = json.loads(six.binary_type(msg.body))
        ts = data.pop('@timestamp')
        data.pop("@version")
        msg = data.pop('message')
        seq = data.pop('seq', 0)
        dt = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%fZ")
        result = {
            'ts': time.mktime(dt.timetuple()),
            'ms': dt.microsecond,
            'seq': seq,
            'message': msg,
            'data': data
        }
        self.messages.append(result)
        d = dt.date()
        if not self.current_date:
            self.current_date = d
        if d != self.current_date:
            self.current_date = d
            self.push_messages()
        if len(self.messages) >= self.block_size:
            self.push_messages()

    def declare_queue(self):
        channel = self.amqp.channel()
        """:type channel: amqp.channel.Channel"""
        durable = self.index.durable
        channel.exchange_declare(exchange=self.exchange, type='topic',
                                 durable=durable, auto_delete=False)
        channel.queue_declare(self.index.queue_name, durable=durable,
                              auto_delete=False)
        for rk in self.get_routing_keys():
            channel.queue_bind(self.index.queue_name, exchange=self.exchange,
                               routing_key=rk)

    def get_routing_keys(self):
        return map(lambda x: x.strip(), self.index.routing_key.split(','))

    def cleanup_bindings(self):
        self.logger.debug("Checking bindings")
        queue = self.index.queue_name
        exchange = self.exchange
        bindings = self.rabbit.get_queue_bindings(self.vhost, queue)
        bindings = [b for b in bindings if b['source'] == exchange]
        allowed = self.get_routing_keys()
        q = six.moves.urllib.parse.quote
        for b in bindings:
            rk = b['routing_key']
            if rk in allowed:
                continue
            self.logger.debug("Unbind %s with RK=%s" % (queue, rk))
            self.rabbit.delete_binding(self.vhost, exchange, q(queue), q(rk))

    def push_messages(self):
        try:
            request_started.send(None, environ=None)
            self._push_messages()
        except Exception as e:
            self.logger.exception(e)
            raise
        finally:
            request_finished.send(None)

    def _push_messages(self):
        messages, self.messages = self.messages, []
        if not messages:
            return
        message_count = len(messages)
        self.logger.info("Saving %s events" % message_count)
        columns = defaultdict(set)
        suffix = self.current_date.strftime("%Y%m%d")
        name = "%s_%s" % (self.index.name, suffix)
        args = []
        self.load_index_columns()
        self.prepare_query(name)

        pkeys = self.get_primary_keys(messages)
        seen = set()

        for pk, data in zip(pkeys, messages):
            # saving seen columns to LoggerColumn model, collecting unique
            # values for caching in redis

            js = data['data']
            self.process_js_columns(js, columns, self.included, seen)
            js_str = json.dumps(js)
            values = tuple(js.get(c) or '' for c in self.indexed)
            args.extend((pk, js_str, data['message']) + values)

        query = self.query + ','.join([self.values_stub] * message_count)

        self.save_column_values(columns)

        self.save_new_columns(seen)
        if self.result_queue.empty():
            self.logger.debug("Insert still running, waiting")
            while not self.cancelled:
                try:
                    self.result_queue.get(block=True, timeout=1)
                except queue.Empty:
                    continue

        self.query_queue.put((query, args))

    def insert_data(self, query, args):
        self.logger.debug("Inserting logs to searchd")
        result = None
        for _ in 1, 2, 3:
            try:
                c = self.conn.cursor()
                result = c.execute(query, args)
                self.logger.debug("%s rows inserted" % c.rowcount)
                c.close()
            except ProgrammingError:
                self.logger.exception(
                    "Can't insert values to index: %s" % query)
            except DatabaseError as e:
                self.logger.exception("Sphinx connection error: %s" % e)
                try:
                    close_old_connections()
                except Exception as e:
                    self.logger.exception("Can't reconnect: %s" % e)
                    os.kill(os.getpid(), signal.SIGKILL)
            except Exception:
                self.logger.exception("Unhandled error in insert_data")
            else:
                return result
        self.logger.error("Can't insert data in 3 tries, exit process")
        os.kill(os.getpid(), signal.SIGKILL)

    def save_new_columns(self, seen):
        self.logger.debug("Check for new columns")
        for column in seen - set(self.existing):
            self.logger.debug("Register column %s" % column)
            self.index.loggercolumn_set.create(name=column)

    def save_column_values(self, columns):
        self.logger.debug("Saving values for filtered columns")
        ts = time.time()
        for column in self.filtered:
            values = columns.get(column)
            if not values:
                continue
            key = keys.KEY_COLUMN_VALUES.format(index=self.index.name,
                                                column=column)
            values = {v: ts for v in values}
            self.redis.zadd(key, **values)

    def prepare_query(self, name):
        if self.indexed:
            self.query = "REPLACE INTO %s (id, js, logline, %s) VALUES " % (
                name, ', '.join(self.indexed))
        else:
            self.query = "REPLACE INTO %s (id, js, logline) VALUES " % name

        sql_col_count = len(self.indexed) + 3  # + jd, js, logline
        self.values_stub = "(%s)" % ", ".join(["%s"] * sql_col_count)

    def load_index_columns(self):
        # all defined columns
        all_columns = list(self.index.loggercolumn_set.all())
        included_columns = [c for c in all_columns if not c.excluded]
        filtered_columns = [c for c in included_columns if c.filtered]
        indexed_columns = [c for c in included_columns if c.indexed]
        self.existing = [c.name for c in all_columns]
        self.included = [c.name for c in included_columns]
        self.filtered = [c.name for c in filtered_columns]
        self.indexed = [c.name for c in indexed_columns]

    @staticmethod
    def process_js_columns(js, columns, included, seen):
        for key, value in list(js.items()):
            if key in ('pk', 'id', 'ts', 'ms', 'seq', 'model'):
                # escape fields reserved by Django and ALCO
                js['%s_x' % key] = js.pop(key)
                key = '%s_x' % key
            # save seen columns set
            if key not in seen:
                seen.add(key)
            if key not in included:
                # discard fields excluded from indexing
                js.pop(key)
                continue
            # save column values set
            if type(value) not in (bool, int, float, six.text_type):
                continue
            if value not in columns[key]:
                columns[key].add(value)
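
        # Worked example (illustration only, not from the original source),
        # assuming columns is a collections.defaultdict(set):
        #   js = {'id': 1, 'level': u'INFO', 'junk': object()}
        #   process_js_columns(js, columns, {'level'}, seen=set())
        # renames 'id' to 'id_x' (a reserved name), then drops both 'id_x'
        # and 'junk' because they are not in included, leaving
        # js == {'level': u'INFO'} with u'INFO' recorded in columns['level']
        # and seen == {'id_x', 'level', 'junk'}.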

    def get_primary_keys(self, messages):
        """ Generate PK sequence for a list of messages."""
        pkeys = []
        pk = None
        for msg in messages:
            # pk packs [timestamp][microseconds][random offset] into a
            # base-10 integer
            pk = int((msg['ts'] * 10**6 + msg['ms']) * 1000) + randint(0, 1000)
            pkeys.append(pk)
        self.logger.debug("first pk is %s" % pk)
        return pkeys
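        # Worked example (illustration only, not from the original source):
        # for msg = {'ts': 1.0, 'ms': 5} the deterministic part is
        # int((1.0 * 10**6 + 5) * 1000) == 1000005000; randint(0, 1000) then
        # adds a small offset so that two messages sharing the same
        # microsecond timestamp are unlikely to collide.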
Example #15
class RabbitHelper(object):
    def __init__(self, mq_server=None):
        if mq_server is None:
            mq_server = cfg.RABBITMQ_IP

        self.connection = Connection(host=mq_server, userid="guest",
                                     password="******",
                                     virtual_host=cfg.CB_CLUSTER_TAG)


    def declare(self, queue, durable=True):
        channel = self.connection.channel()
        if not isinstance(queue, str):
            queue = str(queue)
        res = channel.queue_declare(queue=queue, durable=durable,
                                    auto_delete=True)
        channel.close()
        return res

    def purge(self, queue):
        channel = self.connection.channel()
        if not isinstance(queue, str):
            queue = str(queue)
        # queue_delete would drop the queue itself; a purge should only
        # drain its messages (cf. the corrected helper in Example #18).
        channel.queue_purge(queue=queue)
        channel.close()

    def consume(self, callback, queue, no_ack=True):
        channel = self.connection.channel()
        channel.basic_consume(callback, queue=queue, no_ack=no_ack)
        channel.start_consuming()
        channel.close()

    def qsize(self, queue):
        size = 0
        if queue is not None:
            if not isinstance(queue, str):
                queue = str(queue)
            response = self.declare(queue=queue)
            size = response[1]  # message_count from queue_declare
        return size

    def putMsg(self, queue, body):
        channel = self.connection.channel()
        if not isinstance(queue, str):
            queue = str(queue)
        channel.basic_publish(exchange='', routing_key=queue, body=body)
        channel.close()


    def getMsg(self, queue, no_ack=False, requeue=False):
        channel = self.connection.channel()
        message = channel.basic_get(queue=queue)
        body = None

        if message is not None:
            body = message.body
            # Acknowledge receipt unless the caller opted out
            if not no_ack:
                message.ack()

            if requeue:
                self.putMsg(queue, body)

        channel.close()
        return body

    def getJsonMsg(self, queue, no_ack=False, requeue=False):
        msg = self.getMsg(queue, no_ack, requeue)
        body = {}
        if msg is not None:
            try:
                body = json.loads(msg)
            except ValueError:
                pass

        return body

    def close(self):
        self.connection.close()

    def __del__(self):
        self.close()
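
A minimal usage sketch for the helper above (a hedged illustration, not part of the original source; it assumes a reachable broker at cfg.RABBITMQ_IP, and the queue name is made up):

helper = RabbitHelper()
helper.declare("demo.queue")
helper.putMsg("demo.queue", json.dumps({"hello": "world"}))
print helper.qsize("demo.queue")       # expect 1 once the broker settles
print helper.getJsonMsg("demo.queue")  # expect {u'hello': u'world'}
helper.close()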
Example #16
class test_Channel(unittest.TestCase):
    def setUp(self):
        self.connection = Connection(host="localhost:5672", userid="guest", password="******", virtual_host="/")
        self.channel = self.connection.channel()
        self.channel.queue_delete(TEST_QUEUE)
        self._queue_declare()

    def test_send_message(self):
        message = Message(
            channel=self.channel,
            body="the quick brown fox jumps over the lazy dog",
            properties=dict(content_type="application/json", content_encoding="utf-8"),
        )
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.assertGreater(self.channel.queue_purge(TEST_QUEUE), 2)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

    def _queue_declare(self):
        self.channel.exchange_declare(TEST_QUEUE, "direct")
        x = self.channel.queue_declare(TEST_QUEUE)
        self.assertEqual(x.message_count, x[1])
        self.assertEqual(x.consumer_count, x[2])
        self.assertEqual(x.queue, TEST_QUEUE)
        self.channel.queue_bind(TEST_QUEUE, TEST_QUEUE, TEST_QUEUE)

    def test_basic_get_ack(self):
        message = Message(
            channel=self.channel,
            body="the quick brown fox jumps over the lazy dog",
            properties=dict(content_type="application/json", content_encoding="utf-8"),
        )
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)
        while True:
            x = self.channel.basic_get(TEST_QUEUE)
            if x:
                break
        self.assertIs(self.channel, x.channel)
        self.assertIn("message_count", x.delivery_info)
        self.assertIn("redelivered", x.delivery_info)
        self.assertEqual(x.delivery_info["routing_key"], TEST_QUEUE)
        self.assertEqual(x.delivery_info["exchange"], TEST_QUEUE)
        self.assertTrue(x.delivery_info["delivery_tag"])
        self.assertTrue(x.properties["content_type"])
        self.assertTrue(x.body)
        x.ack()

    def test_timeout_burst(self):
        """Check that if we have a large burst of messages in our queue
        that we can fetch them with a timeout without needing to receive
        any more messages."""

        message = Message(
            channel=self.channel,
            body="the quick brown fox jumps over the lazy dog",
            properties=dict(content_type="application/json", content_encoding="utf-8"),
        )

        for i in xrange(100):
            self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

        messages = []

        def cb(x):
            messages.append(x)
            x.ack()

        self.channel.basic_consume(TEST_QUEUE, callback=cb)
        for i in xrange(100):
            self.connection.drain_events(timeout=0.2)

        self.assertEqual(len(messages), 100)

    def test_timeout(self):
        """Check that our ``drain_events`` call actually times out if
        there are no messages."""
        message = Message(
            channel=self.channel,
            body="the quick brown fox jumps over the lazy dog",
            properties=dict(content_type="application/json", content_encoding="utf-8"),
        )

        self.channel.basic_publish(message, TEST_QUEUE, TEST_QUEUE)

        messages = []

        def cb(x):
            messages.append(x)
            x.ack()

        self.channel.basic_consume(TEST_QUEUE, callback=cb)
        self.connection.drain_events(timeout=0.1)

        with self.assertRaises(socket.timeout):
            self.connection.drain_events(timeout=0.1)
        self.assertEqual(len(messages), 1)

    def tearDown(self):
        if self.channel and self.connection.connected:
            self.channel.queue_purge(TEST_QUEUE)
            self.channel.close()
        if self.connection:
            try:
                self.connection.close()
            except ConnectionError:
                pass
class RabbitMQ(object):  # pylint: disable=r0902,r0903
    """Manages our queue connection"""

    __name__ = "RabbitMQ"

    def __init__(self, config_parser, queue_name=None):
        # Get our configs
        if queue_name is None:
            self.queue = config_parser.get("rabbitmq", "queue")
        else:
            self.queue = queue_name
        self.host = config_parser.get("rabbitmq", "host")
        self.port = int(config_parser.get("rabbitmq", "port"))
        self.delivery_mode = int(config_parser.get("rabbitmq", "delivery_mode"))
        buffer_size = int(config_parser.get("rabbitmq", "buffer"))
        if config_parser.get("rabbitmq", "durable").lower() == "true":
            self.durable = True
        else:
            self.durable = False
        if config_parser.get("rabbitmq", "auto_delete").lower() == "true":
            self.auto_delete = True
        else:
            self.auto_delete = False
        if config_parser.get("rabbitmq", "refresh").lower() == "none":
            self.refresh = int(time.time()) + 31536000
        else:
            self.refresh = int(config_parser.get("rabbitmq", "refresh"))

        # This is a buffer to manage messages that come in while RabbitMQ may
        # be unavailable
        self.buffer = Queue.Queue(maxsize=buffer_size)

        # And set up our connection
        self.connection = None
        self.channel = None
        self.last_connect = 0
        self.processed_count = 0
        self._connect()
        self._declare()

    def __del__(self):
        self._close()

    def __unicode__(self):
        return self.__str__()

    def __str__(self):
        return self.__name__

    @timeout_decorator.timeout(5)
    def _connect(self):
        """Creates our AMQP connection"""
        try:
            self.connection = Connection(host=self.host)
            self.channel = self.connection.channel()
            logging.info("Connected to server: " + self.host)
            self.last_connect = int(time.time())
            return True
        except ConnectionError:
            logging.error("Unable to connect to server")
            return None

    def _close(self):
        """Closes the AMQP connection"""
        if self.connection is not None:
            self.connection.close()
        logging.debug("Closed connection")

    @timeout_decorator.timeout(5)
    def _declare(self):
        """Declares the queue used for receiving logs"""
        logging.debug(
            "Declaring queue: "
            + self.queue
            + ", durable = "
            + repr(self.durable)
            + ", auto_delete = "
            + repr(self.auto_delete)
        )
        try:
            self.channel.queue_declare(queue=self.queue, durable=self.durable, auto_delete=self.auto_delete)
            return True
        except AttributeError:
            # We raise here as failing to declare the queue is an immediate
            # show-stopper that things cannot neatly recover from.
            raise
        except ChannelError:
            # This can happen if we are attempting to redefine a queue with
            # settings that are not representative of those already present.
            logging.fatal(
                "A ChannelError occurred. Make sure that you are not "
                "attempting to change any existing queue declarations."
            )
            logging.fatal(traceback.format_exc())
            raise

    def _refresh_connection(self):
        """
        This refreshes the AMQP connection after timeouts or DNS changes, which
        is an absolute must-have for load-balanced servers in an environment
        such as EC2 where the IP address of the host is not guaranteed to
        remain constant. RabbitMQ does not like network interruptions, so this
        is our attempt to handle them with a little bit of grace.
        """
        if int(time.time()) - self.last_connect > self.refresh:
            logging.info("Connection refresh threshold reached")
            self._close()
            result = self._connect()
            return result
        else:
            return True

    def publish(self, log):
        """Publishes a log entry into the queue."""
        logging.debug("Publishing to " + self.queue + ", message: " + str(log))
        connection_result = self._refresh_connection()
        logging.debug("connection_result: " + str(connection_result))
        if connection_result is None:
            logging.info("Buffering log message in publish")
            self.buffer.put(log)
            return True
        try:
            # Exchanges are not implemented (yet)
            self.channel.basic_publish(exchange="", routing_key=self.queue, body=log, delivery_mode=self.delivery_mode)
            self.processed_count += 1
            return True
        except ConnectionError:
            # This may happen if the backend server has gone away. We call
            # _refresh_connection() to try and get it back. If this fails then
            # the message will be dropped. In the future it would be nice to
            # have an internal buffer for keeping these messages until the
            # queue comes back.
            logging.debug("A ConnectionError was thrown when attempting to publish a " + "message.")
            self.last_connect = 0
            raise
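
A hedged usage sketch for the class above (the config section and its values are illustrative assumptions, not from the original source):

import ConfigParser  # Python 2 stdlib, matching this example's vintage

cp = ConfigParser.ConfigParser()
cp.add_section("rabbitmq")
for key, value in [("queue", "logs"), ("host", "localhost"),
                   ("port", "5672"), ("delivery_mode", "2"),
                   ("buffer", "1000"), ("durable", "true"),
                   ("auto_delete", "false"), ("refresh", "none")]:
    cp.set("rabbitmq", key, value)

mq = RabbitMQ(cp)               # connects and declares the "logs" queue
mq.publish('{"msg": "hello"}')  # returns True on success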
Example #18
class RabbitHelper(object):
    def __init__(self, mq_server=None, virtual_host=cfg.CB_CLUSTER_TAG):
        if mq_server is None:
            mq_server = cfg.RABBITMQ_IP

        self.connection = Connection(host=mq_server, userid="guest",
                                     password="******",
                                     virtual_host=virtual_host)


    def declare(self, queue, durable=True):
        channel = self.connection.channel()
        if not isinstance(queue, str):
            queue = str(queue)
        res = channel.queue_declare(queue=queue, durable=durable,
                                    auto_delete=True)
        channel.close()
        return res

    def delete(self, queue):
        channel = self.connection.channel()
        if not isinstance(queue, str):
            queue = str(queue)
        channel.queue_delete(queue=queue)
        channel.close()

    def purge(self, queue):
        channel = self.connection.channel()
        if not isinstance(queue, str):
            queue = str(queue)
        channel.queue_purge(queue=queue)
        channel.close()

    def consume(self, callback, queue, no_ack=True):
        channel = self.connection.channel()
        channel.basic_consume(callback, queue=queue, no_ack=no_ack)
        channel.start_consuming()
        channel.close()

    def qsize(self, queue):
        size = 0
        if queue is not None:
            if not isinstance(queue, str):
                queue = str(queue)
            response = self.declare(queue=queue)
            size = response[1]  # message_count from queue_declare
        return size

    def putMsg(self, queue, body):
        channel = self.connection.channel()
        if not isinstance(queue, str):
            queue = str(queue)
        channel.basic_publish(exchange='', routing_key=queue, body=body)
        channel.close()


    def getMsg(self, queue, no_ack=False, requeue=False):
        channel = self.connection.channel()
        message = channel.basic_get(queue=queue)
        body = None

        if message is not None:
            body = message.body
            # Acknowledge receipt unless the caller opted out
            if not no_ack:
                message.ack()

            if requeue:
                self.putMsg(queue, body)

        channel.close()
        return body

    def getJsonMsg(self, queue, no_ack=False, requeue=False):
        msg = self.getMsg(queue, no_ack, requeue)
        body = {}
        if msg is not None:
            try:
                body = json.loads(msg)
            except ValueError:
                pass

        return body

    def close(self):
        self.connection.close()

    def __del__(self):
        self.close()
Example #19
# coding=utf-8
import sys

from librabbitmq import Connection

connection = Connection(host='localhost',
                        userid='dongwm',
                        password='******',
                        virtual_host='web_develop')
channel = connection.channel()

channel.exchange_declare('web_develop',
                         'direct',
                         passive=False,
                         durable=True,
                         auto_delete=False)
if len(sys.argv) > 1:  # a message was passed on the command line
    msg = sys.argv[1]
else:
    msg = 'hah'

channel.basic_publish(msg, 'web_develop', 'xxx_routing_key')

connection.close()
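
For completeness, a matching consumer sketch (not part of the original example; the exclusive queue and its binding are assumptions):

# coding=utf-8
from librabbitmq import Connection

connection = Connection(host='localhost',
                        userid='dongwm',
                        password='******',
                        virtual_host='web_develop')
channel = connection.channel()

# Bind a throwaway queue to the exchange the publisher uses
# (run this before the publisher so the binding exists when
# the message is sent).
queue = channel.queue_declare(exclusive=True).queue
channel.queue_bind(queue, 'web_develop', 'xxx_routing_key')

msg = channel.basic_get(queue)
if msg is not None:
    print msg.body
    msg.ack()

connection.close()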