Example 1
    def run(self):  
        # Setup connection
        mainLogger.debug('Connecting to Redis on %s %s %s' % (
            agentConfig['redis_host'], agentConfig['redis_port'], agentConfig['redis_db'])
        )
        connection = BrokerConnection(
                        hostname=agentConfig['redis_host'],
                        transport="redis",
                        virtual_host=agentConfig['redis_db'],
                        port=int(agentConfig['redis_port'])
        )
        connection.connect()
        consumer = Consumer(connection)

        while True:
            try:
                consumer.consume()
            except Empty:
                mainLogger.debug('No tasks, going to sleep')
                # sleep is patched and triggers context switching
                # for eventlet
                time.sleep(1)

        mainLogger.debug('Waiting')
        mainLogger.debug('Done & exit')
Example 2
class FanoutPublisher(PluginBase):

    def __init__(self):

        if app.debug:
            setup_logging(loglevel='DEBUG', loggers=[''])

        self.connection = BrokerConnection(app.config['AMQP_URL'])
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s', app.config['AMQP_URL'], e)
            raise RuntimeError

        self.channel = self.connection.channel()
        self.exchange_name = app.config['AMQP_TOPIC']

        self.exchange = Exchange(name=self.exchange_name, type='fanout', channel=self.channel)
        self.producer = Producer(exchange=self.exchange, channel=self.channel)

        LOG.info('Configured fanout publisher on topic "%s"', app.config['AMQP_TOPIC'])

    def pre_receive(self, alert):

        return alert

    def post_receive(self, alert):

        LOG.info('Sending message %s to AMQP topic "%s"', alert.get_id(), app.config['AMQP_TOPIC'])
        LOG.debug('Message: %s', alert.get_body())

        self.producer.publish(alert.get_body(), declare=[self.exchange], retry=True)
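
For context, the plugin above only publishes. A minimal sketch of a matching consumer that binds an exclusive queue to the same fanout exchange; the exchange name 'notify' and the broker URL are assumptions standing in for AMQP_TOPIC / AMQP_URL:

# Sketch of a consumer counterpart (not part of the plugin); names are placeholders.
from kombu import BrokerConnection, Consumer, Exchange, Queue


def on_alert(body, message):
    # body is whatever FanoutPublisher.post_receive() published
    print('received alert: %r' % (body,))
    message.ack()


connection = BrokerConnection('amqp://guest:guest@localhost:5672//')
connection.connect()

exchange = Exchange('notify', type='fanout')
# exclusive server-named queue: every consumer gets its own copy of each alert
queue = Queue('', exchange=exchange, exclusive=True, auto_delete=True)

with Consumer(connection, queues=[queue], callbacks=[on_alert]):
    while True:
        connection.drain_events()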
Example 3
class RabbitMQHandler(object):
    def __init__(self, connection_string, exchange):
        self._connection = BrokerConnection(connection_string)
        self._connections = set([self._connection])  # set of connections for the heartbeat
        self._exchange = Exchange(exchange, durable=True, delivery_mode=2, type='topic')
        self._is_active = True  # assumed default; info() below checks this flag
        self._connection.connect()
        monitor_heartbeats(self._connections)

    def _get_producer(self):
        producer = producers[self._connection].acquire(block=True, timeout=2)
        self._connections.add(producer.connection)
        return producer

    def publish(self, item, contributor):
        with self._get_producer() as producer:
            producer.publish(item, exchange=self._exchange, routing_key=contributor, declare=[self._exchange])

    def info(self):
        if not self._is_active:
            return {}
        with self._get_producer() as producer:
            res = producer.connection.info()
            if 'password' in res:
                del res['password']
            return res
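
The monitor_heartbeats() helper used by this handler is not shown in these snippets. A minimal sketch of what it might look like, assuming it simply runs kombu's heartbeat_check() on every tracked connection from a daemon thread (the rate and interval values are assumptions):

import threading
import time


def monitor_heartbeats(connections, rate=2, interval=10):
    # Sketch only: keep AMQP heartbeats flowing so the broker does not drop idle connections.
    def beat():
        while True:
            for connection in list(connections):
                try:
                    connection.heartbeat_check(rate=rate)
                except Exception:
                    # a broken connection will surface on the next publish; keep looping
                    pass
            time.sleep(interval)

    thread = threading.Thread(target=beat, name='kombu-heartbeat')
    thread.daemon = True
    thread.start()
    return thread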
Example 4
class FanoutPublisher(PluginBase):

    def __init__(self, name=None):
        if app.config['DEBUG']:
            setup_logging(loglevel='DEBUG', loggers=[''])

        self.connection = BrokerConnection(AMQP_URL)
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s', AMQP_URL, e)
            raise RuntimeError

        self.channel = self.connection.channel()
        self.exchange_name = AMQP_TOPIC

        self.exchange = Exchange(name=self.exchange_name, type='fanout', channel=self.channel)
        self.producer = Producer(exchange=self.exchange, channel=self.channel)

        super(FanoutPublisher, self).__init__(name)

        LOG.info('Configured fanout publisher on topic "%s"', AMQP_TOPIC)

    def pre_receive(self, alert, **kwargs):
        return alert

    def post_receive(self, alert, **kwargs):
        LOG.info('Sending message %s to AMQP topic "%s"', alert.get_id(), AMQP_TOPIC)
        body = alert.get_body(history=self.get_config('AMQP_SEND_ALERT_HISTORY', default=DEFAULT_AMQP_SEND_ALERT_HISTORY, type=bool, **kwargs))
        LOG.debug('Message: %s', body)
        self.producer.publish(body, declare=[self.exchange], retry=True)

    def status_change(self, alert, status, text, **kwargs):
        return
Example 5
class Publisher(object):
    def __init__(self, connection_string, exchange, is_active=True):
        self._is_active = is_active
        self.is_connected = True
        if not is_active:
            self.is_connected = False
            return

        self._connection = BrokerConnection(connection_string)
        self._connections = set([self._connection])  # set of connections for the heartbeat
        self._exchange = Exchange(exchange,
                                  durable=True,
                                  delivery_mode=2,
                                  type='topic')
        self._connection.connect()
        monitor_heartbeats(self._connections)

    def _get_producer(self):
        producer = producers[self._connection].acquire(block=True, timeout=2)
        self._connections.add(producer.connection)
        return producer

    def publish(self, item, contributor):
        if not self._is_active:
            return

        with self._get_producer() as producer:
            try:
                producer.publish(item,
                                 exchange=self._exchange,
                                 routing_key=contributor,
                                 declare=[self._exchange])
                self.is_connected = True
            except socket.error:
                self.is_connected = False
                logging.getLogger(__name__).debug(
                    'Impossible to publish message !')
                raise

    def info(self):
        result = {
            "is_active": self._is_active,
            "is_connected": self.is_connected
        }
        if not self._is_active:
            return result

        with self._get_producer() as producer:
            res = producer.connection.info()
            if 'password' in res:
                del res['password']
            for key, value in res.items():
                result[key] = value
        return result
Example 6
class Messaging(object):

    amqp_opts = {
        'amqp_queue': '',                                   # do not send to queue by default
        'amqp_topic': 'notify',
        'amqp_url': 'amqp://*****:*****@localhost:5672//',  # RabbitMQ
        # 'amqp_url': 'mongodb://*****:*****@'        # AWS SQS (must define amqp_queue)
        # 'amqp_sqs_region': 'eu-west-1'                    # required if SQS is used
    }

    def __init__(self):

        config.register_opts(Messaging.amqp_opts)

        if CONF.debug:
            setup_logging(loglevel='DEBUG', loggers=[''])

        self.connection = None
        self.connect()

    def connect(self):

        if not CONF.amqp_url:
            return

        if CONF.amqp_url.startswith('sqs://'):
            CONF.amqp_url = 'sqs://' + CONF.amqp_url[6:].replace('/', '%2F')

        if CONF.amqp_sqs_region:
            transport_options = {'region': CONF.amqp_sqs_region}
        else:
            transport_options = {}

        self.connection = BrokerConnection(
            CONF.amqp_url,
            transport_options=transport_options
        )
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s', CONF.amqp_url, e)
            sys.exit(1)

        LOG.info('Connected to broker %s', CONF.amqp_url)

    def disconnect(self):

        return self.connection.release()

    def is_connected(self):

        return self.connection.connected
Example 7
class Audit:
    def __init__(self, hostname='localhost', port='5672',
                 userid='', password='', virtual_host='graylog',
                 exchange=None):
        self.hostname = hostname
        self.port = port
        self.userid = userid
        self.password = password
        self.virtual_host = virtual_host
        self.connection = BrokerConnection(virtual_host=virtual_host)
        self.exchange_setup = exchange or ExchangeSetup()

    def custom_exchange(self, exchange, exchange_type, routing_key, queue):
        """Broker exchange can be set after the object has been instantiated.

        Args:
            exchange (str): Exchange name
            exchange_type (str): AMQP exchange type, see your broker manual
            routing_key (str)
            queue (str)
        """
        self.exchange_setup.exchange = exchange
        self.exchange_setup.exchange_type = exchange_type
        self.exchange_setup.routing_key = routing_key
        self.exchange_setup.queue = queue

    def log(self, message):
        """Pushes argument object to message broker.

        Args:
            message (json/gelf): Message can depend on third-party log software.
                Graylog uses the GELF format: https://www.graylog.org/resources/gelf/
        """
        if (type(message) is not str) or (message == ''):
            print 'Unable to log empty message'
            return False
        if len(message) > 8192:
            print 'Message size too large'
            return False

        self.connection.connect()
        channel = self.connection.channel()
        exchange = Exchange(self.exchange_setup.exchange,
                            type=self.exchange_setup.exchange_type)

        bound_exchange = exchange(channel)
        bound_exchange.declare()

        # example_message = '{"short_message":"Kombu", "host":"example.org"}'
        message = bound_exchange.Message(message)
        bound_exchange.publish(message, routing_key=self.exchange_setup.routing_key)

        self.connection.release()
Example 8
def main():
    cfg = {
        'hostname': 'localhost',
        'userid': 'guest',
        'password': '******',
        'virtual_host': '/',
        'port': 5672
    }
    transport = 'pika'
    #transport = 'librabbitmq'
    connection = BrokerConnection(transport=transport, **cfg)
    connection.connect()

    cfg = {
        'name': 'simple-test-1',
        'auto_delete': True,
        'durable': False,
        'delivery_mode': 'transient'
    }
    channel = connection.channel()
    exchange = Exchange(channel=channel, **cfg)
    #exchange = exchange_def(channel)

    routing_key = 'simple-test-1-route'
    queue = Queue(exchange=exchange, routing_key=routing_key, **cfg)

    channel = connection.channel()
    producer = Producer(channel=channel,
                        exchange=exchange,
                        routing_key=routing_key)

    channel = connection.channel()
    consumer = Consumer(channel=channel, queues=[queue], callbacks=[receive])
    consumer.consume()

    def serve_forever():
        while True:
            #print 'drain'
            #gevent.sleep(0.0001)
            connection.drain_events(timeout=1)

    def publish_forever():
        while True:
            producer.publish(loremIpsum)
            gevent.sleep(0.0001)

    #g1, g2 = gevent.spawn(publish_forever), gevent.spawn(serve_forever)
    g2 = gevent.spawn(serve_forever)
    g1 = gevent.spawn(publish_forever)
    gevent.joinall([g1, g2])
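
main() assumes that `receive` and `loremIpsum` are already defined elsewhere in the module. A minimal sketch of what they might look like (contents are placeholders):

# Hypothetical definitions for the names main() relies on.
loremIpsum = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.'


def receive(body, message):
    # print the payload and acknowledge it so the broker can drop it
    print(body)
    message.ack()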
Example 9
class FanoutPublisher(PluginBase):
    def __init__(self, name=None):
        if app.config['DEBUG']:
            setup_logging(loglevel='DEBUG', loggers=[''])

        self.connection = BrokerConnection(AMQP_URL)
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s', AMQP_URL,
                      e)
            raise RuntimeError

        self.channel = self.connection.channel()
        self.exchange_name = AMQP_TOPIC

        self.exchange = Exchange(name=self.exchange_name,
                                 type='fanout',
                                 channel=self.channel)
        self.producer = Producer(exchange=self.exchange, channel=self.channel)

        super(FanoutPublisher, self).__init__(name)

        LOG.info('Configured fanout publisher on topic "%s"', AMQP_TOPIC)

    def pre_receive(self, alert):
        return alert

    def post_receive(self, alert):
        LOG.info('Sending message %s to AMQP topic "%s"', alert.get_id(),
                 AMQP_TOPIC)

        try:
            body = alert.serialize  # alerta >= 5.0

            # update body's datetime-related fields with utc-aware values
            body.update({
                key: body[key].replace(tzinfo=pytz.utc)
                for key in ['createTime', 'lastReceiveTime', 'receiveTime']
            })
        except Exception:
            body = alert.get_body()  # alerta < 5.0

        LOG.debug('Message: %s', body)
        self.producer.publish(body, declare=[self.exchange], retry=True)

    def status_change(self, alert, status, text):
        return
Example 10
class Publisher(object):
    def __init__(self, connection_string, exchange, is_active=True):
        self._is_active = is_active
        self.is_connected = True
        if not is_active:
            self.is_connected = False
            return

        self._connection = BrokerConnection(connection_string)
        self._connections = set([self._connection])  # set of connections for the heartbeat
        self._exchange = Exchange(exchange, durable=True, delivery_mode=2, type='topic')
        self._connection.connect()
        monitor_heartbeats(self._connections)

    def _get_producer(self):
        producer = producers[self._connection].acquire(block=True, timeout=2)
        self._connections.add(producer.connection)
        return producer

    def publish(self, item, contributor):
        if not self._is_active:
            return

        with self._get_producer() as producer:
            try:
                producer.publish(item, exchange=self._exchange, routing_key=contributor, declare=[self._exchange])
                self.is_connected = True
            except socket.error:
                self.is_connected = False
                logging.getLogger(__name__).debug('Impossible to publish message !')
                raise

    def info(self):
        result = {
            "is_active": self._is_active,
            "is_connected": self.is_connected
        }
        if not self._is_active:
            return result

        with self._get_producer() as producer:
            res = producer.connection.info()
            if 'password' in res:
                del res['password']
            for key, value in res.items():
                result[key] = value
        return result
Example 11
class RabbitMQHandler(object):
    def __init__(self, connection_string, exchange):
        self._connection = BrokerConnection(connection_string)
        self._connections = {self._connection}  # set of connections for the heartbeat
        self._exchange = Exchange(exchange,
                                  durable=True,
                                  delivery_mode=2,
                                  type="topic")
        monitor_heartbeats(self._connections)

    @retry(wait_fixed=200, stop_max_attempt_number=3)
    def publish(self, item, contributor_id):
        with self._connection.channel() as channel:
            with Producer(channel) as producer:
                producer.publish(item,
                                 exchange=self._exchange,
                                 routing_key=contributor_id,
                                 declare=[self._exchange])

    def info(self):
        info = self._connection.info()
        info.pop("password", None)
        return info

    def connect(self):
        self._connection.connect()

    def close(self):
        for c in self._connections:
            c.release()

    def listen_load_realtime(self, queue_name, max_retries=10):
        log = logging.getLogger(__name__)

        route = "task.load_realtime.*"
        log.info("listening route {} on exchange {}...".format(
            route, self._exchange))
        rt_queue = Queue(queue_name,
                         routing_key=route,
                         exchange=self._exchange,
                         durable=False)
        RTReloader(connection=self._connection,
                   rpc_queue=rt_queue,
                   exchange=self._exchange,
                   max_retries=max_retries).run()
Example 12
class MqServer(object):
    """
    exchange='E_X7_W2S', queue='Q_X7_W2S', routing_key='RK_X7_W2S'
    """

    def __init__(self, callback, kwargs):
        self.callback = callback
        if kwargs:
            self.kwargs = kwargs
        else:
            self.kwargs = MqDict

    def create_queue(self, hostname="localhost", userid="guest", password="******", virtual_host="/"):
        self.conn = BrokerConnection(hostname, userid, password, virtual_host)
        # define Web2Server exchange
        exchange = Exchange(self.kwargs["X7_E"], type="direct")
        self.queue = Queue(self.kwargs["X7_Q"], exchange, routing_key=self.kwargs["X7_RK"])
        channel = self.conn.channel()
        consumer = Consumer(channel, self.queue, callbacks=[self.callback])
        consumer.consume()
        self.conn.connect()

    def connect(self, hostname="localhost", userid="guest", password="******", virtual_host="/"):
        self.conn = BrokerConnection(hostname, userid, password, virtual_host)
        # define Web2Server exchange
        exchange = Exchange(self.kwargs["X7_E"], type="direct")
        self.queue = Queue(self.kwargs["X7_Q"], exchange, routing_key=self.kwargs["X7_RK"])
        channel = self.conn.channel()

        consumer = Consumer(channel, self.queue, callbacks=[self.callback])
        consumer.consume()

    def run(self, once=False):
        if once:
            self.conn.drain_events()
        else:
            while True:
                self.conn.drain_events()

    def get(self):
        message = self.queue.get(block=True)
        message.ack()
        return message
Example 13
class FanoutPublisher(PluginBase):

    def __init__(self, name=None):
        if app.config['DEBUG']:
            setup_logging(loglevel='DEBUG', loggers=[''])

        self.connection = BrokerConnection(AMQP_URL)
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s', AMQP_URL, e)
            raise RuntimeError

        self.channel = self.connection.channel()
        self.exchange_name = AMQP_TOPIC

        self.exchange = Exchange(name=self.exchange_name, type='fanout', channel=self.channel)
        self.producer = Producer(exchange=self.exchange, channel=self.channel)

        super(FanoutPublisher, self).__init__(name)

        LOG.info('Configured fanout publisher on topic "%s"', AMQP_TOPIC)

    def pre_receive(self, alert):
        return alert

    def post_receive(self, alert):
        LOG.info('Sending message %s to AMQP topic "%s"', alert.get_id(), AMQP_TOPIC)

        try:
            body = alert.serialize  # alerta >= 5.0

            # update body's datetime-related fields with utc-aware values
            body.update({key: body[key].replace(tzinfo=pytz.utc) for key in ['createTime', 'lastReceiveTime', 'receiveTime']})
        except Exception:
            body = alert.get_body()  # alerta < 5.0

        LOG.debug('Message: %s', body)
        self.producer.publish(body, declare=[self.exchange], retry=True)

    def status_change(self, alert, status, text):
        return
Example 14
class FanoutPublisher(PluginBase):
    def __init__(self):

        if app.debug:
            setup_logging(loglevel='DEBUG', loggers=[''])

        self.connection = BrokerConnection(app.config['AMQP_URL'])
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s',
                      app.config['AMQP_URL'], e)
            raise RuntimeError

        self.channel = self.connection.channel()
        self.exchange_name = app.config['AMQP_TOPIC']

        self.exchange = Exchange(name=self.exchange_name,
                                 type='fanout',
                                 channel=self.channel)
        self.producer = Producer(exchange=self.exchange, channel=self.channel)

        LOG.info('Configured fanout publisher on topic "%s"',
                 app.config['AMQP_TOPIC'])

    def pre_receive(self, alert):

        return alert

    def post_receive(self, alert):

        LOG.info('Sending message %s to AMQP topic "%s"', alert.get_id(),
                 app.config['AMQP_TOPIC'])
        LOG.debug('Message: %s', alert.get_body())

        self.producer.publish(alert.get_body(),
                              declare=[self.exchange],
                              retry=True)

    def status_change(self, alert, status, text):
        return
Example 15
class Messaging(object):

    amqp_opts = {
        'amqp_queue': 'alerts',
        'amqp_topic': 'notify',
        'amqp_url': 'amqp://*****:*****@localhost:5672//',  # RabbitMQ
        # 'amqp_url': 'mongodb://localhost:27017/kombu',    # MongoDB
        # 'amqp_url': 'redis://localhost:6379/',            # Redis
    }

    def __init__(self):

        config.register_opts(Messaging.amqp_opts)

        self.connection = None
        self.channel = None
        self.connect()

    def connect(self):

        if not CONF.amqp_url:
            return

        self.connection = BrokerConnection(CONF.amqp_url)
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s', CONF.amqp_url, e)
            sys.exit(1)
        self.channel = self.connection.channel()

        LOG.info('Connected to broker %s', CONF.amqp_url)

    def disconnect(self):

        return self.connection.release()

    def is_connected(self):

        return self.connection.connected
Example 16
def main():
    cfg = {'hostname':'localhost', 'userid':'guest', 'password':'******', 'virtual_host':'/', 'port':5672}
    transport = 'pika'
    #transport = 'librabbitmq'
    connection = BrokerConnection(transport=transport, **cfg)
    connection.connect()

    cfg = {'name':'simple-test-1', 'auto_delete':True, 'durable':False, 'delivery_mode':'transient'}
    channel = connection.channel()
    exchange = Exchange(channel=channel, **cfg)
    #exchange = exchange_def(channel)

    routing_key = 'simple-test-1-route'
    queue = Queue(exchange=exchange, routing_key=routing_key, **cfg)

    channel = connection.channel()
    producer = Producer(channel=channel, exchange=exchange, routing_key=routing_key)

    channel = connection.channel()
    consumer = Consumer(channel=channel, queues=[queue], callbacks=[receive])
    consumer.consume()

    def serve_forever():
        while True:
            #print 'drain'
            #gevent.sleep(0.0001)
            connection.drain_events(timeout=1)
    def publish_forever():
        while True:
            producer.publish(loremIpsum)
            gevent.sleep(0.0001)

    #g1, g2 = gevent.spawn(publish_forever), gevent.spawn(serve_forever)
    g2 = gevent.spawn(serve_forever)
    g1 = gevent.spawn(publish_forever)
    gevent.joinall([g1, g2])
Example 17
class event2amqp():
    def __init__(self, host, port, user, password, virtual_host, exchange_name,
                 identifier, maxqueuelength, queue_dump_frequency):

        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.virtual_host = virtual_host
        self.exchange_name = exchange_name
        self.identifier = identifier
        self.maxqueuelength = maxqueuelength
        self.queue_dump_frequency = queue_dump_frequency

        self.connection_string = None

        self.connection = None
        self.channel = None
        self.producer = None
        self.exchange = None
        self.queue = deque([])

        self.tickage = 0

        self.load_queue()

    def create_connection(self):
        self.connection_string = "amqp://%s:%s@%s:%s/%s" % (
            self.user, self.password, self.host, self.port, self.virtual_host)
        try:
            self.connection = BrokerConnection(self.connection_string)
            return True
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" %
                         (error, func))
            return False

    def connect(self):
        logger.info("[Canopsis] connection with: %s" % self.connection_string)
        try:
            self.connection.connect()
            if not self.connected():
                return False
            else:
                self.get_channel()
                self.get_exchange()
                self.create_producer()
                return True
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" %
                         (error, func))
            return False

    def disconnect(self):
        try:
            if self.connected():
                self.connection.release()
            return True
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" %
                         (error, func))
            return False

    def connected(self):
        try:
            if self.connection.connected:
                return True
            else:
                return False
        except:
            return False

    def get_channel(self):
        try:
            self.channel = self.connection.channel()
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" %
                         (error, func))
            return False

    def get_exchange(self):
        try:
            self.exchange = Exchange(self.exchange_name,
                                     "topic",
                                     durable=True,
                                     auto_delete=False)
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" %
                         (error, func))
            return False

    def create_producer(self):
        try:
            self.producer = Producer(channel=self.channel,
                                     exchange=self.exchange,
                                     routing_key=self.virtual_host)
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" %
                         (error, func))
            return False

    def postmessage(self, message, retry=False):

        # process enqueued events if possible
        self.pop_events()

        if message["source_type"] == "component":
            key = "%s.%s.%s.%s.%s" % (
                message["connector"], message["connector_name"],
                message["event_type"], message["source_type"],
                message["component"])
        else:
            key = "%s.%s.%s.%s.%s[%s]" % (
                message["connector"], message["connector_name"],
                message["event_type"], message["source_type"],
                message["component"], message["resource"])

        # connection management
        if not self.connected():
            logger.error("[Canopsis] Create connection")
            self.create_connection()
            self.connect()

        # publish message
        if self.connected():
            logger.debug("[Canopsis] using routing key %s" % key)
            logger.debug("[Canopsis] sending %s" % str(message))
            try:
                self.producer.revive(self.channel)
                self.producer.publish(body=message,
                                      compression=None,
                                      routing_key=key,
                                      exchange=self.exchange_name)
                return True
            except:
                logger.error(
                    "[Canopsis] Not connected, going to queue messages until connection back"
                )
                self.queue.append({"key": key, "message": message})
                func = sys._getframe(1).f_code.co_name
                error = str(sys.exc_info()[0])
                logger.error("[Canopsis] Unexpected error: %s in %s" %
                             (error, func))
                # logger.error(str(traceback.format_exc()))
                return False
        else:
            errmsg = "[Canopsis] Not connected, going to queue messages until connection back (%s items in queue | max %s)" % (
                str(len(self.queue)), str(self.maxqueuelength))
            logger.error(errmsg)
            #enqueue_cano_event(key,message)
            if len(self.queue) < int(self.maxqueuelength):
                self.queue.append({"key": key, "message": message})
                logger.debug("[Canopsis] Queue length: %d" % len(self.queue))
                return True
            else:
                logger.error(
                    "[Canopsis] Maximum retention for event queue %s reached" %
                    str(self.maxqueuelength))
                return False

    def errback(self, exc, interval):
        logger.warning("Couldn't publish message: %r. Retry in %ds" %
                       (exc, interval))

    def pop_events(self):
        if self.connected():
            while len(self.queue) > 0:
                item = self.queue.pop()
                try:
                    logger.debug("[Canopsis] Pop item from queue [%s]: %s" %
                                 (str(len(self.queue)), str(item)))
                    self.producer.revive(self.channel)
                    self.producer.publish(body=item["message"],
                                          compression=None,
                                          routing_key=item["key"],
                                          exchange=self.exchange_name)
                except:
                    self.queue.append(item)
                    func = sys._getframe(1).f_code.co_name
                    error = str(sys.exc_info()[0])
                    logger.error("[Canopsis] Unexpected error: %s in %s" %
                                 (error, func))
                    return False
        else:
            return False

    def hook_tick(self, brok):

        self.tickage += 1

        # queue retention saving
        if self.tickage >= int(self.queue_dump_frequency) and len(
                self.queue) > 0:
            # flush queue to disk if queue age reach queue_dump_frequency
            self.save_queue()
            self.tickage = 0

        return True

    def save_queue(self):
        retentionfile = "%s/canopsis.dat" % os.getcwd()  #:fixme: use path.join
        logger.info("[Canopsis] saving to %s" % retentionfile)
        filehandler = open(retentionfile, 'w')
        pickle.dump(self.queue, filehandler)
        filehandler.close()

        return True

    def load_queue(self):
        retentionfile = "%s/canopsis.dat" % os.getcwd()
        logger.info("[Canopsis] loading from %s" % retentionfile)

        try:
            # open inside the try so a missing retention file does not crash __init__
            with open(retentionfile, 'r') as filehandler:
                self.queue = pickle.load(filehandler)
        except:
            pass
        return True
Example 18
from socket import gethostname
from time import time

from kombu import BrokerConnection

files = ['bulletin.pdf', 'med_4p_120k.pdf', 'small_45k.pdf', 'math_11p.pdf']
#files = ['bulletin.pdf',]

connection = BrokerConnection(
                hostname='rh2.dev.novagile.fr',
                transport="redis",
                virtual_host=0,
                port=6379)

print "Connection Producer to Redis"
connection.connect()

queue = connection.SimpleQueue("pdf_to_jpg")

for f in files:
    # open as binary
    my_file = open(f, "rb")
    my_file.seek(0)
    my_file_bcontent = my_file.read()
    my_file.close()

    # Push !
    queue.put({"file_content": my_file_bcontent,
               "file_name": f,
               "hostname": gethostname(),
               "timestamp": time()})
Example 19
class RabbitMQHandler(object):
    def __init__(self, connection_string, exchange):
        self._connection = BrokerConnection(connection_string)
        self._connections = set([self._connection])  # set of connections for the heartbeat
        self._exchange = Exchange(exchange, durable=True, delivery_mode=2, type='topic')
        self._is_active = True  # assumed default; info() below checks this flag
        self._connection.connect()
        monitor_heartbeats(self._connections)

    def _get_producer(self):
        producer = producers[self._connection].acquire(block=True, timeout=2)
        self._connections.add(producer.connection)
        return producer

    def publish(self, item, contributor):
        with self._get_producer() as producer:
            producer.publish(item, exchange=self._exchange, routing_key=contributor, declare=[self._exchange])

    def info(self):
        if not self._is_active:
            return {}
        with self._get_producer() as producer:
            res = producer.connection.info()
            if 'password' in res:
                del res['password']
            return res

    def listen_load_realtime(self):
        log = logging.getLogger(__name__)

        def callback(body, message):
            task = task_pb2.Task()
            try:
                # `body` is of unicode type, but we need str type for
                # `ParseFromString()` to work.  It seems to work.
                # Maybe kombu assumes that, without any other information,
                # the body should be something like json, and thus a
                # unicode string.  On the C++ side, I didn't manage to
                # find a way to set a content-type or anything similar.
                body = str(body)
                task.ParseFromString(body)
            except DecodeError as e:
                log.warn('invalid protobuf: {}'.format(str(e)))
                return

            log.info('getting a request: {}'.format(task))
            if task.action != task_pb2.LOAD_REALTIME or not task.load_realtime:
                return

            feed = convert_to_gtfsrt(RealTimeUpdate.all(task.load_realtime.contributors))

            with self._get_producer() as producer:
                producer.publish(feed.SerializeToString(), routing_key=task.load_realtime.queue_name)

        route = 'task.load_realtime.*'
        log.info('listening route {} on exchange {}...'.format(route, self._exchange))
        rt_queue = Queue('', routing_key=route, exchange=self._exchange, durable=False, auto_delete=True)
        with connections[self._connection].acquire(block=True) as conn:
            self._connections.add(conn)
            with Consumer(conn, queues=[rt_queue], callbacks=[callback]):
                while True:
                    try:
                        conn.drain_events(timeout=1)
                    except socket.timeout:
                        pass
Example 20
import boto
from kombu import BrokerConnection, Exchange, Queue, Consumer

connection = BrokerConnection()
connection.connect()

channel = connection.channel()
exchange = Exchange(
    name="android", 
    type="fanout", 
    channel=channel, 
    durable=True,
)
exchange.declare()

channel = connection.channel()
queue = Queue(
    name='filr',
    exchange=exchange,
    durable=True,
    auto_delete=False,
    channel=channel,
    routing_key='filr',
)
queue.declare()

def fetch(b,m):
    print b,m

consumer = Consumer(
    channel=connection.channel(),
    queues=[queue],
    callbacks=[fetch],
)
consumer.consume()
Example 21
class Server(object):
	"""
	This Server class is used to provide an RPC server

	:keyword server_id: Id of the server
	:keyword amqp_host: The host of where the AMQP Broker is running.
	:keyword amqp_user: The username for the AMQP Broker.
	:keyword amqp_password: The password for the AMQP Broker.
	:keyword amqp_vhost: The virtual host of the AMQP Broker.
	:keyword amqp_port: The port of the AMQP Broker.
	:keyword ssl: Use SSL connection for the AMQP Broker.
	:keyword threaded: Use of multithreading. If set to True, RPC calls
		will be processed in parallel (one thread per call), which dramatically
		improves performance.


	"""
	
	def __init__(self, 
				server_id,
				amqp_host='localhost', 
				amqp_user ='******',
				amqp_password='******',
				amqp_vhost='/',
				amqp_port=5672,
				ssl=False,
				threaded=False):
		self.logger = logging.getLogger('callme.server')
		self.logger.debug('Server ID: %s' % server_id)
		self.server_id = server_id
		self.threaded = threaded
		self.do_run = True
		self.is_stopped = True
		self.func_dict={}
		self.result_queue = queue.Queue()
		target_exchange = Exchange("server_"+server_id+"_ex", "direct", durable=False,
								auto_delete=True)	
		self.target_queue = Queue("server_"+server_id+"_queue", exchange=target_exchange, 
							auto_delete=True, durable=False)
		
		
		
		self.connection = BrokerConnection(hostname=amqp_host,
                              userid=amqp_user,
                              password=amqp_password,
                              virtual_host=amqp_vhost,
                              port=amqp_port,
                              ssl=ssl)
		try:
			self.connection.connect()
		except IOError:
			self.logger.critical("Connection Error: Probably AMQP User has not enough permissions")
			raise ConnectionError("Connection Error: Probably AMQP User has not enough permissions")
		
		self.channel = self.connection.channel()
		
		self.publish_connection = BrokerConnection(hostname=amqp_host,
                              userid=amqp_user,
                              password=amqp_password,
                              virtual_host=amqp_vhost,
                              port=amqp_port,
                              ssl=ssl)
		self.publish_channel = self.publish_connection.channel()
		
		# consume
		self.consumer = Consumer(self.channel, self.target_queue)
		if self.threaded == True:
			self.consumer.register_callback(self._on_request_threaded)
		else:
			self.consumer.register_callback(self._on_request)
		self.consumer.consume()
		
		self.logger.debug('Init done')
		
	def _on_request(self, body, message):
		"""
		This method is automatically called when a request is incoming. It
		processes the incoming rpc calls in a serial manner (no multithreading).

		:param body: the body of the amqp message, already unpickled by kombu
		:param message: the plain amqp kombu.message with additional information
		"""
		self.logger.debug('Got Request')
		rpc_req = body
		
		if not isinstance(rpc_req, RpcRequest):
			self.logger.debug('Not an RpcRequest Instance')
			return
		
		self.logger.debug('Call func on Server %s' %self.server_id)
		try:
			self.logger.debug('corr_id: %s' % message.properties['correlation_id'])
			self.logger.debug('Call func with args %s' % repr(rpc_req.func_args))
			
			result = self.func_dict[rpc_req.func_name](*rpc_req.func_args)
			
			self.logger.debug('Result: %s' % repr(result))
			self.logger.debug('Build response')
			rpc_resp = RpcResponse(result)
		except Exception as e:
			self.logger.debug('exception happened')
			rpc_resp = RpcResponse(e, exception_raised=True)
			
		message.ack()
		
		self.logger.debug('Publish response')
		# producer 
		src_exchange = Exchange(message.properties['reply_to'], "direct", durable=False,
							auto_delete=True)
		self.producer = Producer(self.publish_channel, src_exchange, auto_declare=False)
		
		self.producer.publish(rpc_resp, serializer="pickle",
							correlation_id=message.properties['correlation_id'])
		
		self.logger.debug('acknowledge')
		


	def _on_request_threaded(self, body, message):
		"""
		This method is automatically called when a request is incoming and
		`threaded` is set to `True`. It processes the incoming rpc calls in
		a parallel manner (one thread for each request). A separate Publisher
		thread is used to send back the results.

		:param body: the body of the amqp message, already unpickled by kombu
		:param message: the plain amqp kombu.message with additional information
		"""
		self.logger.debug('Got Request')
		rpc_req = body
		
		if not isinstance(rpc_req, RpcRequest):
			self.logger.debug('Not an RpcRequest Instance')
			return
		
		message.ack()
		self.logger.debug('acknowledge')
		
		def exec_func(body, message, result_queue):
			self.logger.debug('Call func on Server %s' %self.server_id)
			try:
				self.logger.debug('corr_id: %s' % message.properties['correlation_id'])
				self.logger.debug('Call func with args %s' % repr(rpc_req.func_args))
				
				result = self.func_dict[rpc_req.func_name](*rpc_req.func_args)
				
				self.logger.debug('Result: %s' % repr(result))
				self.logger.debug('Build response')
				rpc_resp = RpcResponse(result)
			except Exception as e:
				self.logger.debug('exception happened')
				rpc_resp = RpcResponse(e, exception_raised=True)
				
			result_queue.put(ResultSet(rpc_resp, 
									message.properties['correlation_id'],
									message.properties['reply_to']))
				
		p = Thread(target=exec_func, 
				name=message.properties['correlation_id'],
				args=(body, message, self.result_queue))
		p.start()
		
	
	def register_function(self, func, name):
		"""
		Registers a function as an rpc function so that it is accessible from
		the proxy.
		
		:param func: The function we want to provide as rpc method
		:param name: The name with which the function is visible to the clients
		"""
		self.func_dict[name] = func
	
	def start(self):
		"""
		Starts the server. If `threaded` is `True` also starts the Publisher 
		thread.
		"""
		self.is_stopped = False
		if self.threaded == True:
			self.pub_thread = Publisher(self.result_queue, self.publish_channel)
			self.pub_thread.start()
			
		while self.do_run:
			try:
				self.logger.debug("drain_events: %s" % repr(self.do_run))
				self.connection.drain_events(timeout=1)
			except socket.timeout:
				self.logger.debug("do_run: %s" % repr(self.do_run))
			except:
				self.logger.debug("interrupt exception" )
				if self.threaded == True:
					self.pub_thread.stop()
				self.consumer.cancel()
				self.connection.close()
				self.publish_connection.close()
				self.is_stopped = True
				return
			
		if self.threaded == True:
			self.pub_thread.stop()
		self.logger.debug("Normal Exit" )
		self.consumer.cancel()
		self.connection.close()
		self.publish_connection.close()
		self.logger.debug("All closed" )
		self.is_stopped = True
		
	def stop(self):
		"""
		Stops the server.
		"""
		self.logger.debug('Stop server')
		self.do_run = False
		while not self.is_stopped:
			self.logger.debug('wait for stop')
			sleep(0.1)
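
RpcRequest, RpcResponse and ResultSet are imported from elsewhere in the callme package and are not shown here. Judging only by how the Server above uses them, minimal stand-ins might look like this (a sketch, not the real callme classes):

# Hypothetical stand-ins inferred from Server's usage of these names.
class RpcRequest(object):
    def __init__(self, func_name, func_args=()):
        self.func_name = func_name      # name looked up in Server.func_dict
        self.func_args = func_args      # positional arguments for the call


class RpcResponse(object):
    def __init__(self, result, exception_raised=False):
        self.result = result                      # return value, or the raised exception
        self.exception_raised = exception_raised  # True when result is an exception


class ResultSet(object):
    def __init__(self, rpc_resp, correlation_id, reply_to):
        self.rpc_resp = rpc_resp                # RpcResponse to send back
        self.correlation_id = correlation_id    # copied from the request message
        self.reply_to = reply_to                # exchange used by the Publisher thread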
Example 22
class event2amqp():

    def __init__(self,host,port,user,password,virtual_host, exchange_name,identifier,maxqueuelength,queue_dump_frequency):

        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.virtual_host = virtual_host
        self.exchange_name = exchange_name
        self.identifier = identifier
        self.maxqueuelength = maxqueuelength
        self.queue_dump_frequency = queue_dump_frequency

        self.connection_string = None

        self.connection = None
        self.channel = None
        self.producer = None
        self.exchange = None
        self.queue = deque([])

        self.tickage = 0

        self.load_queue()


    def create_connection(self):
        self.connection_string = "amqp://%s:%s@%s:%s/%s" % (self.user,self.password,self.host,self.port,self.virtual_host)
        try:        
            self.connection = BrokerConnection(self.connection_string)
            return True
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" % (error,func))
            return False

    def connect(self):
        logger.info("[Canopsis] connection with : %s" % self.connection_string)
        try:
            self.connection.connect()
            if not self.connected():
                return False
            else:
                self.get_channel()
                self.get_exchange()
                self.create_producer()                
                return True
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" % (error,func))
            return False

    def disconnect(self):
        try:        
            if self.connected():
                self.connection.release()
            return True
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" % (error,func))
            return False

    def connected(self):
        try:
            if self.connection.connected:            
                return True
            else:
                return False
        except:
            return False

    def get_channel(self):
        try:
            self.channel = self.connection.channel()
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" % (error,func))
            return False

    def get_exchange(self):
        try:
            self.exchange =  Exchange(self.exchange_name , "topic", durable=True, auto_delete=False)
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" % (error,func))
            return False

    def create_producer(self):
        try:
            self.producer = Producer(
                            channel=self.channel,
                            exchange=self.exchange,
                            routing_key=self.virtual_host
                            )
        except:
            func = sys._getframe(1).f_code.co_name
            error = str(sys.exc_info()[0])
            logger.error("[Canopsis] Unexpected error: %s in %s" % (error,func))
            return False

    def postmessage(self,message,retry=False):

        # process enqueued events if possible
        self.pop_events()

        if message["source_type"] == "component":
            key = "%s.%s.%s.%s.%s" % (
                    message["connector"],
                    message["connector_name"],
                    message["event_type"],
                    message["source_type"],
                    message["component"]
                )
        else:
            key = "%s.%s.%s.%s.%s[%s]" % (
                    message["connector"],
                    message["connector_name"],
                    message["event_type"],
                    message["source_type"],
                    message["component"],
                    message["resource"]
                )

        # connection management
        if not self.connected():
            logger.error("[Canopsis] Create connection")
            self.create_connection()
            self.connect()

        # publish message
        if self.connected():
            logger.info("[Canopsis] using routing key %s" % key)
            logger.info("[Canopsis] sending %s" % str(message))
            try:
                self.producer.revive(self.channel)                
                self.producer.publish(body=message, compression=None, routing_key=key, exchange=self.exchange_name)
                return True
            except:
                logger.error("[Canopsis] Not connected, going to queue messages until connection back")                
                self.queue.append({"key":key,"message":message})
                func = sys._getframe(1).f_code.co_name
                error = str(sys.exc_info()[0])
                logger.error("[Canopsis] Unexpected error: %s in %s" % (error,func))
                # logger.error(str(traceback.format_exc()))
                return False
        else:
            errmsg="[Canopsis] Not connected, going to queue messages until connection back (%s items in queue | max %s)" % (str(len(self.queue)),str(self.maxqueuelength))
            logger.info(errmsg)
            #enqueue_cano_event(key,message)
            if len(self.queue) < int(self.maxqueuelength):
                self.queue.append({"key":key,"message":message})
                logger.info("[Canopsis] Queue length : %d" % len(self.queue))                
                return True
            else:
                logger.error("[Canopsis] Maximum retention for event queue %s reached" % str(self.maxqueuelength))
                return False

    def errback(self,exc,interval):
        logger.warning("Couldn't publish message: %r. Retry in %ds" % (exc, interval))

    def pop_events(self):
        if self.connected():
            while len(self.queue) > 0:
                item = self.queue.pop()
                try:
                    logger.info("[Canopsis] Pop item from queue [%s] : %s" % (str(len(self.queue)),str(item)))
                    self.producer.revive(self.channel)                                    
                    self.producer.publish(body=item["message"], compression=None, routing_key=item["key"], exchange=self.exchange_name)
                except:
                    self.queue.append(item)
                    func = sys._getframe(1).f_code.co_name
                    error = str(sys.exc_info()[0])
                    logger.error("[Canopsis] Unexpected error: %s in %s" % (error,func))
                    return False
        else:
            return False

    def hook_tick(self, brok):

        self.tickage += 1

        # queue retention saving
        if self.tickage >= int(self.queue_dump_frequency) and len(self.queue) > 0:
            # flush queue to disk if queue age reach queue_dump_frequency
            self.save_queue()
            self.tickage = 0

        return True

    def save_queue(self):
        retentionfile="%s/canopsis.dat" % os.getcwd()
        logger.info("[Canopsis] saving to %s" % retentionfile)
        filehandler = open(retentionfile, 'w') 
        pickle.dump(self.queue, filehandler) 
        filehandler.close()

        return True

    def load_queue(self):
        retentionfile = "%s/canopsis.dat" % os.getcwd()
        logger.info("[Canopsis] loading from %s" % retentionfile)

        try:
            # open inside the try so a missing retention file does not crash __init__
            with open(retentionfile, 'r') as filehandler:
                self.queue = pickle.load(filehandler)
        except:
            pass
        return True
Example 23
class test_pika(unittest.TestCase):

    def purge(self, names):
        chan = self.connection.channel()
        map(chan.queue_purge, names)

    def setUp(self):
        self.connection = BrokerConnection(transport="pika")
        try:
            self.connection.connect()
        except socket.error:
            self.connected = False
        else:
            self.connected = True
        self.exchange = Exchange("tamqplib", "direct")
        self.queue = Queue("tamqplib", self.exchange, "tamqplib")

    def test_produce__consume(self):
        if not self.connected:
            raise SkipTest("Broker not running.")
        chan1 = self.connection.channel()
        producer = Producer(chan1, self.exchange)

        producer.publish({"foo": "bar"}, routing_key="tamqplib")
        chan1.close()

        chan2 = self.connection.channel()
        consumer = Consumer(chan2, self.queue)
        message = consumeN(self.connection, consumer)
        self.assertDictEqual(message[0], {"foo": "bar"})
        chan2.close()
        self.purge(["tamqplib"])

    def test_produce__consume_multiple(self):
        if not self.connected:
            raise SkipTest("Broker not running.")
        chan1 = self.connection.channel()
        producer = Producer(chan1, self.exchange)
        b1 = Queue("pyamqplib.b1", self.exchange, "b1")
        b2 = Queue("pyamqplib.b2", self.exchange, "b2")
        b3 = Queue("pyamqplib.b3", self.exchange, "b3")

        producer.publish("b1", routing_key="b1")
        producer.publish("b2", routing_key="b2")
        producer.publish("b3", routing_key="b3")
        chan1.close()

        chan2 = self.connection.channel()
        consumer = Consumer(chan2, [b1, b2, b3])
        messages = consumeN(self.connection, consumer, 3)
        self.assertItemsEqual(messages, ["b1", "b2", "b3"])
        chan2.close()
        self.purge(["pyamqplib.b1", "pyamqplib.b2", "pyamqplib.b3"])

    def test_timeout(self):
        if not self.connected:
            raise SkipTest("Broker not running.")
        chan = self.connection.channel()
        self.purge([self.queue.name])
        consumer = Consumer(chan, self.queue)
        self.assertRaises(socket.timeout, self.connection.drain_events,
                timeout=0.3)
        consumer.cancel()

    def test_basic_get(self):
        chan1 = self.connection.channel()
        producer = Producer(chan1, self.exchange)
        producer.publish({"basic.get": "this"}, routing_key="basic_get")
        chan1.close()

        chan2 = self.connection.channel()
        queue = Queue("amqplib_basic_get", self.exchange, "basic_get")
        queue = queue(chan2)
        queue.declare()
        for i in range(50):
            m = queue.get()
            if m:
                break
            time.sleep(0.1)
        self.assertEqual(m.payload, {"basic.get": "this"})
        chan2.close()

    def tearDown(self):
        if self.connected:
            self.connection.close()
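
These tests call a consumeN() helper that is defined elsewhere in the test suite. A minimal sketch, assuming it drains events until n message payloads have been collected or roughly `timeout` seconds of silence have passed:

# Hypothetical sketch of the consumeN() helper the tests rely on.
import socket


def consumeN(conn, consumer, n=1, timeout=30):
    messages = []

    def callback(body, message):
        messages.append(body)
        message.ack()

    consumer.register_callback(callback)
    consumer.consume()

    seconds = 0
    while len(messages) < n:
        try:
            conn.drain_events(timeout=1)
        except socket.timeout:
            seconds += 1
            if seconds >= timeout:
                raise
    return messages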
Example 24
class RabbitMQHandler(object):
    def __init__(self,
                 connection_string,
                 exchange_name,
                 exchange_type="topic"):
        self._connection = BrokerConnection(connection_string)
        self._connections = {self._connection}  # set of connections for the heartbeat
        self._exchange = Exchange(exchange_name,
                                  durable=True,
                                  delivery_mode=2,
                                  type=exchange_type,
                                  auto_delete=False,
                                  no_declare=False)
        monitor_heartbeats(self._connections)

    @retry(wait_fixed=200, stop_max_attempt_number=3)
    def publish(self, item, contributor_id):
        with self._connection.channel() as channel:
            with Producer(channel) as producer:
                producer.publish(
                    item,
                    exchange=self._exchange,
                    routing_key=contributor_id,
                    declare=[self._exchange],
                    content_type="plain/text",
                )

    def info(self):
        info = self._connection.info()
        info.pop("password", None)
        return info

    def check_connection(self, force=False):
        """
        Trying to connect is the best way to check that the connection works
        if force is set to True, we will force the connection again
        """
        try:
            # we need to refresh the connection to be notified as soon as rabbitmq stopped working
            if force:
                self._connection._establish_connection()
            self._connection.ensure_connection(interval_start=0, max_retries=1)
            return True
        except Exception:
            return False

    def connect(self):
        self._connection.connect()

    def close(self):
        for c in self._connections:
            c.release()

    def listen_load_realtime(self, queue_name, max_retries=10):
        log = logging.getLogger(__name__)

        route = "task.load_realtime.*"
        log.info("listening route {} on exchange {}...".format(
            route, self._exchange))
        rt_queue = Queue(queue_name,
                         routing_key=route,
                         exchange=self._exchange,
                         durable=False)
        RTReloader(connection=self._connection,
                   rpc_queue=rt_queue,
                   exchange=self._exchange,
                   max_retries=max_retries).run()