Example #1
    def test_multiple_queues_with_same_exchange_and_routing_key(
            self, container_factory, entrypoint_tracker, rabbit_manager,
            exchange, wait_for_result, publish_message, counter, rabbit_config,
            backoff_count, fast_backoff):
        """ Message consumption backoff works when there are muliple queues
        receiving the published message
        """
        queue_one = Queue("one", exchange=exchange, routing_key="message")
        queue_two = Queue("two", exchange=exchange, routing_key="message")

        class ServiceOne(object):
            name = "service_one"

            @consume(queue_one)
            def method(self, payload):
                if counter["one"].increment() <= backoff_count:
                    raise Backoff()
                return "one"

        class ServiceTwo(object):
            name = "service_two"

            @consume(queue_two)
            def method(self, payload):
                counter["two"].increment()
                return "two"

        container_one = container_factory(ServiceOne, rabbit_config)
        container_one.start()
        container_two = container_factory(ServiceTwo, rabbit_config)
        container_two.start()

        with entrypoint_waiter(container_one,
                               'method',
                               callback=wait_for_result) as result_one:

            with entrypoint_waiter(container_two,
                                   'method',
                                   callback=wait_for_result) as result_two:

                publish_message(exchange, "msg", routing_key="message")

        # ensure all messages are processed
        vhost = rabbit_config['vhost']
        for delay in fast_backoff:
            backoff_queue = rabbit_manager.get_queue(
                vhost, get_backoff_queue_name(delay))
            assert backoff_queue['messages'] == 0

        service_queue_one = rabbit_manager.get_queue(vhost, queue_one.name)
        service_queue_two = rabbit_manager.get_queue(vhost, queue_two.name)
        assert service_queue_one['messages'] == 0
        assert service_queue_two['messages'] == 0

        assert result_one.get() == "one"
        assert result_two.get() == "two"

        # backoff from service_one not seen by service_two
        assert counter['one'] == backoff_count + 1
        assert counter['two'] == 1
Example #2
def get_queue_info(connection, queue):
    """Returns queue name, message count, consumer count
    """
    with connections[connection._pool_conn].acquire(block=True) as conn:
        q = Queue(queue.name, channel=conn, exchange=queue.exchange,
                  durable=queue.durable, auto_delete=queue.auto_delete)
        # doesn't actually declare queue, just checks if it exists
        try:
            return q.queue_declare(passive=True)
        except Exception as e:
            # better way to check this?
            if "NOT_FOUND" in str(e):
                raise NotFoundError()
            raise
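A minimal usage sketch for the helper above, assuming the default py-amqp transport (where a passive declare returns a namedtuple carrying the queue name, message count, and consumer count); the broker URL and queue names are illustrative assumptions:

# Hedged sketch: inspect a queue without (re)declaring it.
from kombu import Connection, Exchange, Queue

my_exchange = Exchange("demo", type="direct")
my_queue = Queue("demo_queue", exchange=my_exchange, routing_key="demo")

with Connection("amqp://guest:guest@localhost:5672//") as conn:
    bound = my_queue(conn.default_channel)  # bind the queue to a channel
    bound.declare()  # ensure the queue exists for this demo
    # passive declare raises if the queue does not exist; otherwise it
    # reports the queue's name, message count, and consumer count
    info = bound.queue_declare(passive=True)
    print(info.message_count, info.consumer_count)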
Example #3
    def init_rabbit_mq(self):
        """
        This function will attempt to connect to RabbitMQ Server and if successful
        return 'True'. Returns 'False' otherwise.
        """

        self.logger.info("Initializing RabbitMQ stuff")
        try:
            schedule_exchange = Exchange("airtime-media-monitor",
                                         "direct",
                                         durable=True,
                                         auto_delete=True)
            schedule_queue = Queue("media-monitor",
                                   exchange=schedule_exchange,
                                   key="filesystem")
            self.connection = BrokerConnection(
                self.config.cfg["rabbitmq_host"],
                self.config.cfg["rabbitmq_user"],
                self.config.cfg["rabbitmq_password"],
                self.config.cfg["rabbitmq_vhost"])
            channel = self.connection.channel()
            consumer = Consumer(channel, schedule_queue)
            consumer.register_callback(self.handle_message)
            consumer.consume()
        except Exception as e:
            self.logger.error(e)
            return False

        return True
Example #4
    def get_queues(self):
        """
            One queue for incomming messages, one queue for outgoing messages.
        """

        queues = {}

        queues['incoming_messages'] = Queue('incoming_messages',
                                            exchange=self.exchanges['psms'],
                                            routing_key="incoming_messages",
                                            durable=self.persistent)
        queues['outgoing_messages'] = Queue('outgoing_messages',
                                            exchange=self.exchanges['psms'],
                                            routing_key="outgoing_messages",
                                            durable=self.persistent)
        return queues
Example #5
class ConsumerService:
    """
    Microservice responsible for consume data notifications from Service4 and
    dispatching those data to the Client by saving those data to Redis database.

    Attributes:
        name (str): The microservice name.
        _publication (Exchange): Messagin exchange object.
        _queue  (Queue): Messaging publications queue to be consumed.
        _redis (Redis): Nameko Redis connector object.
    """

    name = 'consumer'
    _publication = Exchange('new_publication', type='direct')
    _queue = Queue('publication_queue', exchange=_publication)
    _redis = Redis('my_redis')

    @consume(_queue)
    def receive_new_publication(self, payload: str):
        """
        Responsible for consuming incoming data received from service4 by
        saving data to Redis Queue.

        Args:
            payload (str): Data to be consumed.
        """
        try:
            self._redis.rpush('publication_queue', payload)
        except Exception as e:
            print('Ooops!', e)
Example #6
class InceptionerService:
    """Service endpoint for Inceptioner"""
    name = "inceptioner_service"

    test_exchange = Exchange('nameko_test_exchange', type='direct')
    test_queue = Queue('nameko_test_queue', exchange=test_exchange)

    @http('GET', '/get/<int:value>')
    def get_method_for_test(self, request, value):
        return json.dumps({'value': value})


    @http('POST', '/recognize/base64')
    def do_post(self, request):
        logging.info('Received Request on recognition from base64')
        request_data = request.data
        logging.debug('Data Received: {}'.format(request_data))
        res = process_request(request.data)
        print(res)
        return str(res)


    @consume(test_queue)
    def handle_event(self, payload):
        logging.info('Received request on recognition on the queue')
        logging.debug('Data received: {}'.format(payload))

        res = process_request(payload)
        print(res)
        return res
Example #7
    def init_rabbit_mq(self):
        self.logger.info("Initializing RabbitMQ stuff")
        try:

            self.logger.info("rabbitmq_host: " + self.config["rabbitmq_host"])
            self.logger.info("rabbitmq_user: "******"rabbitmq_user"])
            self.logger.info("rabbitmq_password: "******"rabbitmq_password"])
            self.logger.info("rabbitmq_vhost: " +
                             self.config["rabbitmq_vhost"])
            """"""
            schedule_exchange = \
                    Exchange("airtime-pypo", "direct",
                        durable=True, auto_delete=True)
            schedule_queue = \
                    Queue("pypo-fetch", exchange=schedule_exchange, key="foo")
            connection = BrokerConnection(self.config["rabbitmq_host"], \
                    self.config["rabbitmq_user"], \
                    self.config["rabbitmq_password"], \
                    self.config["rabbitmq_vhost"])

            channel = connection.channel()
            self.simple_queue = SimpleQueue(channel, schedule_queue)
            """
            connection = Connection('amqp://*****:*****@172.16.82.1:5672//pypox')
            self.simple_queue = connection.SimpleQueue('pypo-fetch')
            #message = simple_queue.get(block=True, timeout=1)
            """

        except Exception as e:
            self.logger.error(e)
            return False
Example #8
    def init_rabbit_mq(self):
        try:
            self.logger.info("Initializing RabbitMQ message consumer...")
            schedule_exchange = Exchange("airtime-media-monitor",
                                         "direct",
                                         durable=True,
                                         auto_delete=True)
            schedule_queue = Queue("media-monitor",
                                   exchange=schedule_exchange,
                                   key="filesystem")
            self.connection = BrokerConnection(self.cfg["rabbitmq_host"],
                                               self.cfg["rabbitmq_user"],
                                               self.cfg["rabbitmq_password"],
                                               self.cfg["rabbitmq_vhost"])
            channel = self.connection.channel()

            self.simple_queue = SimpleQueue(channel, schedule_queue)

            self.logger.info("Initialized RabbitMQ consumer.")
        except Exception as e:
            self.logger.info("Failed to initialize RabbitMQ consumer")
            self.logger.error(e)
            return False

        return True
Example #9
class Consumer:
    name = 'worker_consumer'
    test = Exchange('test', type='direct')
    tq = Queue('q1', exchange=test)

    @consume(tq)
    def handle_consume(self, body):
        print("Received message: {0}".format(body))
Example #10
class Service:
    name = "service"

    def generate_message(self):
        return "Time is {}".format(arrow.utcnow())

    @rpc
    def method(self, timestamp):
        """ Return a message on or after `timestamp`.

        The method will be called repeatedly until `timestamp` has passed.
        """
        if arrow.get(timestamp) < arrow.utcnow():
            return self.generate_message()

        raise Backoff()

    @event_handler('src_service', 'event_type')
    def handle_event(self, event_data):
        """ Print a message on or after `event_data['timestamp']`

        The event will be redelivered repeatedly until `timestamp` has passed.
        """
        timestamp = event_data.get('timestamp')
        if arrow.get(timestamp) < arrow.utcnow():
            msg = self.generate_message()
            print(msg)
            return msg

        raise Backoff()

    @consume(Queue('messages'))
    def handle_message(self, payload):
        """ Print a message on or after `payload['timestamp']`

        The message will be redelivered repeatedly until `timestamp` has
        passed.
        """
        timestamp = payload.get('timestamp')
        if arrow.get(timestamp) < arrow.utcnow():
            msg = self.generate_message()
            print(msg)
            return msg

        raise Backoff()

    @rpc
    @entrypoint_retry(retry_for=ValueError)
    def decorated_method(self, timestamp):
        """ Return a message on or after `timestamp`.

        The method will be called repeatedly until `timestamp` has passed.
        """
        if arrow.get(timestamp) < arrow.utcnow():
            return self.generate_message()

        raise ValueError()
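A hedged sketch of driving the `method` entrypoint above from outside the cluster, assuming the service runs with nameko-amqp-retry enabled (so raising Backoff triggers redelivery) and an illustrative broker URI:

# Hedged sketch: call Service.method until the timestamp passes.
import arrow
from nameko.standalone.rpc import ClusterRpcProxy

config = {'AMQP_URI': 'amqp://guest:guest@localhost:5672//'}  # assumption

with ClusterRpcProxy(config) as cluster_rpc:
    target = arrow.utcnow().shift(seconds=2)
    # The call keeps being retried while the entrypoint raises Backoff;
    # it returns a message once `timestamp` has passed.
    print(cluster_rpc.service.method(target.isoformat()))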
Example #11
    def test_declare(self, maybe_declare, publisher, get_message_from_queue,
                     routing_key, queue, exchange):
        declare = [
            Queue(name="q1", exchange=exchange, routing_key=routing_key),
            Queue(name="q2", exchange=exchange, routing_key=routing_key)
        ]

        publisher.publish("payload", declare=declare)

        assert maybe_declare.call_args_list == [
            call(exchange, ANY, ANY),
            call(queue, ANY, ANY),
            call(declare[0], ANY, ANY),
            call(declare[1], ANY, ANY)
        ]

        assert get_message_from_queue(queue.name).payload == "payload"
        assert get_message_from_queue(declare[0].name).payload == "payload"
        assert get_message_from_queue(declare[1].name).payload == "payload"
Example #12
 def get_consumers(self, Consumer, channel):
     return [
         Consumer(queues=[
             Queue(self.request_routing_key,
                   _amqp_exchange(),
                   routing_key=self.request_routing_key,
                   durable=False)
         ],
                  callbacks=[self.process_task])
     ]
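The `get_consumers` hook above is part of kombu's ConsumerMixin protocol. A hedged sketch of the surrounding worker class, with the exchange and routing key as illustrative assumptions in place of the snippet's `_amqp_exchange()` helper:

# Hedged sketch of a ConsumerMixin worker built around a
# get_consumers() hook like the one above.
from kombu import Connection, Exchange, Queue
from kombu.mixins import ConsumerMixin

class TaskWorker(ConsumerMixin):
    def __init__(self, connection, request_routing_key):
        self.connection = connection  # required by ConsumerMixin
        self.request_routing_key = request_routing_key

    def get_consumers(self, Consumer, channel):
        exchange = Exchange('tasks', type='direct')  # assumption
        queue = Queue(self.request_routing_key, exchange,
                      routing_key=self.request_routing_key, durable=False)
        return [Consumer(queues=[queue], callbacks=[self.process_task])]

    def process_task(self, body, message):
        # kombu passes (decoded payload, raw message) to callbacks
        print('got task:', body)
        message.ack()

# TaskWorker(Connection('amqp://guest:guest@localhost//'), 'requests').run()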
Example #13
 def init_rabbit_mq(self):
     self.logger.info("Initializing RabbitMQ stuff")
     try:
         schedule_exchange = Exchange("airtime-pypo", "direct", durable=True, auto_delete=True)
         schedule_queue = Queue("pypo-fetch", exchange=schedule_exchange, key="foo")
         connection = BrokerConnection(config["rabbitmq_host"], config["rabbitmq_user"], config["rabbitmq_password"], config["rabbitmq_vhost"])
         channel = connection.channel()
         self.simple_queue = SimpleQueue(channel, schedule_queue)
     except Exception as e:
         self.logger.error(e)
         return False
Example #14
    def get_queues(self):
        """
            Return a dict with queues all worker should be able
            to use:

            - log queue to all the router to receive logs from external process
            - undelivered kombo message queues to handle orphan messages
        """
        queues = {}

        queues['logs'] = Queue('logs',
                               exchange=self.exchanges['psms'],
                               routing_key="logs",
                               durable=False)

        queues['undelivered_kombu_message'] = Queue(
            'ae.undeliver',
            exchange=self.exchanges['psms'],
            routing_key="ae.undeliver",
            durable=self.persistent)

        return queues
Example #15
 def make_queue(self, expiration):
     backoff_queue = Queue(
         name=get_backoff_queue_name(expiration),
         exchange=self.exchange,
         binding_arguments={
             'backoff': expiration,
             'x-match': 'any'
         },
         queue_arguments={
             'x-dead-letter-exchange': ""   # default exchange
         }
     )
     return backoff_queue
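This queue backs a backoff/retry pattern: it binds to a headers exchange by a `backoff` header and dead-letters expired messages to the default exchange, which routes each one back to the queue named by its routing key. A hedged round-trip sketch, with the exchange name, delay, and queue names as illustrative assumptions:

# Hedged sketch: publish onto a backoff queue so RabbitMQ dead-letters
# the message back to the original queue when its TTL expires.
from kombu import Connection, Exchange, Queue

backoff_exchange = Exchange('backoff', type='headers')  # assumption
backoff_queue = Queue(
    'backoff--1000',  # assumption: one queue per delay
    exchange=backoff_exchange,
    binding_arguments={'backoff': 1000, 'x-match': 'any'},
    queue_arguments={'x-dead-letter-exchange': ''},  # default exchange
)

with Connection('amqp://guest:guest@localhost:5672//') as conn:
    producer = conn.Producer()
    producer.publish(
        {'retry': True},
        exchange=backoff_exchange,
        headers={'backoff': 1000},      # matched by binding_arguments
        expiration=1,                   # per-message TTL, in seconds
        routing_key='original_queue',   # where the dead-letter lands
        declare=[backoff_exchange, backoff_queue],
    )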
Example #16
def main():
    cfg = {
        'hostname': 'localhost',
        'userid': 'guest',
        'password': '******',
        'virtual_host': '/',
        'port': 5672
    }
    transport = 'pika'
    #transport = 'librabbitmq'
    connection = BrokerConnection(transport=transport, **cfg)
    connection.connect()

    cfg = {
        'name': 'simple-test-1',
        'auto_delete': True,
        'durable': False,
        'delivery_mode': 'transient'
    }
    channel = connection.channel()
    exchange = Exchange(channel=channel, **cfg)
    #exchange = exchange_def(channel)

    routing_key = 'simple-test-1-route'
    queue = Queue(exchange=exchange, routing_key=routing_key, **cfg)

    channel = connection.channel()
    producer = Producer(channel=channel,
                        exchange=exchange,
                        routing_key=routing_key)

    channel = connection.channel()
    consumer = Consumer(channel=channel, queues=[queue], callbacks=[receive])
    consumer.consume()

    def serve_forever():
        while True:
            #print 'drain'
            #gevent.sleep(0.0001)
            connection.drain_events(timeout=1)

    def publish_forever():
        while True:
            producer.publish(loremIpsum)
            gevent.sleep(0.0001)

    #g1, g2 = gevent.spawn(publish_forever), gevent.spawn(serve_forever)
    g2 = gevent.spawn(serve_forever)
    g1 = gevent.spawn(publish_forever)
    gevent.joinall([g1, g2])
Example #17
class Listener():
    """ Simple class to wrap the operations needed for an AMQP listener """

    def __init__(self, hostname="127.0.0.1", userid="guest", password="******",
                 virtual_host="/", port=5672):
        """ Setup a connection to the AMQP server, get a channel 
            Create a topic exchange, attach a bonded queue to it
            and register a consumer callback.
            
            A specific service listener implementation overrides the name 
            and routing_key
        """

        self.connection = BrokerConnection(hostname=hostname, 
                                           userid=userid, password=password, 
                                           virtual_host=virtual_host, port=port,
                                           insist=False, ssl=False)
        self.channel = self.connection.channel()
        self.exchange = Exchange(name=self.name, type="topic", durable=True,
                                 channel=self.channel)
        self.queue = Queue(self.name, exchange=self.exchange,
                           routing_key=self.routing_key)
        self.queue = self.queue(self.channel)
        self.queue.declare()
        self.queue.consume(consumer_tag="", callback=self.callback, no_ack=True)
        self.connection.connect()
        return

    def callback(self, msg):
        """ This callback is run when a message is recieved """
        return

    def consume(self):
        """ Event loop """
        while True:
            self.connection.drain_events()
        return
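A hedged sketch of the subclassing pattern the Listener docstring describes: a concrete listener only supplies `name` and `routing_key` (both values below are assumptions) and overrides `callback`:

# Hedged sketch: a concrete Listener subclass.
class BuildListener(Listener):
    name = 'builds'           # becomes the exchange and queue name
    routing_key = 'builds.#'  # topic pattern this listener binds with

    def callback(self, msg):
        print('build event:', msg)

# listener = BuildListener()
# listener.consume()  # blocks, draining events forever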
Example #18
 def init_rabbit_mq(self):
     self.logger.info("Initializing RabbitMQ stuff")
     try:
         schedule_exchange = Exchange("airtime-pypo",
                                      "direct",
                                      durable=True,
                                      auto_delete=True)
         schedule_queue = Queue("pypo-fetch",
                                exchange=schedule_exchange,
                                key="foo")
         with Connection(self.config["host"], \
                         self.config["user"], \
                         self.config["password"], \
                         self.config["vhost"], \
                         heartbeat = 5) as connection:
             rabbit = RabbitConsumer(connection, [schedule_queue], self)
             rabbit.run()
     except Exception as e:
         self.logger.error(e)
Example #19
 def init_rabbit_mq(self):
     logger.info("Initializing RabbitMQ stuff")
     try:
         schedule_exchange = Exchange("airtime-pypo",
                                      "direct",
                                      durable=True,
                                      auto_delete=True)
         schedule_queue = Queue("pypo-fetch",
                                exchange=schedule_exchange,
                                key="foo")
         with Connection(
                 f"amqp://{self.config.user}:{self.config.password}"
                 f"@{self.config.host}:{self.config.port}"
                 f"/{self.config.vhost}",
                 heartbeat=5,
         ) as connection:
             rabbit = RabbitConsumer(connection, [schedule_queue], self)
             rabbit.run()
     except Exception as e:
         logger.error(e)
Example #20
 def init_rabbit_mq(self):
     self.logger.info("Initializing RabbitMQ stuff")
     try:
         schedule_exchange = Exchange("airtime-media-monitor",
                                      "direct",
                                      durable=True,
                                      auto_delete=True)
         schedule_queue = Queue("media-monitor",
                                exchange=schedule_exchange,
                                key="filesystem")
         self.connection = BrokerConnection(
             self.config.cfg["rabbitmq_host"],
             self.config.cfg["rabbitmq_user"],
             self.config.cfg["rabbitmq_password"], "/")
         channel = self.connection.channel()
         consumer = Consumer(channel, schedule_queue)
         consumer.register_callback(self.handle_message)
         consumer.consume()
     except Exception as e:
         self.logger.error(e)
         return False
Example #21
 def init_rabbit_mq(self):
     logger = logging.getLogger('fetch')
     logger.info("Initializing RabbitMQ stuff")
     try:
         schedule_exchange = Exchange("airtime-schedule",
                                      "direct",
                                      durable=True,
                                      auto_delete=True)
         schedule_queue = Queue("pypo-fetch",
                                exchange=schedule_exchange,
                                key="foo")
         self.connection = BrokerConnection(config["rabbitmq_host"],
                                            config["rabbitmq_user"],
                                            config["rabbitmq_password"],
                                            "/")
         channel = self.connection.channel()
         consumer = Consumer(channel, schedule_queue)
         consumer.register_callback(handle_message)
         consumer.consume()
     except Exception as e:
         logger.error(e)
         return False
Example #22
    def handle(self):
        log.debug("Start consuming")
        exchange = Exchange('amqp.topic', type='direct', durable=True)
        self._connection = BrokerConnection(*CONNECTION)
        channel = self._connection.channel()

        for entry in self.queues:
            log.debug("prepare to consume %s" % entry['routing_key'])
            queue = Queue(entry['name'],
                          exchange=exchange,
                          routing_key=entry['routing_key'])
            consumer = Consumer(channel, queue)
            consumer.register_callback(getattr(self, entry['handler']))
            consumer.consume()

        log.debug("start consuming...")
        while True:
            try:
                self._connection.drain_events()
            except socket.timeout:
                log.debug("nothing to consume...")
                break
        self._connection.close()
Example #23
    def deadlettering_exchange(self, rabbit_config, exchange, queue):
        conn = Connection(rabbit_config[AMQP_URI_CONFIG_KEY])

        with connections[conn].acquire(block=True) as connection:

            deadletter_exchange = Exchange(name="deadletter", type="topic")
            deadletter_exchange.maybe_bind(connection)
            deadletter_exchange.declare()

            deadletter_queue = Queue(
                name="deadletter",
                exchange=deadletter_exchange,
                routing_key="#",
                queue_arguments={'x-dead-letter-exchange': exchange.name})
            deadletter_queue.maybe_bind(connection)
            deadletter_queue.declare()

        return deadletter_exchange
Example #24
    def __init__(self, hostname="127.0.0.1", userid="guest", password="******",
                 virtual_host="/", port=5672):
        """ Setup a connection to the AMQP server, get a channel 
            Create a topic exchange, attach a bonded queue to it
            and register a consumer callback.
            
            A specific service listener implementation overrides the name 
            and routing_key
        """

        self.connection = BrokerConnection(hostname=hostname, 
                                           userid=userid, password=password, 
                                           virtual_host=virtual_host, port=port,
                                           insist=False, ssl=False)
        self.channel = self.connection.channel()
        self.exchange = Exchange(name=self.name, type="topic", durable=True,
                                 channel=self.channel)
        self.queue = Queue(self.name, exchange=self.exchange,
                           routing_key=self.routing_key)
        self.queue = self.queue(self.channel)
        self.queue.declare()
        self.queue.consume(consumer_tag="", callback=self.callback, no_ack=True)
        self.connection.connect()
        return
Example #25
def queue(exchange):
    return Queue("messages", exchange=exchange, routing_key="message")
Example #26
    def run(self):

        print('pypo Pusher')
        if self.action == 'update_schedule':
            print('update_schedule!!')
            credentials = pika.PlainCredentials(MQ_USER, MQ_PASS)
            connection = pika.BlockingConnection(
                pika.ConnectionParameters(MQ_HOST, 5672, '/airtime',
                                          credentials))
            channel = connection.channel()
            channel.queue_declare(queue='pypo-fetch', durable=True)
            message = {
                'schedule': {
                    'media': {}
                },
                'event_type': 'update_schedule'
            }

            import json
            message = json.dumps(message)

            channel.basic_publish(exchange='airtime-pypo',
                                  routing_key='pypo-fetch',
                                  body=message)

            channel.close()
            connection.close()

        if self.action == 'update_schedule_kombu':
            print('update_schedule!!')

            exchange = Exchange("airtime-pypo", "direct",
                                durable=True, auto_delete=True)
            queue = Queue("pypo-fetch", exchange=exchange,
                          routing_key="foo", durable=True)

            connection = BrokerConnection(MQ_HOST, MQ_USER, MQ_PASS, MQ_VHOST)
            channel = connection.channel()

            simple_queue = SimpleQueue(channel, queue)

            message = {
                'schedule': {
                    'media': {}
                },
                'event_type': 'update_schedule'
            }

            print(simple_queue.qsize())
            print('do:')

            producer = Producer(channel, exchange=exchange,
                                routing_key=None, serializer="json")
            producer.publish(message, routing_key='pypo-fetch')

            print(simple_queue.qsize())
            channel.close()
Example #27
class NotifierService(object):
    name = 'notifier'
    misaki = web.Slack('misaki')
    error = ErrorHandler()

    @property
    def channel(self):
        return f'#{os.getenv("NOTIFICATION_CHANNEL", "notifications")}'

    @staticmethod
    def _format_notification(input_):
        keys = ('id', 'source', 'type', 'content')
        if not all(k in input_.keys() for k in keys):
            raise NotifierServiceError(
                'Some keys are missing in the input dict')
        blocks = [{
            'type': 'section',
            'text': {
                'type': 'mrkdwn',
                'text': f'*{input_["content"]}*',
            }
        }, {
            'type':
            'context',
            'elements': [{
                'type': 'mrkdwn',
                'text': f'{k}: {input_[k]}'
            } for k in input_ if k != 'content']
        }]
        return blocks

    @consume(queue=Queue(name='evt_all_notifications',
                         exchange=Exchange(name='all_notifications',
                                           type='topic',
                                           auto_delete=True)))
    def handle_all_notifications(self, payload):
        _log.info(f'Received {payload}')
        input_ = bson.json_util.loads(payload)
        self.misaki.api_call('chat.postMessage',
                             channel=self.channel,
                             blocks=self._format_notification(input_),
                             text=input_['content'])

    @rpc
    def send_to_slack(self, channel, msg, image_url=None, context=None):
        _log.info(f'Sending message {msg} to slack channel {channel} ...')
        slack_msg = [{
            'type': 'section',
            'text': {
                'type': 'mrkdwn',
                'text': f'*{msg}*'
            }
        }]
        if image_url:
            slack_msg.extend([{
                'type': 'section',
                'text': {
                    'type':
                    'mrkdwn',
                    'text':
                    f'Please find your image at the following <{image_url}|link>'
                }
            }, {
                'type': 'image',
                'image_url': image_url,
                'alt_text': 'Can not be displayed here'
            }])
        if context:
            slack_msg.append({
                'type': 'context',
                'elements': [{
                    'type': 'mrkdwn',
                    'text': context
                }]
            })
        self.misaki.api_call('chat.postMessage',
                             channel=channel,
                             blocks=slack_msg,
                             text=msg)

    @rtm.handle_message
    def handle_any_event(self, event, message):
        _log.info(event)
        _log.info(message)
Example #28
 def queue(self, amqp_uri, exchange, routing_key):
     queue = Queue(name="queue", exchange=exchange, routing_key=routing_key)
     return queue
Example #29
import sys
import cx_Oracle
from kombu.connection import BrokerConnection
from kombu.messaging import Exchange, Queue, Consumer
from datetime import datetime
from time import sleep
exchange = Exchange("amq.topic", "topic", durable=True)
q = Queue("oracle_job_inserter", exchange=exchange, key="log.job.#")
q.routing_key="log.job.#"
dsn = cx_Oracle.makedsn('host',1521,'workspace') 

def ts_literal(ts):
  return datetime.fromtimestamp(ts).isoformat().replace('T',' ')

def process_job(msg, body):
  while True:
    try:
        with cx_Oracle.Connection("user/password@"+dsn) as ora_con:
          cursor = ora_con.cursor()
          print(msg)
          if 'exit_status' not in msg:
            body.ack()
            break
#          print dir(body)
          cluster = body.delivery_info["routing_key"].replace("log.job.","").upper()
          cursor.execute("""select queue_record_num from job_queue where QUEUE_NAME='%s' and "cluster"='%s'""" % (msg["queue"],cluster))
          queue_id = cursor.fetchall()
          if not queue_id:
            cursor.execute("""insert into job_queue (QUEUE_NAME,"cluster") VALUES ('%s','%s')""" % (msg["queue"],cluster))
            ora_con.commit()
            cursor.execute("""select queue_record_num from job_queue where QUEUE_NAME='%s' and "cluster"='%s'""" % (msg["queue"],cluster))
Example #30
class LoaderService(object):
    name = 'loader'

    metadata = RpcProxy('metadata')
    datastore = RpcProxy('datastore')
    referential = RpcProxy('referential')
    dispatch = EventDispatcher()
    error = ErrorHandler()

    @rpc
    def write(self, write_policy, meta, target_table, records, upsert_key=None, delete_keys=None, chunk_size=None):
        _log.info(
            f'Writing in {target_table} using {write_policy} strategy ...')
        if write_policy not in ('insert', 'upsert', 'bulk_insert', 'delete_insert', 'delete_bulk_insert',
                                'truncate_insert', 'truncate_bulk_insert'):
            _log.error(f'{write_policy} not supported')
            raise LoaderServiceError('Wrong value for parameter write_policy')

        if write_policy in ('bulk_insert', 'delete_bulk_insert', 'truncate_bulk_insert') and not chunk_size:
            _log.error('chunk_size missing')
            raise LoaderServiceError(
                'Bulk loading strategy requires a chunk size')

        try:
            meta = list(map(tuple, meta))
        except Exception:
            _log.error('Badly formatted meta')
            raise LoaderServiceError('Badly formatted meta')

        if write_policy == 'insert':
            self.datastore.insert(
                target_table, bson.json_util.dumps(records), meta)
        elif write_policy == 'upsert':
            self.datastore.upsert(target_table, upsert_key,
                                  bson.json_util.dumps(records), meta)
        elif write_policy == 'bulk_insert':
            self.datastore.bulk_insert(
                target_table, bson.json_util.dumps(records), meta, chunk_size=chunk_size)
        elif write_policy == 'delete_insert':
            self.datastore.delete(target_table, delete_keys)
            self.datastore.insert(
                target_table, bson.json_util.dumps(records), meta)
        elif write_policy == 'delete_bulk_insert':
            self.datastore.delete(target_table, delete_keys)
            self.datastore.bulk_insert(
                target_table, bson.json_util.dumps(records), meta, chunk_size=chunk_size)
        elif write_policy == 'truncate_insert':
            self.datastore.truncate(target_table)
            self.datastore.insert(
                target_table, bson.json_util.dumps(records), meta)
        else:
            self.datastore.truncate(target_table)
            self.datastore.bulk_insert(
                target_table, bson.json_util.dumps(records), meta, chunk_size=chunk_size)

        _log.info('Datastore microservice wrote all the records!')
        return {'target_table': target_table, 'count': len(records)}

    def _compute_transformation(self, t, param_value=None):
        _log.info(f"Computing transformation {t['id']}")
        try:
            self.datastore.create_or_replace_python_function(
                t['function_name'], t['function'])
        except Exception:
            _log.error(
                'Something went wrong while creating the underlying python function')
            raise LoaderServiceError(
                'An error occurred while creating python function in transformation {}'.format(t['id']))

        if t['type'] == 'fit' and t['process_date'] is None:
            _log.info(
                'Transformation has been set as "fit" kind. This must be processed!')
            try:
                last_entry = bson.json_util.loads(
                    self.datareader.select(t['output']))
                if last_entry and len(last_entry) > 0:
                    _log.info('Deleting the previous result ...')
                    self.datastore.delete(t['target_table'], {
                                          'id': last_entry[0]['id']})
                _log.info('Computing current result ...')
                self.datastore.insert_from_select(
                    t['target_table'], t['output'], None)
            except Exception:
                _log.error(
                    'Something went wrong while deleting and inserting the result')
                raise LoaderServiceError(
                    'An error occurred while fitting transformation {}'.format(t['id']))
            _log.info('Updating process date in metadata ...')
            self.metadata.update_process_date(t['id'])
        elif t['type'] in ('transform', 'predict',) and t['materialized'] is True:
            _log.info(
                'Transformation has been set as materialized "transform" or "predict" kind. This must be processed!')
            if t['parameters'] is None:
                _log.info('No parameters truncating the table ...')
                self.datastore.truncate(t['target_table'])
                _log.info('Inserting the result ...')
                self.datastore.insert_from_select(
                    t['target_table'], t['output'], None)
            else:
                if len(t['parameters']) > 1:
                    raise LoaderServiceError(
                        'Does not support transformation with multiple parameters')
                param_name = t['parameters'][0]
                if param_value is None:
                    raise LoaderServiceError(
                        'Transformation requires a parameter')
                _log.info(
                    'We will delete the previous result according to the provided parameter')
                self.datastore.delete(t['target_table'], {
                    param_name: param_value})
                _log.info('Inserting the result ...')
                self.datastore.insert_from_select(
                    t['target_table'], t['output'], [param_value])
            _log.info('Updating process date in metadata ...')
            self.metadata.update_process_date(t['id'])

    def update_transformations(self, trigger_table, param_value=None):
        _log.info(f'Updating transformation related to {trigger_table} ...')
        meta = self.metadata.get_update_pipeline(trigger_table)
        if not meta:
            _log.info('Nothing to do ...')
            return {'trigger_table': trigger_table}
        pipeline = bson.json_util.loads(meta)
        for job in pipeline:
            for t in job['transformations']:
                self._compute_transformation(t, param_value)
        return {'trigger_table': trigger_table}

    def apply_transformation(self, transformation_id, param_value=None):
        result = self.metadata.get_transformation(transformation_id)

        transformation = bson.json_util.loads(result)
        self._compute_transformation(transformation, param_value)

        return {'id': transformation_id}

    def update_entry_ngrams(self, entry_id):
        return self.referential.update_entry_ngrams(entry_id)

    def add_entity(self, data):
        self.referential.add_entity(**data)
        return {'id': data['id']}

    def add_event(self, data):
        data = self.referential.add_event(**data)
        return {'id': data['id']}

    def add_informations_to_entity(self, data):
        data = self.referential.add_informations_to_entity(data['id'], data)
        return {'id': data['id']}

    @consume(queue=Queue(name='evt_all_inputs',
                         exchange=Exchange(name='all_inputs', type='topic', auto_delete=True)))
    def handle_all_inputs(self, payload):
        input_ = bson.json_util.loads(payload)
        _log.info(f'Handling input {input_["id"]}')
        if input_['status'] == 'UNCHANGED':
            _log.info('Received an unchanged input ... ignoring !')
            return
        ref = input_['referential']
        if ref.get('entities', None):
            _log.info('Handling entities ...')
            for e in ref['entities']:
                ent = self.add_entity(e)
                self.update_entry_ngrams(ent['id'])
        if ref.get('events', None):
            _log.info('Handling events ...')
            for e in ref['events']:
                evt = self.add_event(e)
                self.update_entry_ngrams(evt['id'])
        if ref.get('informations', None):
            _log.info('Handling informations ...')
            for e in ref['informations']:
                self.add_informations_to_entity(e)
        datastore = input_['datastore']
        for d in datastore:
            res = self.write(**d)
            d_keys = d.get('delete_keys', None)
            param_value = list(d_keys.values())[0] if d_keys else None
            self.update_transformations(
                res['target_table'], param_value=param_value)
        ack = bson.json_util.dumps({
            'id': input_['id'],
            'checksum': input_.get('checksum', None),
            'meta': input_.get('meta', None)})
        self.dispatch('input_loaded', ack)