Example #1
    def init_rabbit_mq(self):
        try:
            self.logger.info("Initializing RabbitMQ message consumer...")
            schedule_exchange = Exchange("airtime-media-monitor",
                                         "direct",
                                         durable=True,
                                         auto_delete=True)
            schedule_queue = Queue("media-monitor",
                                   exchange=schedule_exchange,
                                   key="filesystem")
            self.connection = BrokerConnection(self.cfg["rabbitmq_host"],
                                               self.cfg["rabbitmq_user"],
                                               self.cfg["rabbitmq_password"],
                                               self.cfg["rabbitmq_vhost"])
            channel = self.connection.channel()

            self.simple_queue = SimpleQueue(channel, schedule_queue)

            self.logger.info("Initialized RabbitMQ consumer.")
        except Exception as e:
            self.logger.info("Failed to initialize RabbitMQ consumer")
            self.logger.error(e)
            return False

        return True
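
A minimal companion sketch, assuming kombu's SimpleQueue API: how the queue initialized above might be polled. The five-second timeout and the poll_message name are illustrative, not part of the original.

    def poll_message(self):
        # Drain one message from the SimpleQueue created in init_rabbit_mq().
        # SimpleQueue.get() raises an Empty exception when the timeout expires.
        try:
            message = self.simple_queue.get(block=True, timeout=5)
        except Exception:
            return None
        payload = message.payload  # deserialized message body
        message.ack()
        return payload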
Example #2
    def init_rabbit_mq(self):
        self.logger.info("Initializing RabbitMQ stuff")
        try:

            self.logger.info("rabbitmq_host: " + self.config["rabbitmq_host"])
            self.logger.info("rabbitmq_user: "******"rabbitmq_user"])
            self.logger.info("rabbitmq_password: "******"rabbitmq_password"])
            self.logger.info("rabbitmq_vhost: " +
                             self.config["rabbitmq_vhost"])
            """"""
            schedule_exchange = Exchange("airtime-pypo", "direct",
                                         durable=True, auto_delete=True)
            schedule_queue = Queue("pypo-fetch", exchange=schedule_exchange,
                                   key="foo")
            connection = BrokerConnection(self.config["rabbitmq_host"],
                                          self.config["rabbitmq_user"],
                                          self.config["rabbitmq_password"],
                                          self.config["rabbitmq_vhost"])

            channel = connection.channel()
            self.simple_queue = SimpleQueue(channel, schedule_queue)
            """
            connection = Connection('amqp://*****:*****@172.16.82.1:5672//pypox')
            self.simple_queue = connection.SimpleQueue('pypo-fetch')
            #message = simple_queue.get(block=True, timeout=1)
            """

        except Exception as e:
            self.logger.error(e)
            return False
Example #3
class InceptionerService:
    """Service endpoint for Inceptioner"""
    name = "inceptioner_service"

    test_exchange = Exchange('nameko_test_exchange', type='direct')
    test_queue = Queue('nameko_test_queue', exchange=test_exchange)

    @http('GET', '/get/<int:value>')
    def get_method_for_test(self, request, value):
        return json.dumps({'value': value})


    @http('POST', '/recognize/base64')
    def do_post(self, request):
        logging.info('Received Request on recognition from base64')
        request_data = request.data
        logging.debug('Data Received: {}'.format(request_data))
        res = process_request(request.data)
        print(res)
        return str(res)


    @consume(test_queue)
    def handle_event(self, payload):
        logging.info('Received request on recognition on the queue')
        logging.debug('Data received: {}'.format(payload))

        res = process_request(payload)
        print(res)
        return res
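
The queue consumed by handle_event above is bound with kombu's default empty routing key, so a plain kombu producer can feed it. A hedged sketch; the broker URL and payload are illustrative:

from kombu import Connection, Exchange, Producer, Queue

test_exchange = Exchange('nameko_test_exchange', type='direct')
test_queue = Queue('nameko_test_queue', exchange=test_exchange)

with Connection('amqp://guest:guest@localhost:5672//') as conn:
    producer = Producer(conn.channel(), exchange=test_exchange)
    # declare=[test_queue] makes sure the queue exists before publishing
    producer.publish('payload-to-recognize', routing_key='',
                     declare=[test_queue])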
Example #4
    def __init__(self, hostname="127.0.0.1", userid="guest", password="******",
                 virtual_host="/", port=5672, name="", routing_key=""):
        """ Setup a connection to the AMQP server, get a channel 
            and create an exchange.
            
            A specific service listener implementation overrides the name 
            and routing_key
        """
        if name == "":
            raise Exception("Name must be non-empty string")
        self.name = name
        self.routing_key = routing_key

        if routing_key == "":
            exchange_type = "fanout"
        elif "*" in routing_key or "#" in routing_key:
            exchange_type = "topic"
        else:
            exchange_type = "direct"

        self.connection = BrokerConnection(hostname=hostname,
                                           userid=userid, password=password,
                                           virtual_host=virtual_host, port=port,
                                           insist=False, ssl=False)
        self.channel = self.connection.channel()
        self.exchange = Exchange(name=self.name, type=exchange_type, durable=False,
                                 channel=self.channel)
        self.connection.connect()
        return
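
A hedged usage sketch of the exchange-type selection above; the ServiceListener class name and all values are hypothetical:

# routing_key=""       -> "fanout" exchange: broadcast to every bound queue
# routing_key="logs.#" -> "topic" exchange: pattern matching on the key
# routing_key="logs"   -> "direct" exchange: exact-match routing
listener = ServiceListener(name="service-bus", routing_key="logs.#")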
Example #5
class ConsumerService:
    """
    Microservice responsible for consume data notifications from Service4 and
    dispatching those data to the Client by saving those data to Redis database.

    Attributes:
        name (str): The microservice name.
        _publication (Exchange): Messagin exchange object.
        _queue  (Queue): Messaging publications queue to be consumed.
        _redis (Redis): Nameko Redis connector object.
    """

    name = 'consumer'
    _publication = Exchange('new_publication', type='direct')
    _queue = Queue('publication_queue', exchange=_publication)
    _redis = Redis('my_redis')

    @consume(_queue)
    def receive_new_publication(self, payload: str):
        """
        Consume incoming data received from service4 by pushing it onto a
        Redis list.

        Args:
            payload (str): Data to be consumed.
        """
        try:
            self._redis.rpush('publication_queue', payload)
        except Exception as e:
            print('Ooops!', e)
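
Since receive_new_publication pushes onto the 'publication_queue' list with RPUSH, a client reading with LPOP sees publications in FIFO order. A hedged redis-py sketch; connection details are assumed:

import redis

client = redis.Redis()  # assumes the same Redis instance the service writes to
item = client.lpop('publication_queue')  # oldest publication, or None if empty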
Example #6
    def init_rabbit_mq(self):
        """
        Attempt to connect to the RabbitMQ server. Returns True on success,
        False otherwise.
        """

        self.logger.info("Initializing RabbitMQ stuff")
        try:
            schedule_exchange = Exchange("airtime-media-monitor",
                                         "direct",
                                         durable=True,
                                         auto_delete=True)
            schedule_queue = Queue("media-monitor",
                                   exchange=schedule_exchange,
                                   key="filesystem")
            self.connection = BrokerConnection(
                self.config.cfg["rabbitmq_host"],
                self.config.cfg["rabbitmq_user"],
                self.config.cfg["rabbitmq_password"],
                self.config.cfg["rabbitmq_vhost"])
            channel = self.connection.channel()
            consumer = Consumer(channel, schedule_queue)
            consumer.register_callback(self.handle_message)
            consumer.consume()
        except Exception as e:
            self.logger.error(e)
            return False

        return True
Example #7
class Consumer:
    name = 'worker_consumer'
    test = Exchange('test', type='direct')
    tq = Queue('q1', exchange=test)

    @consume(tq)
    def handle_consume(self, body):
        print("Received message: {0}".format(body))
Example #8
 def setup_rabbit_mq_channel(self):
     service_exchange = Exchange(self.acord_control_exchange, "topic", durable=False)
     # connections/channels
     connection = BrokerConnection(self.rabbit_host, self.rabbit_user, self.rabbit_password)
     logging.info("Connection to RabbitMQ server successful")
     channel = connection.channel()
     # produce
     self.producer = Producer(channel, exchange=service_exchange, routing_key='notifications.info')
     self.publish = connection.ensure(self.producer, self.producer.publish, errback=self.errback, max_retries=3)
Example #9
    def get_exchanges(self):
        """
            Define one exchange only for all messages and log. Routing
            will be done only at the routing key level.
        """

        # todo: use topic routing ?
        # http://packages.python.org/kombu/reference/kombu.entity.html?#kombu.entity.Exchange.type

        return {'psms': Exchange("psms", "direct", durable=self.persistent)}
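
The todo above asks about topic routing; a hedged sketch of that variant with the same kombu API, where routing keys such as 'psms.log.error' would be pattern-matched instead of compared exactly:

        return {'psms': Exchange("psms", "topic", durable=self.persistent)}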
Example #10
class WorkerSubscriber:
    name = 'worker_subscriber'

    test = Exchange('test', type='direct')
    publish = Publisher(exchange=test)

    @event_handler("api", "say_hello")
    def handle_event(self, payload):
        print("{0} said hello!".format(payload))
        self.publish("Goodbye {0}".format(payload))
Example #11
 def init_rabbit_mq(self):
     self.logger.info("Initializing RabbitMQ stuff")
     try:
         schedule_exchange = Exchange("airtime-pypo", "direct", durable=True, auto_delete=True)
         schedule_queue = Queue("pypo-fetch", exchange=schedule_exchange, key="foo")
         connection = BrokerConnection(config["rabbitmq_host"], config["rabbitmq_user"], config["rabbitmq_password"], config["rabbitmq_vhost"])
         channel = connection.channel()
         self.simple_queue = SimpleQueue(channel, schedule_queue)
     except Exception as e:
         self.logger.error(e)
         return False
Example #12
 def setup_rabbit_mq_channel(self):
     service_exchange = Exchange(cfg.CONF.udpservice.acord_control_exchange, "topic", durable=False)
     rabbit_host = cfg.CONF.udpservice.rabbit_hosts
     rabbit_user = cfg.CONF.udpservice.rabbit_userid 
     rabbit_password = cfg.CONF.udpservice.rabbit_password
     # connections/channels
     connection = BrokerConnection(rabbit_host, rabbit_user, rabbit_password)
     print('Connection to RabbitMQ server successful')
     channel = connection.channel()
     # produce
     self.producer = Producer(channel, exchange=service_exchange, routing_key='notifications.info')
Example #13
class Sender():
    """ Simple class to wrap the operations needed for an AMQP listener """

    def __init__(self, hostname="127.0.0.1", userid="guest", password="******",
                 virtual_host="/", port=5672, name="", routing_key=""):
        """ Setup a connection to the AMQP server, get a channel 
            and create an exchange.
            
            A specific service listener implementation overrides the name 
            and routing_key
        """
        if name == "":
            raise Exception("Name must be non-empty string")
        self.name = name
        self.routing_key = routing_key

        if routing_key == "":
            exchange_type = "fanout"
        elif "*" in routing_key or "#" in routing_key:
            exchange_type = "topic"
        else:
            exchange_type = "direct"

        self.connection = BrokerConnection(hostname=hostname,
                                           userid=userid, password=password,
                                           virtual_host=virtual_host, port=port,
                                           insist=False, ssl=False)
        self.channel = self.connection.channel()
        self.exchange = Exchange(name=self.name, type=exchange_type, durable=False,
                                 channel=self.channel)
        self.connection.connect()
        return

    def send(self, msg):
        """ Publishes a message to the AMQP server
            on the initialized exchange
            msg is a string, usually a JSON dump
        """
        self.exchange.publish(self.exchange.Message(msg), routing_key=self.routing_key)
        return
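
A hedged usage sketch for the Sender above; assumes a reachable AMQP broker, and all values are illustrative:

import json

sender = Sender(hostname="127.0.0.1", userid="guest", password="guest",
                name="broadcast")  # empty routing_key selects a fanout exchange
sender.send(json.dumps({"event": "started"}))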
Example #14
def setup_rabbit_mq_channel():
     global producer
     global rabbit_user, rabbit_password, rabbit_host, vcpeservice_rabbit_exchange,cpe_publisher_id
     vcpeservice_exchange = Exchange(vcpeservice_rabbit_exchange, "topic", durable=False)
     # connections/channels
     connection = BrokerConnection(rabbit_host, rabbit_user, rabbit_password)
     logger.info('Connection to RabbitMQ server successful')
     channel = connection.channel()
     # produce
     producer = Producer(channel, exchange=vcpeservice_exchange, routing_key='notifications.info')
     p = subprocess.Popen('hostname', shell=True, stdout=subprocess.PIPE)
     (hostname, error) = p.communicate()
     # strip the trailing newline that communicate() leaves on hostname
     cpe_publisher_id = cpe_publisher_id + '_on_' + hostname.strip()
     logger.info('cpe_publisher_id=%s',cpe_publisher_id)
Example #15
def main():
    cfg = {
        'hostname': 'localhost',
        'userid': 'guest',
        'password': '******',
        'virtual_host': '/',
        'port': 5672
    }
    transport = 'pika'
    #transport = 'librabbitmq'
    connection = BrokerConnection(transport=transport, **cfg)
    connection.connect()

    cfg = {
        'name': 'simple-test-1',
        'auto_delete': True,
        'durable': False,
        'delivery_mode': 'transient'
    }
    channel = connection.channel()
    exchange = Exchange(channel=channel, **cfg)
    #exchange = exchange_def(channel)

    routing_key = 'simple-test-1-route'
    queue = Queue(exchange=exchange, routing_key=routing_key, **cfg)

    channel = connection.channel()
    producer = Producer(channel=channel,
                        exchange=exchange,
                        routing_key=routing_key)

    channel = connection.channel()
    consumer = Consumer(channel=channel, queues=[queue], callbacks=[receive])
    consumer.consume()

    def serve_forever():
        while True:
            #print 'drain'
            #gevent.sleep(0.0001)
            connection.drain_events(timeout=1)

    def publish_forever():
        while True:
            producer.publish(loremIpsum)
            gevent.sleep(0.0001)

    #g1, g2 = gevent.spawn(publish_forever), gevent.spawn(serve_forever)
    g2 = gevent.spawn(serve_forever)
    g1 = gevent.spawn(publish_forever)
    gevent.joinall([g1, g2])
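
The snippet above references two names defined elsewhere in its module. A hedged sketch of what they plausibly look like; the payload text is illustrative:

loremIpsum = 'Lorem ipsum dolor sit amet'

def receive(body, message):
    # kombu Consumer callbacks are invoked with (body, message)
    print('received: {0!r}'.format(body))
    message.ack()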
Example #16
class Service4:
    """
    Microservice responsible for receiving data notifications from Service3
    and dispatching them to the Client.

    Attributes:
        name (str): The microservice name.
        _redis (Redis): Nameko Redis connector object.
        _publication (Exchange): Messaging exchange object.
        _publish  (Publisher): Messaging publisher object.
    """

    name = 'service4'
    _redis = Redis('my_redis')
    _publication = Exchange('new_publication', type='direct')
    _publish = Publisher(exchange=_publication)

    @event_handler('service3', 'number_published')
    def receive_publication(self, payload: str):
        """
        Event handler that receives a published number from service3.

        Args:
            payload (str): A new number published according to service3 rules.
        """
        self.dispatch_publication(payload)

    @rpc
    def dispatch_publication(self, payload: str):
        """
        Notify an event with the passed payload.

        Args:
            payload (str): A published number to be notified to the client.
        """
        self._publish(payload)

    @rpc
    def get_history(self) -> List[str]:
        """
        Get the last 100 publications from Redis Database

        Returns:
            List[str]: Last publications
        """
        if self._redis.llen('published_numbers') > 100:
            history = self._redis.lrange('published_numbers', -100, -1)
        else:
            history = self._redis.lrange('published_numbers', 0, -1)
        return history
Example #17
 def setup_rabbit_mq_channel(self):
     ceilometer_exchange = Exchange(self.rabbit_exchange,
                                    "topic",
                                    durable=False)
     # connections/channels
     connection = BrokerConnection(self.rabbit_host, self.rabbit_user,
                                   self.rabbit_password)
     LOG.info(
         "BroadViewPublisher: Connection to RabbitMQ server successful")
     channel = connection.channel()
     # produce
     self._producer = Producer(channel,
                               exchange=ceilometer_exchange,
                               routing_key='notifications.info')
     self._publish = connection.ensure(self._producer,
                                       self._producer.publish,
                                       errback=self.errback,
                                       max_retries=3)
Example #18
 def init_rabbit_mq(self):
     self.logger.info("Initializing RabbitMQ stuff")
     try:
         schedule_exchange = Exchange("airtime-pypo",
                                      "direct",
                                      durable=True,
                                      auto_delete=True)
         schedule_queue = Queue("pypo-fetch",
                                exchange=schedule_exchange,
                                key="foo")
         with Connection(self.config["host"],
                         self.config["user"],
                         self.config["password"],
                         self.config["vhost"],
                         heartbeat=5) as connection:
             rabbit = RabbitConsumer(connection, [schedule_queue], self)
             rabbit.run()
     except Exception as e:
         self.logger.error(e)
Example #19
 def init_rabbit_mq(self):
     logger.info("Initializing RabbitMQ stuff")
     try:
         schedule_exchange = Exchange("airtime-pypo",
                                      "direct",
                                      durable=True,
                                      auto_delete=True)
         schedule_queue = Queue("pypo-fetch",
                                exchange=schedule_exchange,
                                key="foo")
         with Connection(
                 f"amqp://{self.config.user}:{self.config.password}"
                 f"@{self.config.host}:{self.config.port}"
                 f"/{self.config.vhost}",
                 heartbeat=5,
         ) as connection:
             rabbit = RabbitConsumer(connection, [schedule_queue], self)
             rabbit.run()
     except Exception as e:
         logger.error(e)
Example #20
 def init_rabbit_mq(self):
     self.logger.info("Initializing RabbitMQ stuff")
     try:
         schedule_exchange = Exchange("airtime-media-monitor",
                                      "direct",
                                      durable=True,
                                      auto_delete=True)
         schedule_queue = Queue("media-monitor",
                                exchange=schedule_exchange,
                                key="filesystem")
         self.connection = BrokerConnection(
             self.config.cfg["rabbitmq_host"],
             self.config.cfg["rabbitmq_user"],
             self.config.cfg["rabbitmq_password"], "/")
         channel = self.connection.channel()
         consumer = Consumer(channel, schedule_queue)
         consumer.register_callback(self.handle_message)
         consumer.consume()
     except Exception as e:
         self.logger.error(e)
         return False
Example #21
 def init_rabbit_mq(self):
     logger = logging.getLogger('fetch')
     logger.info("Initializing RabbitMQ stuff")
     try:
         schedule_exchange = Exchange("airtime-schedule",
                                      "direct",
                                      durable=True,
                                      auto_delete=True)
         schedule_queue = Queue("pypo-fetch",
                                exchange=schedule_exchange,
                                key="foo")
         self.connection = BrokerConnection(config["rabbitmq_host"],
                                            config["rabbitmq_user"],
                                            config["rabbitmq_password"],
                                            "/")
         channel = self.connection.channel()
         consumer = Consumer(channel, schedule_queue)
         consumer.register_callback(handle_message)
         consumer.consume()
     except Exception as e:
         logger.error(e)
         return False
Example #22
    def handle(self):
        log.debug("Start consuming")
        exchange = Exchange('amqp.topic', type='direct', durable=True)
        self._connection = BrokerConnection(*CONNECTION)
        channel = self._connection.channel()

        for entry in self.queues:
            log.debug("prepare to consume %s" % entry['routing_key'])
            queue = Queue(entry['name'],
                          exchange=exchange,
                          routing_key=entry['routing_key'])
            consumer = Consumer(channel, queue)
            consumer.register_callback(getattr(self, entry['handler']))
            consumer.consume()

        log.debug("start consuming...")
        while True:
            try:
                self._connection.drain_events()
            except socket.timeout:
                log.debug("nothing to consume...")
                break
        self._connection.close()
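
The handler above expects self.queues to hold dicts with 'name', 'routing_key', and 'handler' keys; a hedged illustration with made-up values:

self.queues = [
    {'name': 'log', 'routing_key': 'log.#', 'handler': 'on_log'},
    {'name': 'msg', 'routing_key': 'msg.user', 'handler': 'on_message'},
]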
Example #23
    def deadlettering_exchange(self, rabbit_config, exchange, queue):
        conn = Connection(rabbit_config[AMQP_URI_CONFIG_KEY])

        with connections[conn].acquire(block=True) as connection:

            deadletter_exchange = Exchange(name="deadletter", type="topic")
            deadletter_exchange.maybe_bind(connection)
            deadletter_exchange.declare()

            deadletter_queue = Queue(
                name="deadletter",
                exchange=deadletter_exchange,
                routing_key="#",
                queue_arguments={'x-dead-letter-exchange': exchange.name})
            deadletter_queue.maybe_bind(connection)
            deadletter_queue.declare()

        return deadletter_exchange
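
The deadletter queue above dead-letters back into the original exchange, which is the retry half of the pattern; the complementary piece is a working queue that dead-letters into "deadletter" when messages are rejected or expire. A hedged sketch with illustrative names:

work_queue = Queue(
    name="work",
    exchange=exchange,  # the original exchange passed in above
    routing_key="work",
    queue_arguments={'x-dead-letter-exchange': 'deadletter'})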
Example #24
def _amqp_exchange():
    return Exchange("rpc",
                    type="topic",
                    delivery_mode=TRANSIENT_DELIVERY_MODE,
                    durable=False)
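
The TRANSIENT_DELIVERY_MODE constant used above presumably comes from kombu; its import, for completeness:

from kombu.entity import TRANSIENT_DELIVERY_MODE  # == 1, i.e. non-persistent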
Example #25
class NotifierService(object):
    name = 'notifier'
    misaki = web.Slack('misaki')
    error = ErrorHandler()

    @property
    def channel(self):
        return f'#{os.getenv("NOTIFICATION_CHANNEL", "notifications")}'

    @staticmethod
    def _format_notification(input_):
        keys = ('id', 'source', 'type', 'content')
        if not all(k in input_.keys() for k in keys):
            raise NotifierServiceError(
                'Some keys are missing in the input dict')
        blocks = [{
            'type': 'section',
            'text': {
                'type': 'mrkdwn',
                'text': f'*{input_["content"]}*',
            }
        }, {
            'type':
            'context',
            'elements': [{
                'type': 'mrkdwn',
                'text': f'{k}: {input_[k]}'
            } for k in input_ if k != 'content']
        }]
        return blocks

    @consume(queue=Queue(name='evt_all_notifications',
                         exchange=Exchange(name='all_notifications',
                                           type='topic',
                                           auto_delete=True)))
    def handle_all_notifications(self, payload):
        _log.info(f'Received {payload}')
        input_ = bson.json_util.loads(payload)
        self.misaki.api_call('chat.postMessage',
                             channel=self.channel,
                             blocks=self._format_notification(input_),
                             text=input_['content'])

    @rpc
    def send_to_slack(self, channel, msg, image_url=None, context=None):
        _log.info(f'Sending message {msg} to slack channel {channel} ...')
        slack_msg = [{
            'type': 'section',
            'text': {
                'type': 'mrkdwn',
                'text': f'*{msg}*'
            }
        }]
        if image_url:
            slack_msg.extend([{
                'type': 'section',
                'text': {
                    'type':
                    'mrkdwn',
                    'text':
                    f'Please find your image at the following <{image_url}|link>'
                }
            }, {
                'type': 'image',
                'image_url': image_url,
                'alt_text': 'Can not be displayed here'
            }])
        if context:
            slack_msg.append({
                'type': 'context',
                'elements': [{
                    'type': 'mrkdwn',
                    'text': context
                }]
            })
        self.misaki.api_call('chat.postMessage',
                             channel=channel,
                             blocks=slack_msg,
                             text=msg)

    @rtm.handle_message
    def handle_any_event(self, event, message):
        _log.info(event)
        _log.info(message)
Example #26
class ElectionCollectorService(object):
    name = 'election_collector'
    database = MongoDatabase(result_backend=False)
    election = Election()
    error = ErrorHandler()
    pub_input = Publisher(exchange=Exchange(name='all_inputs',
                                            type='topic',
                                            durable=True,
                                            auto_delete=True,
                                            delivery_mode=PERSISTENT))
    pub_notif = Publisher(exchange=Exchange(name='all_notifications',
                                            type='topic',
                                            durable=True,
                                            auto_delete=True,
                                            delivery_mode=PERSISTENT))

    def add_election(self, election_id):
        self.database['elections'].update_one({'id': election_id},
                                              {'$set': {
                                                  'id': election_id
                                              }},
                                              upsert=True)

    @staticmethod
    def handle_missing_number(doc, key):
        if key not in doc:
            return None
        d = doc[key]
        if 'Nombre' in doc[key]:
            d = doc[key]['Nombre']
        try:
            return int(d)
        except ValueError:
            return None

    @staticmethod
    def to_boolean(doc, key):
        if key not in doc:
            return None
        if doc[key] == 'O':
            return True
        return False

    @staticmethod
    def extract_scrutin(doc):
        _log.info('Handling scrutin informations ...')
        return {
            'scrutin_type': doc['Type'],
            'scrutin_annee': int(doc['Annee'])
        }

    @staticmethod
    def extract_commune(doc):
        _log.info('Handling commune informations ...')
        return {
            'commune_code': doc['CodSubCom'],
            'commune_lib': doc['LibSubCom'],
            'circonscription_code': doc.get('CodCirLg', None),
            'circonscription_lib': doc.get('LibFraSubCom', None),
            'mode_scrutin': doc.get('ModeScrutin', None)
        }

    @staticmethod
    def extract_tour(doc):
        _log.info('Handling tour informations ...')
        return {'num_tour': int(doc['NumTour'])}

    @staticmethod
    def extract_mention(doc):
        _log.info('Handling mention informations ...')
        return {
            'inscrits':
            int(doc['Inscrits']['Nombre']),
            'abstentions':
            int(doc['Abstentions']['Nombre']),
            'votants':
            int(doc['Votants']['Nombre']),
            'blancs':
            ElectionCollectorService.handle_missing_number(doc, 'Blancs'),
            'nuls':
            ElectionCollectorService.handle_missing_number(doc, 'Nuls'),
            'blancs_nuls':
            ElectionCollectorService.handle_missing_number(
                doc, 'BlancsOuNuls'),
            'exprimes':
            int(doc['Exprimes']['Nombre'])
        }

    @staticmethod
    def extract_resultats(doc):
        _log.info('Handling resultats informations ...')
        return {
            'nb_sap':
            ElectionCollectorService.handle_missing_number(doc, 'NbSap'),
            'nb_sp':
            ElectionCollectorService.handle_missing_number(
                doc, 'NbSiePourvus')
        }

    @staticmethod
    def extract_departement(doc):
        _log.info('Handling departement information ...')
        return {
            'departement_code': doc['CodDpt'],
            'departement_lib': doc['LibDpt']
        }

    @staticmethod
    def extract_nuance(doc):
        _log.info('Handling nuance information ...')
        return {
            'nuance_code':
            doc['CodNua'],
            'nuance_lib':
            doc['LibNua'],
            'nb_voix':
            int(doc['NbVoix']),
            'nuance_nb_siege':
            ElectionCollectorService.handle_missing_number(doc, 'NbSieges')
        }

    @staticmethod
    def extract_candidat(doc):
        _log.info('Handling candidat information ...')
        return {
            'candidat_numero':
            ElectionCollectorService.handle_missing_number(
                doc, 'NumPanneauCand'),
            'candidat_nom':
            doc['NomPsn'],
            'candidat_prenom':
            doc['PrenomPsn'],
            'candidat_civilite':
            doc['CivilitePsn'],
            'candidat_ordre':
            ElectionCollectorService.handle_missing_number(
                doc, 'NumeroOrdCand'),
            'candidat_elu':
            ElectionCollectorService.to_boolean(doc, 'Elu'),
            'nuance_code':
            doc.get('CodNua', None),
            'nuance_lib':
            doc.get('LibNua', None),
            'nb_voix':
            int(doc['NbVoix'])
        }

    @staticmethod
    def extract_liste(doc):
        _log.info('Handling liste information ...')
        return {
            'liste_code':
            doc['CodSeqLisCand'],
            'liste_lib':
            doc['NomListe'],
            'liste_tete_nom':
            doc.get('NomTeteListe', None),
            'liste_tete_prenom':
            doc.get('PrenomTeteListe', None),
            'liste_tete_civilite':
            doc.get('CiviliteTeteListe', None),
            'liste_nb_elus':
            ElectionCollectorService.handle_missing_number(doc, 'NbSieges'),
            'nb_voix':
            int(doc['NbVoix'])
        }

    @staticmethod
    def complete_records(records, extract_func, rec_type, prev):
        def create_record(r):
            c = extract_func(r)
            c.update(prev)
            c['type'] = rec_type
            for m in [meta[0] for meta in META]:
                if m not in c:
                    c[m] = None
            return c

        return [create_record(r) for r in records]

    @staticmethod
    def build_records(doc, er):
        _log.info(f'Building data from {er.url}')
        election = doc['Election']
        root = ElectionCollectorService.extract_scrutin(election['Scrutin'])
        root['election_id'] = er.election_id
        root['feed_id'] = er.feed_id

        def ensure_list(d):
            if isinstance(d, list):
                return d
            return [d]

        def handle_tour(t, prev):
            t_lvl = ElectionCollectorService.extract_tour(t)
            t_lvl.update(prev)
            t_lvl.update(
                ElectionCollectorService.extract_mention(t['Mentions']))

            res = t['Resultats']
            t_lvl.update(ElectionCollectorService.extract_resultats(res))
            if 'Nuances' in res:
                return ElectionCollectorService.complete_records(
                    res['Nuances']['Nuance'],
                    ElectionCollectorService.extract_nuance, 'N', t_lvl)
            elif 'Listes' in res:
                return ElectionCollectorService.complete_records(
                    res['Listes']['Liste'],
                    ElectionCollectorService.extract_liste, 'L', t_lvl)
            elif 'Candidats' in res:
                return ElectionCollectorService.complete_records(
                    res['Candidats']['Candidat'],
                    ElectionCollectorService.extract_candidat, 'C', t_lvl)
            else:
                raise ElectionCollectorError(
                    'Cannot find neither Nuances, Listes nor Candidats under Resultats'
                )

        def handle_commune(c, prev):
            c_lvl = ElectionCollectorService.extract_commune(c)
            c_lvl.update(prev)
            return list(
                itertools.chain.from_iterable([
                    handle_tour(t, c_lvl)
                    for t in ensure_list(c['Tours']['Tour'])
                ]))

        if 'Departement' in election:
            root.update(
                ElectionCollectorService.extract_departement(
                    election['Departement']))
            return list(
                itertools.chain.from_iterable([
                    handle_commune(c, root) for c in ensure_list(
                        election['Departement']['Communes']['Commune'])
                ]))
        return list(
            itertools.chain.from_iterable([
                handle_tour(t, root)
                for t in ensure_list(election['Tours']['Tour'])
            ]))

    def update_checksum(self, id_, checksum):
        self.database['elections'].update_one({'id': id_},
                                              {'$set': {
                                                  'checksum': checksum
                                              }})

    @rpc
    def publish(self, election_id):
        _log.info(f'Publishing election {election_id} ...')
        for r in self.election.results(election_id):
            _log.info(f'Getting {r.url} ...')
            doc = r.call()
            try:
                records = ElectionCollectorService.build_records(doc, r)
            except ElectionCollectorError as e:
                _log.error(f'Error on {r.url}: {str(e)}')
                continue
            data = {
                'referential': {},
                'datastore': [{
                    'write_policy': 'delete_bulk_insert',
                    'meta': META,
                    'target_table': 'french_election',
                    'delete_keys': {
                        'feed_id': r.feed_id
                    },
                    'records': records,
                    'chunk_size': 100
                }],
                'id':
                r.feed_id,
                'status':
                'CREATED',
                'checksum':
                None,
                'meta': {
                    'type': 'election',
                    'source': 'interieur',
                    'content_id': r.feed_id
                }
            }
            self.pub_input(bson.json_util.dumps(data))

    @event_handler('loader',
                   'input_loaded',
                   handler_type=BROADCAST,
                   reliable_delivery=False)
    def ack(self, payload):
        msg = bson.json_util.loads(payload)
        meta = msg.get('meta', None)
        if not meta:
            return
        if 'type' not in meta or 'source' not in meta or meta[
                'source'] != 'interieur':
            return

        self.pub_notif(
            bson.json_util.dumps({
                'id': msg['id'],
                'source': meta['source'],
                'type': meta['type'],
                'content': 'French election loaded!'
            }))

    @staticmethod
    def is_meta_valid(msg):
        if 'meta' not in msg:
            return False
        if 'type' not in msg['meta'] or 'source' not in msg['meta']:
            return False
        if msg['meta']['type'] != 'election' or msg['meta'][
                'source'] != 'interieur':
            return False
        if 'config' not in msg:
            _log.warning('Missing config within the message. Ignoring ...')
            return False
        if 'election' not in msg['config']:
            _log.error('Missing mandatory field: election')
            return False
        return True

    @event_handler('api_service',
                   'input_config',
                   handler_type=BROADCAST,
                   reliable_delivery=False)
    def handle_input_config(self, payload):
        msg = bson.json_util.loads(payload)

        if not ElectionCollectorService.is_meta_valid(msg):
            return

        election_id = msg['config']['election']
        _log.info('Received a related input config ...')
        self.add_election(election_id)
        self.publish(election_id)
Example #27
    def run(self):

        print('pypo Pusher')
        if self.action == 'update_schedule':
            print('update_schedule!!')
            credentials = pika.PlainCredentials(MQ_USER, MQ_PASS)
            connection = pika.BlockingConnection(
                pika.ConnectionParameters(MQ_HOST, 5672, '/airtime',
                                          credentials))
            channel = connection.channel()
            channel.queue_declare(queue='pypo-fetch', durable=True)
            message = {
                'schedule': {
                    'media': {}
                },
                'event_type': 'update_schedule'
            }

            import json
            message = json.dumps(message)

            # Debug leftover from the original: overrides the JSON payload.
            message = 'hallo'

            channel.basic_publish(exchange='airtime-pypo',
                                  routing_key='pypo-fetch',
                                  body=message)

            channel.close()
            connection.close()

        if self.action == 'update_schedule_kombu':
            print('update_schedule!!')

            exchange = Exchange("airtime-pypo", "direct",
                                durable=True, auto_delete=True)
            queue = Queue("pypo-fetch", exchange=exchange, key="foo",
                          durable=True)

            connection = BrokerConnection(MQ_HOST, MQ_USER, MQ_PASS, MQ_VHOST)
            channel = connection.channel()

            simple_queue = SimpleQueue(channel, queue)

            message = {
                'schedule': {
                    'media': {}
                },
                'event_type': 'update_schedule'
            }

            print(simple_queue.qsize())
            print('do:')

            producer = Producer(channel, exchange=exchange,
                                routing_key=None, serializer="json")
            producer.publish(message, routing_key='pypo-fetch')

            print(simple_queue.qsize())
            channel.close()
Example #28
class LoaderService(object):
    name = 'loader'

    metadata = RpcProxy('metadata')
    datastore = RpcProxy('datastore')
    referential = RpcProxy('referential')
    dispatch = EventDispatcher()
    error = ErrorHandler()

    @rpc
    def write(self, write_policy, meta, target_table, records, upsert_key=None, delete_keys=None, chunk_size=None):
        _log.info(
            f'Writing in {target_table} using {write_policy} strategy ...')
        if write_policy not in ('insert', 'upsert', 'bulk_insert', 'delete_insert', 'delete_bulk_insert',
                                'truncate_insert', 'truncate_bulk_insert'):
            _log.error(f'{write_policy} not supported')
            raise LoaderServiceError('Wrong value for parameter write_policy')

        if write_policy in ('bulk_insert', 'delete_bulk_insert', 'truncate_bulk_insert') and not chunk_size:
            _log.error('chunk_size missing')
            raise LoaderServiceError(
                'Bulk loading strategy requires a chunk size')

        try:
            meta = list(map(tuple, meta))
        except Exception:
            _log.error('Badly formatted meta')
            raise LoaderServiceError('Badly formatted meta')

        if write_policy == 'insert':
            self.datastore.insert(
                target_table, bson.json_util.dumps(records), meta)
        elif write_policy == 'upsert':
            self.datastore.upsert(target_table, upsert_key,
                                  bson.json_util.dumps(records), meta)
        elif write_policy == 'bulk_insert':
            self.datastore.bulk_insert(
                target_table, bson.json_util.dumps(records), meta, chunk_size=chunk_size)
        elif write_policy == 'delete_insert':
            self.datastore.delete(target_table, delete_keys)
            self.datastore.insert(
                target_table, bson.json_util.dumps(records), meta)
        elif write_policy == 'delete_bulk_insert':
            self.datastore.delete(target_table, delete_keys)
            self.datastore.bulk_insert(
                target_table, bson.json_util.dumps(records), meta, chunk_size=chunk_size)
        elif write_policy == 'truncate_insert':
            self.datastore.truncate(target_table)
            self.datastore.insert(
                target_table, bson.json_util.dumps(records), meta)
        else:
            self.datastore.truncate(target_table)
            self.datastore.bulk_insert(
                target_table, bson.json_util.dumps(records), meta, chunk_size=chunk_size)

        _log.info('Datastore microservice wrote all the records !')
        return {'target_table': target_table, 'count': len(records)}

    def _compute_transformation(self, t, param_value=None):
        _log.info(f"Computing transformation {t['id']}")
        try:
            self.datastore.create_or_replace_python_function(
                t['function_name'], t['function'])
        except Exception:
            _log.error(
                'Something went wrong while creating the underlying python function')
            raise LoaderServiceError(
                'An error occured while creating python function in transformation {}'.format(t['id']))

        if t['type'] == 'fit' and t['process_date'] is None:
            _log.info(
                'Transformation has been set as \"fit\" kind. This must be processed !')
            try:
                last_entry = bson.json_util.loads(
                    self.datareader.select(t['output']))
                if last_entry and len(last_entry) > 0:
                    _log.info('Deleting the previous result ...')
                    self.datastore.delete(t['target_table'], {
                                          'id': last_entry[0]['id']})
                _log.info('Computing current result ...')
                self.datastore.insert_from_select(
                    t['target_table'], t['output'], None)
            except Exception:
                _log.error(
                    'Something went wrong while deleting and inserting the result')
                raise LoaderServiceError(
                    'An error occured while fitting transformation {}'.format(t['id']))
            _log.info('Updating process date in metadata ...')
            self.metadata.update_process_date(t['id'])
        elif t['type'] in ('transform', 'predict',) and t['materialized'] is True:
            _log.info(
                'Transformation has been set as materialized \"transform\" or \"predict\" kind. This must be processed !')
            if t['parameters'] is None:
                _log.info('No parameters truncating the table ...')
                self.datastore.truncate(t['target_table'])
                _log.info('Inserting the result ...')
                self.datastore.insert_from_select(
                    t['target_table'], t['output'], None)
            else:
                if len(t['parameters']) > 1:
                    raise LoaderServiceError(
                        'Does not support transformation with multiple parameters')
                param_name = t['parameters'][0]
                if param_value is None:
                    raise LoaderServiceError(
                        'Transformation requires a parameter')
                _log.info(
                    'We will delete the previous result according to the provided parameter')
                self.datastore.delete(t['target_table'], {
                    param_name: param_value})
                _log.info('Inserting the result ...')
                self.datastore.insert_from_select(
                    t['target_table'], t['output'], [param_value])
            _log.info('Updating process date in metadata ...')
            self.metadata.update_process_date(t['id'])

    def update_transformations(self, trigger_table, param_value=None):
        _log.info(f'Updating transformation related to {trigger_table} ...')
        meta = self.metadata.get_update_pipeline(trigger_table)
        if not meta:
            _log.info('Nothing to do ...')
            return {'trigger_table': trigger_table}
        pipeline = bson.json_util.loads(meta)
        for job in pipeline:
            for t in job['transformations']:
                self._compute_transformation(t, param_value)
        return {'trigger_table': trigger_table}

    def apply_transformation(self, transformation_id, param_value=None):
        result = self.metadata.get_transformation(transformation_id)

        transformation = bson.json_util.loads(result)
        self._compute_transformation(transformation, param_value)

        return {'id': transformation_id}

    def update_entry_ngrams(self, entry_id):
        return self.referential.update_entry_ngrams(entry_id)

    def add_entity(self, data):
        self.referential.add_entity(**data)
        return {'id': data['id']}

    def add_event(self, data):
        data = self.referential.add_event(**data)
        return {'id': data['id']}

    def add_informations_to_entity(self, data):
        data = self.referential.add_informations_to_entity(data['id'], data)
        return {'id': data['id']}

    @consume(queue=Queue(name='evt_all_inputs',
                         exchange=Exchange(name='all_inputs', type='topic', auto_delete=True)))
    def handle_all_inputs(self, payload):
        input_ = bson.json_util.loads(payload)
        _log.info(f'Handling input {input_["id"]}')
        if input_['status'] == 'UNCHANGED':
            _log.info('Received an unchanged input ... ignoring !')
            return
        ref = input_['referential']
        if ref.get('entities', None):
            _log.info('Handling entities ...')
            for e in ref['entities']:
                ent = self.add_entity(e)
                self.update_entry_ngrams(ent['id'])
        if ref.get('events', None):
            _log.info('Handling events ...')
            for e in ref['events']:
                evt = self.add_event(e)
                self.update_entry_ngrams(evt['id'])
        if ref.get('informations', None):
            _log.info('Handling informations ...')
            for e in ref['informations']:
                self.add_informations_to_entity(e)
        datastore = input_['datastore']
        for d in datastore:
            res = self.write(**d)
            d_keys = d.get('delete_keys', None)
            param_value = list(d_keys.values())[0] if d_keys else None
            self.update_transformations(
                res['target_table'], param_value=param_value)
        ack = bson.json_util.dumps({
            'id': input_['id'],
            'checksum': input_.get('checksum', None),
            'meta': input_.get('meta', None)})
        self.dispatch('input_loaded', ack)
Example #29
class SDMXCollectorService(object):
    name = 'sdmx_collector'
    database = MongoDatabase(result_backend=False)
    sdmx = SDMX()
    error = ErrorHandler()
    pub_input = Publisher(exchange=Exchange(name='all_inputs',
                                            type='topic',
                                            durable=True,
                                            auto_delete=True,
                                            delivery_mode=PERSISTENT))
    pub_notif = Publisher(exchange=Exchange(name='all_notifications',
                                            type='topic',
                                            durable=True,
                                            auto_delete=True,
                                            delivery_mode=PERSISTENT))

    def add_dataflow(self, root_url, agency_id, resource_id, version, kind,
                     keys):
        try:
            self.sdmx.initialize(root_url, agency_id, resource_id, version,
                                 kind, keys)
        except Exception as e:
            raise SDMXCollectorError(str(e))
        self.database['dataset'].create_index([('agency', pymongo.ASCENDING),
                                               ('resource', pymongo.ASCENDING)
                                               ])
        self.database['dataset'].create_index('id')
        _id = SDMXCollectorService.table_name(agency_id, resource_id)
        doc = {
            'agency': agency_id,
            'resource': resource_id,
            'id': _id,
            'version': version,
            'kind': kind,
            'keys': keys or {},
            'root_url': root_url
        }
        self.database['dataset'].update_one(
            {
                'agency': agency_id,
                'resource': resource_id
            }, {'$set': doc},
            upsert=True)
        return _id

    def get_dataflows(self):
        return self.database['dataset'].find({})

    @staticmethod
    def clean(l):
        return re.sub(r'[^0-9a-zA-Z_]+', '_', l)

    @staticmethod
    def table_name(provider, dataflow):
        return f'{SDMXCollectorService.clean(provider.lower())}_{SDMXCollectorService.clean(dataflow.lower())}'

    @staticmethod
    def to_table_meta(meta, provider, dataflow):
        table_name = SDMXCollectorService.table_name(provider, dataflow)

        codelist = meta['codelist']

        def handle_dim_att(d, is_dim=True):
            name, code = d
            cl = [c for c in codelist if c[0] == code]
            if not cl:
                return (SDMXCollectorService.clean(name), 'TEXT')
            if is_dim:
                size = functools.reduce(
                    lambda x, y: len(y[1]) if len(y[1]) > x else x, cl, 1)
                return (SDMXCollectorService.clean(name), f'VARCHAR({size})')
            size = functools.reduce(lambda x, y: x + len(y[1]), cl, 0)
            return (SDMXCollectorService.clean(name), f'VARCHAR({size})')

        table_meta = [handle_dim_att(d) for d in meta['dimensions'] if d[1]]
        table_meta = table_meta + [
            handle_dim_att(d, is_dim=False) for d in meta['attributes'] if d[1]
        ]
        table_meta.append((SDMXCollectorService.clean(meta['time_dimension']),
                           'VARCHAR(20)'))
        table_meta.append(
            (SDMXCollectorService.clean(meta['primary_measure']), 'FLOAT'))
        table_meta.append(('query', 'VARCHAR(250)'))

        return {
            'write_policy': 'delete_bulk_insert',
            'meta': table_meta,
            'target_table': table_name,
            'chunk_size': 500,
            'delete_keys': {
                'query': meta['query']
            }
        }

    @staticmethod
    def codelist_table_meta(agency):
        return {
            'write_policy':
            'upsert',
            'meta': [('code_id', 'VARCHAR(250)'), ('id', 'VARCHAR(250)'),
                     ('code_name', 'TEXT'), ('name', 'TEXT'),
                     ('ref', 'VARCHAR(32)')],
            'target_table':
            f'{SDMXCollectorService.clean(agency).lower()}_codelist',
            'upsert_key':
            'ref'
        }

    @staticmethod
    def checksum(data):
        return hashlib.md5(''.join([str(r) for r in data
                                    ]).encode('utf-8')).hexdigest()

    def get_status(self, provider, dataflow, checksum):
        old = self.database['dataset'].find_one({
            'agency': provider,
            'resource': dataflow
        })
        if not old or 'checksum' not in old:
            return 'CREATED'
        if old['checksum'] == checksum:
            return 'UNCHANGED'
        return 'UPDATED'

    @staticmethod
    def dataflow_to_entity(df):
        return {
            'id': df['id'],
            'common_name': df['name'],
            'provider': df['structure']['agency_id'],
            'type': 'dataflow',
            'informations': df
        }

    def get_dataset(self, root_url, agency, resource, version, kind, keys):
        self.sdmx.initialize(root_url, agency, resource, version, kind, keys)
        meta = {
            'name': self.sdmx.name(),
            'codelist': self.sdmx.codelist(),
            'dimensions': self.sdmx.dimensions(),
            'attributes': self.sdmx.attributes(),
            'primary_measure': self.sdmx.primary_measure(),
            'time_dimension': self.sdmx.time_dimension(),
            'query': self.sdmx.query()
        }
        table_meta = SDMXCollectorService.to_table_meta(meta, agency, resource)

        def handle_number(m, v):
            if m.lower() in ('float', 'double'):
                try:
                    d = float(v)
                    if math.isnan(d):
                        return None
                    return d
                except Exception:
                    return None
            return v

        data = [{
            k[0]: handle_number(
                k[1],
                r.get(k[0], None) if k[0] != 'query' else meta['query'])
            for k in table_meta['meta']
        } for r in self.sdmx.data()]

        codelist_meta = SDMXCollectorService.codelist_table_meta(agency)

        def hash_row(row):
            h = hashlib.md5(str(row).encode('utf-8'))
            return h.hexdigest()

        codelist = [
            dict(
                zip([m[0] for m in codelist_meta['meta']],
                    r + (hash_row(r[0:2]), ))) for r in meta['codelist']
        ]

        checksum = SDMXCollectorService.checksum(data)
        return {
            'referential': {
                'entities': [{
                    'id': table_meta['target_table'],
                    'common_name': meta['name'],
                    'provider': 'internal',
                    'type': 'dataset',
                    'informations': {
                        'id': table_meta['target_table'],
                        'name': meta['name'],
                        'table': table_meta['target_table']
                    }
                }, *[
                    SDMXCollectorService.dataflow_to_entity(d)
                    for d in self.sdmx.agency_dataflows
                ]]
            },
            'datastore': [{
                **table_meta, 'records': data
            }, {
                **codelist_meta, 'records': codelist
            }],
            'checksum':
            checksum,
            'id':
            table_meta['target_table'],
            'status':
            self.get_status(agency, resource, checksum),
            'meta': {
                'type': SDMXCollectorService.clean(agency).lower(),
                'source': 'sdmx'
            }
        }

    def update_checksum(self, id_, checksum):
        self.database['dataset'].update_one({'id': id_},
                                            {'$set': {
                                                'checksum': checksum
                                            }})

    @timer(interval=24 * 60 * 60)
    @rpc
    def publish(self):
        for f in self.get_dataflows():
            agency = f['agency']
            resource = f['resource']
            root_url = f['root_url']
            version = f['version']
            kind = f['kind']
            keys = f['keys']
            _log.info(
                f'Downloading dataset {resource} provided by {agency} ...')
            try:
                dataset = self.get_dataset(root_url, agency, resource, version,
                                           kind, keys)
            except Exception as e:
                _log.error(
                    f'Can not handle dataset {resource} provided by {agency}: {str(e)}'
                )
                continue
            _log.info('Publishing ...')
            self.pub_input(bson.json_util.dumps(dataset))

    @event_handler('loader',
                   'input_loaded',
                   handler_type=BROADCAST,
                   reliable_delivery=False)
    def ack(self, payload):
        msg = bson.json_util.loads(payload)
        meta = msg.get('meta', None)
        if not meta:
            return
        checksum = msg.get('checksum', None)
        if not checksum:
            return
        if 'source' not in meta or meta['source'] != 'sdmx':
            return
        t = meta['type']

        _log.info(f'Acknowledging {t} file: {msg["id"]}')
        self.update_checksum(msg['id'], checksum)

        _log.info(f'Publishing notification for {msg["id"]}')
        self.pub_notif(
            bson.json_util.dumps({
                'id': msg['id'],
                'source': 'sdmx',
                'type': t,
                'content': msg["id"]
            }))

    @event_handler('api_service',
                   'input_config',
                   handler_type=BROADCAST,
                   reliable_delivery=False)
    def handle_input_config(self, payload):
        msg = bson.json_util.loads(payload)

        if 'meta' not in msg or 'source' not in msg[
                'meta'] or msg['meta']['source'] != 'sdmx':
            return

        _log.info('Received a related input config ...')
        if 'config' not in msg:
            _log.warning('No config within the message. Ignoring ...')
            return

        config = msg['config']

        if 'agency' not in config or 'resource' not in config or\
            'version' not in config or 'kind' not in config or 'keys' not in config\
                or 'root_url' not in config:
            _log.error(
                'Missing at least one of these mandatory fields: root_url, agency, resource, version, kind or keys'
            )
            return

        id_ = self.add_dataflow(config['root_url'], config['agency'],
                                config['resource'], config['version'],
                                config['kind'], config['keys'])

        self.pub_notif(
            bson.json_util.dumps({
                'id': id_,
                'source': msg['meta']['source'],
                'type': '',
                'content': 'A new SDMX feed has been added.'
            }))
        self.publish()
Example #30
 def exchange(self, amqp_uri):
     exchange = Exchange(name="exchange")
     return exchange
Example #31
 def exchange(self):
     backoff_exchange = Exchange(
         type="headers",
         name="backoff"
     )
     return backoff_exchange
Example #32
def exchange():
    return Exchange("messages")