class TransmettreCertificatTransactionTest:
    """Test helper: load a certificate file, verify its chain, then submit
    it to the PKI domain as a transaction."""

    def __init__(self):
        self._logger = logging.getLogger('test')

        self.contexte = ContexteRessourcesMilleGrilles()
        self.contexte.initialiser(init_message=True)

        self.generateur_transaction = GenerateurTransaction(
            self.contexte.configuration, self.contexte.message_dao)
        self.enveloppe_certificat = None

    def charger_certificat(self, fichier):
        """Load the certificate from *fichier* into the instance and verify
        its chain."""
        verif = VerificateurCertificats(self.contexte)
        self.enveloppe_certificat = verif.charger_certificat(fichier)
        verif.verifier_chaine(self.enveloppe_certificat)

    def transmettre(self):
        """Serialize the loaded certificate to PEM and submit the PKI
        transaction."""
        cert = self.enveloppe_certificat.certificat
        self._logger.debug("Certificat a transmettre: %s" % str(cert))

        pem = str(cert.public_bytes(serialization.Encoding.PEM), 'utf-8')
        self._logger.debug("Certificat string: %s" % pem)

        message = {
            'certificat_pem': pem,
            'fingerprint': self.enveloppe_certificat.fingerprint_ascii
        }

        domaine = '%s.%s' % (ConstantesPki.DOMAINE_NOM,
                             ConstantesPki.TRANSACTION_EVENEMENT_CERTIFICAT)
        self.generateur_transaction.soumettre_transaction(message, domaine)
Exemple #2
0
class CeduleurMessageTest:
    """Test helper that emits a scheduler (ceduleur) event on the message DAO."""

    def __init__(self):
        # Context with messaging enabled; document DAO is not needed here.
        self.contexte = ContexteRessourcesMilleGrilles()
        self.contexte.initialiser(init_message=True)

    def deconnecter(self):
        """Disconnect from the message broker."""
        self.contexte.message_dao.deconnecter()

    def transmettre_evenement_ceduleur(self):
        """Build the scheduler timestamp payload and transmit it.

        The payload carries the current UTC time tuple, the weekday, and the
        list of timezone names (the routing key uses the short timezone name).
        """
        timestamp_utc = datetime.datetime.now(tz=pytz.UTC)
        ts_dict = {
            'UTC': timestamp_utc.timetuple(),
            'joursemaine': timestamp_utc.weekday()
        }

        # Timezones to include; the routing key uses the short timezone name.
        timezones = [
            pytz.UTC,
        ]

        indicateurs = ['heure']
        # Comprehension replaces the original manual append loop (same result).
        ts_dict['timezones'] = [str(tz) for tz in timezones]

        self.contexte.message_dao.transmettre_evenement_ceduleur(
            ts_dict, indicateurs)
Exemple #3
0
    def __init__(self, connecter: bool = True):
        """
        Build a MilleGrilles resource context and register this instance as a
        channel listener on the message DAO.

        :param connecter: passed through to ``contexte.initialiser``;
            presumably controls whether the broker connection is opened
            immediately — confirm against ContexteRessourcesMilleGrilles.
        """
        contexte = ContexteRessourcesMilleGrilles()
        contexte.initialiser(connecter=connecter)

        # Parent class receives the context; self.contexte is available after this.
        super().__init__(contexte)
        self.contexte.message_dao.register_channel_listener(self)
        self.generateur = GenerateurTransaction(self.contexte)
        self.channel = None          # MQ channel; presumably set by a channel-open callback — confirm
        self.event_recu = Event()    # NOTE(review): presumably set when a message arrives — confirm in callbacks
        self.messages = list()       # accumulator for received messages
Exemple #4
0
    def __init__(self):
        """Create and initialise the resource context, start the MQ ioloop on
        a background thread, then call the parent constructor with a 5 second
        interval."""
        contexte = ContexteRessourcesMilleGrilles()

        print("contexte.initialiser()")
        contexte.initialiser()
        print("ioloop MQ")
        # Run the MQ ioloop on its own thread so this constructor can return.
        self.thread_ioloop = Thread(name="MQ-ioloop", target=contexte.message_dao.run_ioloop)
        self.thread_ioloop.start()

        print("super.init")
        super().__init__(contexte, intervalle_secs=5)
    def __init__(self):
        """Initialise the context without the document DAO and configure the
        parent with a single hard-coded senseur document id."""
        contexte = ContexteRessourcesMilleGrilles()
        print("contexte.initialiser()")
        contexte.initialiser(init_document=False)

        # Single senseur document to monitor. NOTE(review): this id presumably
        # references an existing document in the target MilleGrille — confirm
        # before reusing this sample.
        self.document_ids = ['514951f2f43211e99259b827eb53ee51']

        super().__init__(contexte,
                         senseur_ids=self.document_ids,
                         timezone_horloge='America/Toronto',
                         intervalle_secs=5)
class BaseMongo:
    """Base test helper wiring a MilleGrilles context with document (Mongo)
    access."""

    def __init__(self):
        # Initialise the context with messaging so the document DAO is usable.
        contexte = ContexteRessourcesMilleGrilles()
        contexte.initialiser(init_message=True)
        self._contexte = contexte
        self.document_dao = contexte.document_dao

    def deconnecter(self):
        """Close the document DAO connection."""
        self.document_dao.deconnecter()

    @property
    def contexte(self):
        """The MilleGrilles resource context owned by this helper."""
        return self._contexte
Exemple #7
0
    def __init__(self):
        """Initialise the resource context, start the MQ ioloop on a
        background thread, then delegate to the parent constructor with a
        5 second interval."""
        contexte = ContexteRessourcesMilleGrilles()

        self.__logger = logging.getLogger(__name__ + '.' + self.__class__.__name__)

        self.__logger.info("contexte.initialiser()")
        contexte.initialiser()

        self.__logger.info("ioloop MQ")
        # Run the MQ ioloop on its own thread so this constructor can return.
        self.thread_ioloop = Thread(name="MQ-ioloop", target=contexte.message_dao.run_ioloop)
        self.thread_ioloop.start()

        self.__logger.info("super.init")
        super().__init__(contexte, intervalle_secs=5)
Exemple #8
0
class HachageTest:
    """Helper that hashes the JSON content of a file with the transaction
    signer and logs the digest."""

    def __init__(self):
        self.contexte = ContexteRessourcesMilleGrilles()
        self.contexte.initialiser()
        self._signateur = SignateurTransaction(self.contexte.configuration)

        self._logger = logging.getLogger("HachageTest")
        self._logger.setLevel(logging.DEBUG)

    def hacher_fichier(self, path):
        """Parse *path* as JSON, hash the content and return the digest.

        :param path: path to a JSON file
        :return: the digest produced by the signer's ``hacher_contenu``
        """
        with open(path) as f:
            # json.load reads the stream directly (was: json.loads(f.read())).
            dict_message = json.load(f)
        # Renamed from `hash`, which shadowed the builtin.
        hachage = self._signateur.hacher_contenu(dict_message)
        self._logger.info("Hachage: %s" % hachage)
        # Returning the digest (originally discarded) is backward-compatible
        # and makes the helper usable programmatically.
        return hachage
class GenererRapportsFenetresSenseurs:
    """Drive the passive-sensor document producer to (re)generate the hourly
    and daily report windows."""

    def __init__(self):
        self._logger = logging.getLogger('%s' % self.__class__.__name__)
        self._logger.setLevel(logging.INFO)

        self._contexte = ContexteRessourcesMilleGrilles()
        self._contexte.initialiser(init_document=True, init_message=True)

        self._producteur_doc_senseurspassifs = ProducteurDocumentSenseurPassif(self._contexte.document_dao)

    def calculer_fenetre_horaire(self):
        """Generate the hourly window documents."""
        producteur = self._producteur_doc_senseurspassifs
        producteur.generer_fenetre_horaire()

    def calculer_fenetre_derniereheure(self):
        """Add the most recent hour to the hourly window."""
        producteur = self._producteur_doc_senseurspassifs
        producteur.ajouter_derniereheure_fenetre_horaire()

    def calculer_fenetre_quotidienne(self):
        """Generate the daily window documents."""
        producteur = self._producteur_doc_senseurspassifs
        producteur.generer_fenetre_quotidienne()

    def calculer_fenetre_dernierjour(self):
        """Add the most recent day to the daily window."""
        producteur = self._producteur_doc_senseurspassifs
        producteur.ajouter_dernierjour_fenetre_quotidienne()
Exemple #10
0
    def __init__(self):
        """Set up the context without the document DAO, start the MQ ioloop
        thread, wait for the channel, and subscribe a callback queue on the
        noeuds exchange."""
        contexte = ContexteRessourcesMilleGrilles()
        contexte.initialiser(init_document=False)
        super().__init__(contexte)

        # Run the MQ ioloop on its own thread so the blocking waits below work.
        self.__thread_ioloop = Thread(
            name="MQ-ioloop", target=self.contexte.message_dao.run_ioloop)
        self.__thread_ioloop.start()
        self.generateur = self.contexte.generateur_transactions
        self.pret = Event()  # presumably set by set_cb_queue once the queue is bound — confirm
        self.recu = Event()  # presumably set when a message is received — confirm in callbacks

        # Register the reply-to queue (blocks up to 5 s for the channel).
        print("Attente du channel")
        self.contexte.message_dao.attendre_channel(5)

        # self.message_dao / self.configuration come from the parent class.
        self.message_dao.inscrire_topic(self.configuration.exchange_noeuds, [],
                                        self.set_cb_queue)
        # self.channel = self.message_dao.channel
        # self.channel.queue_declare(durable=True, exclusive=True, callback=self.set_cb_queue)
        self.queue_name = None
        self.pret.wait(5)
Exemple #11
0
# Script de test pour transmettre message de transaction

import datetime
import time
import json

from millegrilles.dao.Configuration import ContexteRessourcesMilleGrilles
from millegrilles.dao.MessageDAO import BaseCallback
from millegrilles.transaction.GenerateurTransaction import GenerateurTransaction
from millegrilles import Constantes
from millegrilles.Constantes import ConstantesDomaines, ConstantesBackup
from millegrilles.domaines.Principale import ConstantesPrincipale
from threading import Thread, Event

# Module-level fixture: create and initialise the shared MilleGrilles context
# used by the sample classes below.
contexte = ContexteRessourcesMilleGrilles()
contexte.initialiser()


class MessagesSample(BaseCallback):
    def __init__(self):
        """Register this sample as a channel listener on the module-level
        context and prepare a transaction generator."""
        super().__init__(contexte)  # `contexte` is the module-level instance above
        self.contexte.message_dao.register_channel_listener(self)
        self.generateur = GenerateurTransaction(self.contexte)

        self.channel = None       # set by on_channel_open when the MQ channel is ready
        self.event_recu = Event() # presumably set when a message is received — confirm in callback

    def on_channel_open(self, channel):
        # Enregistrer la reply-to queue
        self.channel = channel
        channel.queue_declare(durable=True,
Exemple #12
0
class ConnexionPrincipal:
    """
    Connection to the principal protected node.

    Wraps the docker client and the service monitor; after connecter() has
    run, exposes the MQ context used to relay messages between this dependant
    node and the principal node.
    """

    # def __init__(self, client_docker: docker.DockerClient, service_monitor: ServiceMonitorDependant):
    def __init__(self, client_docker: docker.DockerClient, service_monitor):
        self.__docker = client_docker
        self.__service_monitor = service_monitor

        # Populated later by connecter() / initialiser_relai_messages();
        # cast(None) keeps the type annotations while deferring construction.
        self.__contexte: ContexteRessourcesMilleGrilles = cast(ContexteRessourcesMilleGrilles, None)
        self.__traitement_messages_principal: TraitementMessagesConnexionPrincipale = cast(TraitementMessagesConnexionPrincipale, None)
        self.__transfert_messages_principal: TransfertMessages = cast(TransfertMessages, None)

    def connecter(self):
        """Read the connection config stored in docker, build the MQ context
        and connect to the principal node's broker over TLS with client-cert
        authentication."""
        gestionnaire_docker = self.__service_monitor.gestionnaire_docker
        config_connexion_docker = gestionnaire_docker.charger_config_recente('millegrille.connexion')['config']
        config_connexion = json.loads(b64decode(config_connexion_docker.attrs['Spec']['Data']))
        # clecert_monitor = self.__service_monitor.clc

        # Resolve the CA cert, monitor cert and key files from the certificate manager.
        gestionnaire_certificats = self.__service_monitor.gestionnaire_certificats
        certificats = gestionnaire_certificats.certificats
        path_secrets = gestionnaire_certificats.secret_path
        ca_certs_file = certificats['pki.millegrille.cert']
        monitor_cert_file = certificats['pki.%s.cert' % ConstantesGenerateurCertificat.ROLE_MONITOR_DEPENDANT]
        monitor_key_file = path.join(path_secrets, ConstantesServiceMonitor.DOCKER_CONFIG_MONITOR_DEPENDANT_KEY + '.pem')

        node_name = config_connexion['principal_mq_url']

        # MQ connection parameters for the principal node (TLS + cert auth on 5673).
        additionnals = [{
            'MG_MQ_HOST': node_name,
            'MG_MQ_PORT': 5673,
            'MG_MQ_CA_CERTS': ca_certs_file,
            'MG_MQ_CERTFILE': monitor_cert_file,
            'MG_MQ_KEYFILE': monitor_key_file,
            'MG_MQ_SSL': 'on',
            'MG_MQ_AUTH_CERT': 'on',
        }]

        configuration = TransactionConfiguration()
        self.__contexte = ContexteRessourcesMilleGrilles(configuration=configuration, additionals=additionnals)

        # Connect to the principal node's MQ broker.
        self.__contexte.initialiser(init_message=True, connecter=True)

        self.__traitement_messages_principal = TraitementMessagesConnexionPrincipale(self.__service_monitor, self.__contexte)
        self.__contexte.message_dao.register_channel_listener(self.__traitement_messages_principal)

    def initialiser_relai_messages(self, fonction_relai):
        """Register the message-transfer listener that relays messages via
        *fonction_relai*."""
        self.__transfert_messages_principal = TransfertMessages(
            self.__contexte, fonction_relai, self.__service_monitor.nodename)
        self.__contexte.message_dao.register_channel_listener(self.__transfert_messages_principal)

    def relayer_message(self, message_dict, routing_key, exchange, reply_to=None, correlation_id=None):
        """
        Relay a locally-received message to the principal node.

        :param message_dict: message body
        :param routing_key: routing key to publish with
        :param exchange: target exchange on the principal node
        :param reply_to: reply queue; the local placeholder value is replaced
            by the principal-side relay queue
        :param correlation_id: correlation id, forwarded as-is
        :return:
        """
        headers = {'noeud_source': self.__service_monitor.nodename}
        if reply_to == TransfertMessages.LOCAL_Q_PLACEHOLDER:
            # Substitute the principal-side relay queue to receive the response.
            reply_to = self.__transfert_messages_principal.queue_name
        self.generateur_transactions.emettre_message(message_dict, routing_key, [exchange], reply_to, correlation_id, headers)

    @property
    def reply_q(self):
        # Queue name of the principal-connection message handler.
        return self.__traitement_messages_principal.queue_name

    @property
    def generateur_transactions(self):
        # Transaction generator bound to the principal-node context.
        return self.__contexte.generateur_transactions

    def enregistrer_domaine(self, nom_domaine: str, exchanges_routing: dict):
        """Register *nom_domaine* with the transfer listener so its messages
        get relayed."""
        self.__transfert_messages_principal.ajouter_domaine(nom_domaine, exchanges_routing)
Exemple #13
0
class TestVerificateurs:
    """Wire up a resource context and expose a certificate verifier."""

    def __init__(self):
        contexte = ContexteRessourcesMilleGrilles()
        contexte.initialiser()
        self._contexte = contexte

        self.securite = VerificateurCertificats(contexte)
import datetime, time

from millegrilles.dao.Configuration import ContexteRessourcesMilleGrilles
from millegrilles.dao.MessageDAO import BaseCallback
from millegrilles.transaction.GenerateurTransaction import GenerateurTransaction
from millegrilles import Constantes
from millegrilles.domaines.Principale import ConstantesPrincipale
from threading import Thread, Event

import json
import uuid


# Module-level fixture: shared context for this sample script. Only messaging
# is set up; the document DAO is skipped (init_document=False).
contexte = ContexteRessourcesMilleGrilles()
contexte.initialiser(init_document=False)


class MessagesSample(BaseCallback):

    def __init__(self):
        """Register this sample as a channel listener on the module-level
        context and prepare a transaction generator."""
        super().__init__(contexte)  # `contexte` is the module-level instance above
        self.contexte.message_dao.register_channel_listener(self)
        self.generateur = GenerateurTransaction(self.contexte)

        self.queue_name = None    # presumably assigned once a queue is declared — confirm in callback

        self.channel = None       # set when the MQ channel opens
        self.event_recu = Event() # presumably set when a message is received — confirm in callback

    def on_channel_open(self, channel):
class ModeleConfiguration:
    """Base model for a MilleGrilles command-line program.

    Handles argument parsing, logging setup, context initialisation, the MQ
    channel lifecycle and graceful shutdown. Subclasses override executer().
    """

    def __init__(self):
        self._logger = logging.getLogger('%s' % self.__class__.__name__)
        self._logger.setLevel(logging.INFO)
        self._contexte = ContexteRessourcesMilleGrilles()
        self.parser = None  # Command-line argument parser (built in configurer_parser)
        self.args = None  # Parsed command-line arguments (set in parse)

        self.__fermeture_event = Event()

        self.__certificat_event_handler = GestionnaireEvenementsCertificat(
            self._contexte)
        self.__channel = None

    def initialiser(self,
                    init_document=False,
                    init_message=True,
                    connecter=True):
        """Initialise the resource context and install OS signal handlers.

        :param init_document: NOTE(review): accepted but not forwarded to
            contexte.initialiser below — confirm whether this is intentional.
        :param init_message: initialise the message DAO and register this
            instance as a channel listener
        :param connecter: connect to the broker immediately
        """
        # Handle OS signals so resources can be released on shutdown.
        signal.signal(signal.SIGINT, self.exit_gracefully)
        signal.signal(signal.SIGTERM, self.exit_gracefully)

        self._contexte.initialiser(init_message=init_message,
                                   connecter=connecter)

        if init_message:
            self._contexte.message_dao.register_channel_listener(self)

    def on_channel_open(self, channel):
        """MQ channel opened: set QoS, track closure, start certificate events."""
        channel.basic_qos(prefetch_count=1)
        channel.add_on_close_callback(self.on_channel_close)
        self.__channel = channel
        self.__certificat_event_handler.initialiser()

    def on_channel_close(self, channel=None, code=None, reason=None):
        """MQ channel closed: enter error state unless a shutdown is in progress."""
        self.__channel = None
        self._logger.warning("MQ Channel ferme")
        if not self.__fermeture_event.is_set():
            self.contexte.message_dao.enter_error_state()

    def __on_return(self, channel, method, properties, body):
        # Unroutable-message returns are ignored.
        pass

    def configurer_parser(self):
        """Build the base argument parser (--debug / --info verbosity flags)."""
        self.parser = argparse.ArgumentParser(
            description="Fonctionnalite MilleGrilles")

        self.parser.add_argument(
            '--debug',
            action="store_true",
            required=False,
            help="Active le debugging (logger, tres verbose)")

        self.parser.add_argument(
            '--info',
            action="store_true",
            required=False,
            help="Afficher davantage de messages (verbose)")

    def print_help(self):
        """Print the argument parser's help text."""
        self.parser.print_help()

    def exit_gracefully(self, signum=None, frame=None):
        """Signal handler / shutdown hook: flag closure and disconnect."""
        self.__fermeture_event.set()
        self.deconnecter()

    def parse(self):
        """Parse the command line into self.args."""
        self.args = self.parser.parse_args()

    def executer(self):
        """Main body of the program; must be overridden by subclasses.

        :raises NotImplementedError: always, in this base class.
        """
        # Fixed: the original raised NotImplemented("..."), but NotImplemented
        # is a sentinel value, not an exception class — calling it raised a
        # confusing TypeError instead of the intended NotImplementedError.
        raise NotImplementedError("Cette methode doit etre redefinie")

    def connecter(self):
        """Connect the message DAO if one is configured."""
        if self._contexte.message_dao is not None:
            self._contexte.message_dao.connecter()

    def deconnecter(self):
        """Disconnect the message DAO if one is configured."""
        if self._contexte.message_dao is not None:
            self._contexte.message_dao.deconnecter()

    def set_logging_level(self):
        """Use self.args to adjust the logging level (debug, info)."""
        if self.args.debug:
            self._logger.setLevel(logging.DEBUG)
            logging.getLogger('millegrilles').setLevel(logging.DEBUG)
            logging.getLogger('events').setLevel(logging.WARNING)
        elif self.args.info:
            self._logger.setLevel(logging.INFO)
            logging.getLogger('millegrilles').setLevel(logging.INFO)

    def main(self):
        """Program entry point: configure logging/arguments, initialise
        resources, run executer(), then shut down and report lingering
        threads before exiting with an appropriate return code."""

        return_code = 0

        try:
            # Prepare logging.
            logging.basicConfig(format=Constantes.LOGGING_FORMAT,
                                level=logging.WARNING)
            # logging.getLogger('millegrilles.dao.MessageDAO').setLevel(logging.INFO)
            self._logger.info("\n-----------\n\n-----------")
            self._logger.info("Demarrage de %s en cours\n-----------" %
                              self.__class__.__name__)

            # Parse arguments first to catch missing ones early.
            self.configurer_parser()
            self.parse()

            self.set_logging_level()

            self._logger.info("Initialisation")
            self.initialiser()  # Initialise resources

            self._logger.info("Debut execution")
            self.executer()  # Run the subclass body
            self.__fermeture_event.set()
            self._logger.info("Fin execution " + self.__class__.__name__)

        except Exception as e:
            return_code = 1
            print("MAIN: Erreur fatale, voir log. Erreur %s" % str(e))
            self._logger.exception("MAIN: Erreur")
            self.print_help()
        finally:
            self.exit_gracefully()

        self._logger.info("Main terminee, attente cleanup")
        time.sleep(0.2)
        self._logger.info("Main terminee, exit.")

        # Warn about threads still alive after shutdown was requested, give
        # them a grace period, then report any that remain.
        if threading.active_count() > 1:
            ok_threads = ['MainThread', 'pymongo_kill_cursors_thread']
            for thread in threading.enumerate():
                if thread.name not in ok_threads:
                    self._logger.warning(
                        "Thread ouverte apres demande de fermeture: %s" %
                        thread.name)

            time.sleep(5)
            thread_encore_ouverte = False
            for thread in threading.enumerate():
                if thread.name not in ok_threads:
                    self._logger.error(
                        "Thread encore ouverte apres demande de fermeture: %s"
                        % thread.name)
                    thread_encore_ouverte = True

            if thread_encore_ouverte:
                self._logger.error(
                    "Threads encore ouvertes, on force la sortie")

        sys.exit(return_code)

    @property
    def contexte(self) -> ContexteRessourcesMilleGrilles:
        """The MilleGrilles resource context."""
        return self._contexte

    @property
    def channel(self):
        """The current MQ channel, or None when closed."""
        return self.__channel
class Generateur:
    """Generate signed application and domain catalogue files.

    Reads application/domain configuration under ``args.path``, signs each
    catalogue with the transaction formatter, and writes ``.json.xz``
    archives under the ``generes/`` directory.
    """

    def __init__(self, args):
        """
        :param args: parsed command-line arguments; ``args.path`` is the
            catalogue root (falls back to the current directory when falsy).
        """
        self._args = args

        # Load the transaction signer (no document/message DAO needed).
        self._contexte = ContexteRessourcesMilleGrilles()
        self._contexte.initialiser(False, False)
        self._signateur = SignateurTransaction(self._contexte)
        self._signateur.initialiser()

        self._formatteur = FormatteurMessageMilleGrilles(
            self._contexte.idmg, self._signateur)

        self.__logger = logging.getLogger(__name__ + '.' +
                                          self.__class__.__name__)

    def generer_catalogue_applications(self):
        """
        Generate the per-application configuration archives and the signed
        applications catalogue file.
        :return:
        """
        # Local import: this module imports names *from* os, not os itself.
        import os

        path_catalogues = path.join(self._args.path or '.')

        path_archives_application = path.join(path_catalogues,
                                              'generes/applications')
        try:
            mkdir(path_archives_application)
        except FileExistsError:
            pass

        catalogue_apps = dict()
        fpconfig, path_config_temp = tempfile.mkstemp()
        # Fixed: mkstemp returns an OPEN file descriptor; close it immediately
        # to avoid a descriptor leak — the temp file is reopened by path below.
        os.close(fpconfig)
        for rep, config in IterateurApplications(path_catalogues):
            nom_application = config['nom']
            self.__logger.debug("Repertoire : %s" % rep)
            catalogue_apps[nom_application] = {'version': config['version']}

            # Check whether a tar archive must be produced for this application.
            # Every file except docker.json is bundled into a tar.xz archive
            # stored inside the catalogue entry.
            fichier_app = [f for f in listdir(rep) if f not in ['docker.json']]
            if len(fichier_app) > 0:
                with tarfile.open(path_config_temp, 'w:xz') as fichier:
                    # Add all of the application's configuration files
                    # (docker.json is excluded — generated separately).
                    for filename in fichier_app:
                        file_path = path.join(rep, filename)
                        fichier.add(file_path, arcname=filename)

                # Read back the .tar file and embed it as base64.
                with open(path_config_temp, 'rb') as fichier:
                    contenu_tar_b64 = b64encode(fichier.read())

                config['scripts'] = contenu_tar_b64.decode('utf-8')

            # Produce the .json.xz archive with the signed configuration and scripts.
            config = self.signer(
                config, ConstantesCatalogueApplications.
                TRANSACTION_CATALOGUE_APPLICATION)
            path_archive_application = path.join(path_archives_application,
                                                 nom_application + '.json.xz')
            with lzma.open(path_archive_application, 'wt') as output:
                json.dump(config, output)

        unlink(path_config_temp)  # Clean up the temporary file

        catalogue = {'applications': catalogue_apps}
        catalogue = self.signer(
            catalogue,
            ConstantesCatalogueApplications.TRANSACTION_CATALOGUE_APPLICATIONS)

        # Export the catalogue file.
        path_output = path.join(path_catalogues, 'generes',
                                'catalogue.applications.json.xz')
        with lzma.open(path_output, 'wt') as output:
            json.dump(catalogue, output)

    def generer_catalogue_domaines(self):
        """
        Generate a signed catalogue file covering all domains.
        :return:
        """

        path_catalogues = path.join(self._args.path or '.')

        path_fichier_domaines = path.join(path_catalogues, 'domaines.json')
        catalogue_domaines = dict()
        for nom_domaine, configuration in IterateurDomaines(
                path_fichier_domaines):
            self.__logger.debug("Domaine %s, configuration : %s" %
                                (nom_domaine, configuration))
            catalogue_domaines[nom_domaine] = configuration

        # Sign the catalogue.
        catalogue = {
            'domaines': catalogue_domaines,
        }
        catalogue = self.signer(
            catalogue,
            ConstantesCatalogueApplications.TRANSACTION_CATALOGUE_DOMAINES)

        # Export the catalogue file.
        path_output = path.join(path_catalogues, 'generes',
                                'catalogue.domaines.json.xz')
        with lzma.open(path_output, 'wt') as output:
            json.dump(catalogue, output)

    def signer(self, contenu: dict, domaine_action: str):
        """Sign *contenu* for *domaine_action* and return the signed message
        (the enveloppe uuid returned by the formatter is discarded)."""
        message_signe, uuid_enveloppe = self._formatteur.signer_message(
            contenu, domaine_action, ajouter_chaine_certs=True)
        return message_signe

    def generer(self):
        """Generate both the applications and the domains catalogues."""
        self.generer_catalogue_applications()
        self.generer_catalogue_domaines()