Example #1
def query_database_for_list_of_profiles():
    connexion = DB_connexion()
    data = connexion.Query_SQL_fetchall(pfl.select_user_profil)
    del connexion
    pfl_list = []
    for line in data:
        pfl_list.append({'label': line[1], 'value': line[0]})
    return pfl_list
Example #2
def recuperation_id(sql_select: str, valeur_inserer: tuple):
    connexion = DB_connexion()
    try:
        id_table = connexion.Query_SQL_fetchone(sql_select, valeur_inserer)[0]
    except Exception:
        id_table = None
    del connexion
    return id_table
Example #3
def query_database_for_list_of_categories():
    connexion = DB_connexion()
    data = connexion.Query_SQL_fetchall(ctg.select_category)
    del connexion
    ctg_list = []
    for line in data:
        ctg_list.append({'label': line[1], 'value': line[0]})
    return ctg_list
Example #4
    def __init__(self, id_project=0):
        if not id_project:
            FileLogger.log(logging.ERROR, "'id_project' cannot be empty!")
            return
        self.__id_project = id_project
        self.__connexion = DB_connexion()
        data = self.__connexion.Query_SQL_fetchone(prj.select_project_with_id,
                                                   [self.__id_project])
        self.__project_ID = data[0]
        self.__api_KEY = data[1]
Example #5
def query_database_for_list_of_messages():
    connexion = DB_connexion()
    data = connexion.Query_SQL_fetchall(msg.select_message_for_api)
    del connexion
    msg_list = []
    for line in data:
        msg_list.append({
            'id': line[0],
            'message': line[1],
            'Start Date': line[2],
            'End Date': line[3]
        })
    return msg_list
Example #6
def insert_project(project_ID: str, api_key: str):

    dico = {
        'project_ID': project_ID,
        'api_key': api_key
    }
    try:
        connexion = DB_connexion()
        id_project = connexion.Insert_SQL_fetchone(
            project.insert_project, dico)[0]
        del connexion
        return id_project
    except (psycopg2.Error, AttributeError) as Error:
        print(Error)
Example #7
def insert_selection(project_ID: str, api_key: str, selection_name: str, description: str):

    dico = {
        'project_ID': project_ID,
        'api_key': api_key,
        'selection': selection_name,
        'description': description
    }
    try:
        connexion = DB_connexion()
        connexion.Insert_SQL(selection.insert_selection, dico)
        del connexion
        return "That's ok !"
    except (psycopg2.Error, AttributeError) as Error:
        return Error
Example #8
def connexion_admin(nom_admin: str, password: str, inscription: bool = False):
    '''Check whether a user exists or not.'''

    connexion = DB_connexion()

    list_admin: list = connexion.Query_SQL_fetchall(admin.select_admin)

    if not inscription:
        try:
            mdp_base: str = connexion.Query_SQL_fetchone(
                admin.select_password, [nom_admin, ])[0]
            existe: bool = check_password_hash(mdp_base, password)
        except Exception:
            existe = False
        del connexion
        return existe, list_admin
    else:
        del connexion
        return list_admin
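
A minimal usage sketch (not part of the original sources) to illustrate the two return shapes of connexion_admin: a (bool, list) pair for a login check, and only the admin list when inscription is True. The names and passwords below are placeholder assumptions.

# Hypothetical usage; 'alice' and 'secret' are placeholders.
exists, admin_list = connexion_admin('alice', 'secret')                  # login-check path
admin_list_only = connexion_admin('alice', 'secret', inscription=True)   # sign-up path
if exists:
    print('Credentials accepted')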
Example #9
def insert_administrator(username: str, password: str, mail: str = None):

    id_admin = recuperation_id(admin.select_id_admin, (username,))
    hash_method = 'pbkdf2:sha256'
    password_hash = generate_password_hash(password, hash_method)
    if not isinstance(id_admin, int):
        dico = {
            'admin_name': username,
            'admin_pwd_hash': password_hash,
            'admin_email': mail}
        try:
            connexion = DB_connexion()
            id_admin = connexion.Insert_SQL_fetchone(
                admin.insert_admin, dico)[0]
            del connexion
        except Exception as Error:
            print(f'Failed user insert! {Error}')
    else:
        print('There is already a user!')
    return id_admin
Example #10
class Selection():
    """Classe permettant de gérer les sélections d'un projet
    """
    def __init__(self, id_project=0):
        if not id_project:
            FileLogger.log(logging.ERROR, "'id_project' could not be empty!")
            return
        self.__id_project = id_project
        self.__connexion = DB_connexion()
        data = self.__connexion.Query_SQL_fetchone(prj.select_project_with_id,
                                                   [self.__id_project])
        self.__project_ID = data[0]
        self.__api_KEY = data[1]

    def __del__(self):
        del self.__connexion

        FileLogger.log(logging.DEBUG,
                       "Destruction of Selection class instance")

    def __Create(self, selection=None):
        """Insert the selections into the selection table.
        """
        try:
            if not selection:
                return
            # Load the 'selection' table definition from the database
            metadata = sqlalchemy.MetaData(bind=None)
            tInfo_selection = sqlalchemy.Table(
                'selection',
                metadata,
                autoload=True,
                autoload_with=self.__connexion.engine())

            lines = self.__connexion.engine().connect().execute(
                tInfo_selection.insert(None),
                [{
                    'id_project': self.__id_project,
                    'selection': aselection.selection,
                    'description': aselection.description,
                } for aselection in selection])
            nb_connexion = self.__connexion.number_connections()
            FileLogger.log(
                logging.DEBUG,
                f"New instance created, number of connexion : {nb_connexion}")
            FileLogger.log(
                logging.DEBUG,
                f"{lines.rowcount} selection(s) for project_ID: {self.__id_project} inserted!"
            )
        except Exception:
            FileLogger.log(logging.ERROR, traceback.print_exc())

    def __Read(self):
        """Lecture des selections pour un projet donné.
        """
        try:

            metadata = sqlalchemy.MetaData(bind=None)
            tInfo_selection = sqlalchemy.Table(
                'selection',
                metadata,
                autoload=True,
                autoload_with=self.__connexion.engine())

            dico_selection = {}

            query = sqlalchemy.select([tInfo_selection]).where(
                sqlalchemy.and_(
                    tInfo_selection.c.id_project == self.__id_project,
                )).distinct()

            result = self.__connexion.engine().connect().execute(query)
            nb_connexion = self.__connexion.number_connections()
            FileLogger.log(
                logging.DEBUG,
                f"New instance created, number of connexion : {nb_connexion}")
            if result.rowcount == 0:
                return dico_selection
            for row in result:
                aselection = selectionModel(row[tInfo_selection.c.id_project],
                                            row[tInfo_selection.c.selection],
                                            row[tInfo_selection.c.description])
                key = "{0}_#_{1}".format(aselection.id_project,
                                         aselection.selection)

                if key not in dico_selection:
                    dico_selection[key] = aselection

            return dico_selection

        except Exception:
            FileLogger.log(logging.ERROR, traceback.print_exc())

    def __Update(self, selection=None):
        """Update the data in the selection table.
        """
        try:

            if not selection:
                return

            metadata = sqlalchemy.MetaData(bind=None)
            tInfo_selection = sqlalchemy.Table(
                'selection',
                metadata,
                autoload=True,
                autoload_with=self.__connexion.engine())

            query = tInfo_selection.update(None).where(
                sqlalchemy.and_(
                    tInfo_selection.c.id_project == int(self.__id_project),
                    tInfo_selection.c.selection == sqlalchemy.bindparam(
                        'c_selection'),
                )).values(description=sqlalchemy.bindparam('description'), )

            lines = self.__connexion.engine().connect().execute(
                query, [{
                    'c_selection': str(aselection.selection),
                    'description': aselection.description,
                } for aselection in selection])
            nb_connexion = self.__connexion.number_connections()
            FileLogger.log(
                logging.DEBUG,
                f"New instance created, number of connexion : {nb_connexion}")
            FileLogger.log(
                logging.DEBUG,
                f"{lines.rowcount} selection(s) for project_ID: {self.__id_project} updated!"
            )

        except Exception:
            FileLogger.log(logging.ERROR, traceback.print_exc())

    def __Delete(self, selection=None):
        """Delete selection(s).
        """
        try:
            if not selection:
                return

            metadata = sqlalchemy.MetaData(bind=None)
            tInfo_selection = sqlalchemy.Table(
                'selection',
                metadata,
                autoload=True,
                autoload_with=self.__connexion.engine())

            lines = 0
            for aselection in selection:
                selection_name = re.split(r'_#_', aselection)[1]
                query = tInfo_selection.delete(None).where(
                    sqlalchemy.and_(
                        tInfo_selection.c.id_project == self.__id_project,
                        tInfo_selection.c.selection == selection_name))
                line = self.__connexion.engine().connect().execute(query)
                nb_connexion = self.__connexion.number_connections()
                FileLogger.log(
                    logging.DEBUG,
                    f"New instance created, number of connexion : {nb_connexion}"
                )
                lines += int(line.rowcount)
            FileLogger.log(
                logging.DEBUG,
                f"{lines} selection(s) for project_ID: {self.__id_project} deleted!"
            )

        except Exception:
            FileLogger.log(logging.ERROR, traceback.print_exc())

    def Execute(self):
        """Execution des traitements pour récupérer les selection d'un project_ID.
        """

        try:
            # Load all existing selections for this project_ID.
            dict_selection = self.__Read()
            listUpdate_selection = []
            listInsert_selection = []
            listOfKeys = []
            selection_df = apex.retrieve_selection_list(
                self.__id_project, self.__project_ID, self.__api_KEY)
            for _i, row in selection_df.iterrows():
                aselection = selectionModel(
                    self.__id_project,
                    row["selection"],
                    row["description"],
                )

                currentKey = "{0}_#_{1}".format(aselection.id_project,
                                                aselection.selection)

                if currentKey in listOfKeys:  # Skip keys that have already been processed.
                    continue
                listOfKeys.append(currentKey)
                if currentKey not in dict_selection:
                    listInsert_selection.append(aselection)
                else:
                    listUpdate_selection.append(aselection)
                    del dict_selection[currentKey]

            # Update
            if listUpdate_selection:
                FileLogger.log(
                    logging.DEBUG,
                    f"Selection list for project_ID: {self.__id_project} Update in progress..."
                )
                self.__Update(listUpdate_selection)

            # Delete
            if dict_selection:
                FileLogger.log(
                    logging.DEBUG,
                    f"Selection list for project_ID: {self.__id_project} Delete in progress..."
                )
                self.__Delete(dict_selection)

            # insert
            if listInsert_selection:
                FileLogger.log(
                    logging.DEBUG,
                    f"Selection list for project_ID: {self.__id_project} Insert in progress..."
                )
                self.__Create(listInsert_selection)

        except Exception:
            FileLogger.log(logging.ERROR, traceback.print_exc())
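
A hedged usage sketch for the Selection class above, assuming a project with the chosen id already exists in the project table (the id 42 is a placeholder). Instantiation loads project_ID and api_KEY through DB_connexion, and Execute() performs the insert/update/delete synchronization shown above.

# Hypothetical usage; id_project=42 is an assumption for illustration.
selection_sync = Selection(id_project=42)   # reflects the project row from the database
selection_sync.Execute()                    # insert / update / delete selections as needed
del selection_sync                          # __del__ releases the DB_connexion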
Example #11
"""----------------------------
Creation date  : 2020-07-20
Last update    : 2020-07-21
----------------------------"""

import logging
from LoggerModule.FileLogger import FileLogger as FileLogger
from DB_Connexion import DB_connexion
import Table_project as prj
import app

if __name__ == "__main__":
    connexion = DB_connexion()
    project_list = connexion.Query_SQL_fetchall(prj.select_project_information)
    del connexion
    for project in project_list:
        project_id = project[0]
        app.asynchronous_selection_extract.apply_async(args=[project_id],
                                                       countdown=2)
        FileLogger.log(logging.DEBUG,
                       f"Periodic Task executed for project {project_id}")
Example #12
def query_database_for_list_of_filtered_locations(categories, profiles):
    connexion = DB_connexion()
    FileLogger.log(
        logging.DEBUG,
        f"{categories} categories and {profiles} profiles extraction start")
    set_of_all_location = set()
    list_of_location = connexion.Query_SQL_fetchall(apidae.select_apidae)
    for location in list_of_location:
        set_of_all_location.add(location[1])
    nb_locations = len(set_of_all_location)
    nb_locations_extracted = len(list_of_location)
    FileLogger.log(
        logging.DEBUG,
        f"{nb_locations} different locations in set for {nb_locations_extracted} location extracted !!!"
    )
    set_of_location_id = set()
    list_of_location = connexion.Query_SQL_fetchall(
        apidae.select_apidae_with_categorie_list_edited_and_profil_list_edited,
        [profiles, categories])
    for location in list_of_location:
        set_of_location_id.add(location[0])
        try:
            set_of_all_location.remove(location[0])
        except KeyError:
            FileLogger.log(
                logging.ERROR,
                f"{location[0]} no more in set of all locations !!!")
    nb_locations_for_json = len(set_of_location_id)
    nb_locations = len(set_of_all_location)
    FileLogger.log(
        logging.DEBUG,
        f"1st step : {nb_locations_for_json} locations for json out of {nb_locations} different locations remaining in set for {nb_locations_extracted} location extracted !!!"
    )
    list_of_location = connexion.Query_SQL_fetchall(
        apidae.select_apidae_with_categorie_list_edited_and_profil_list,
        [profiles, categories])
    for location in list_of_location:
        set_of_location_id.add(location[0])
        try:
            set_of_all_location.remove(location[0])
        except KeyError:
            FileLogger.log(
                logging.ERROR,
                f"{location[0]} no more in set of all locations !!!")
    nb_locations_for_json = len(set_of_location_id)
    nb_locations = len(set_of_all_location)
    FileLogger.log(
        logging.DEBUG,
        f"2nd step : {nb_locations_for_json} locations for json out of {nb_locations} different locations remaining in set for {nb_locations_extracted} location extracted !!!"
    )
    list_of_location = connexion.Query_SQL_fetchall(
        apidae.select_apidae_with_categorie_list_and_profil_list_edited,
        [profiles, categories])
    for location in list_of_location:
        set_of_location_id.add(location[0])
        try:
            set_of_all_location.remove(location[0])
        except KeyError:
            FileLogger.log(
                logging.ERROR,
                f"{location[0]} no more in set of all locations !!!")
    nb_locations_for_json = len(set_of_location_id)
    nb_locations = len(set_of_all_location)
    FileLogger.log(
        logging.DEBUG,
        f"3rd step : {nb_locations_for_json} locations for json out of {nb_locations} different locations remaining in set for {nb_locations_extracted} location extracted !!!"
    )
    list_of_location = connexion.Query_SQL_fetchall(
        apidae.select_apidae_with_categorie_list_and_profil_list,
        [profiles, categories])
    for location in list_of_location:
        set_of_location_id.add(location[0])
        try:
            set_of_all_location.remove(location[0])
        except KeyError:
            FileLogger.log(
                logging.ERROR,
                f"{location[0]} no more in set of all locations !!!")
    nb_locations_for_json = len(set_of_location_id)
    nb_locations = len(set_of_all_location)
    FileLogger.log(
        logging.DEBUG,
        f"4th step : {nb_locations_for_json} locations for json out of {nb_locations} different locations remaining in set for {nb_locations_extracted} location extracted !!!"
    )
    locations_list = []
    nb_location = 0
    for id_location in set_of_location_id:
        data = connexion.Query_SQL_fetchone(
            apidae.select_apidae_1_id_apidae_with_data_edited, [id_location])
        dict_for_apidae, dict_for_geometry = functions.create_dict_for_lieu_validated(
            data)
        # liste properties, geometry
        locations_list.append(dict_for_apidae)
        # locations_list.append(dict_for_properties)  # properties only
        nb_location += 1
    FileLogger.log(
        logging.DEBUG,
        f"{nb_locations_for_json} in set of location and {nb_location} locations extracted !!!"
    )
    del connexion
    return nb_location, locations_list
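
A short caller sketch for the function above, with the caveat that the exact format of the categories and profiles arguments depends on the apidae SQL statements; the values below are placeholders, not taken from the project.

# Hypothetical call; [1, 2] and [3] are placeholder filter values.
nb_location, locations_list = query_database_for_list_of_filtered_locations(
    categories=[1, 2], profiles=[3])
FileLogger.log(logging.DEBUG, f"{nb_location} filtered locations returned")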
Example #13
full_actions_list = (
    Table_message.drop_message, Table_elementReference.drop_elementRef,
    Table_project.drop_project, Table_selection.drop_selection,
    Table_extraction.drop_selection_extraction, Table_Apidae.drop_apidae,
    Table_category.drop_category, Table_profil.drop_user_profil,
    Table_relation_eltref_prf.drop_relation_eltref_profil,
    Table_relation_eltref_ctg.drop_relation_eltref_category,
    Table_relation_selection_profil.drop_relation_selection_profil,
    Table_relation_selection_category.drop_relation_selection_category,
    Table_relation_category_data_from_apidae.drop_relation_category_apidae,
    Table_relation_profil_data_from_apidae.drop_relation_profil_apidae,
    Table_admin.drop_admin, Table_freshness.drop_freshness_level,
    Table_message.message, Table_elementReference.elementRef,
    Table_project.project, Table_project.default_project_for_manual_entry,
    Table_selection.selection,
    Table_selection.default_selection_for_manual_entry,
    Table_extraction.selection_extraction, Table_Apidae.apidae,
    Table_category.category, Table_profil.user_profil,
    Table_relation_eltref_prf.relation_eltref_profil,
    Table_relation_eltref_ctg.relation_eltref_category,
    Table_relation_selection_profil.relation_selection_profil,
    Table_relation_selection_category.relation_selection_category,
    Table_relation_category_data_from_apidae.relation_category_apidae,
    Table_relation_profil_data_from_apidae.relation_profil_apidae,
    Table_admin.admin, Table_freshness.freshness_level)

if __name__ == "__main__":
    connexion = DB_connexion()
    for value in full_actions_list:
        connexion.Insert_SQL(value)
    del connexion