def check_table_specific(year):
    """
        Verify if the table already exists on the database.\n
        Also it checks if the table already has information about the "year".
    """
    tableCharacteristics = """ "id_rede" INT, "id_estacao" INT, "id_gas" VARCHAR(255), "date" timestamp, "value" FLOAT"""
    check = check_table(DB_NAME,
                        TABLENAME,
                        tableCharacteristics,
                        db_user,
                        db_password,
                        db_host,
                        db_port,
                        delete=False)

    if check:
        sql = """select count(id_estacao) from "%s" where EXTRACT(year from date) = %s """ % (
            TABLENAME, year)
        resp = sqlExecute(DB_NAME, db_user, db_password, db_host, db_port, sql,
                          True)
        if resp['success']:
            if resp['data'][0][0] == 0:
                return None
            else:
                msg = """The database already have the year {0}.\n
                Use this command: DELETE from "tablename" where EXTRACT(year from date) = {0}.""".format(
                    year)
                log_logs_file(msg)
                return msg

    return "Check the Logs"
def save_APA_Data(year=None):
    """
        To a specific year, save all the data from all stations to the Database named "APA_EMEP_DATA".\n
        The data is all saved on the same table, "<TABLENAME>".\n
        The data is retrieve from https://qualar1.apambiente.pt/qualar/excel_new.php?excel=1
    """
    if year is None:
        sql = "drop table {}".format(global_settings.APA_TABLE_NAME)
        sqlExecute(DB_NAME, db_user, db_password, db_host, db_port, sql, False)

    inputs_information = extract_all_information()
    gas_info = pd.read_excel(global_settings.PATH_EXCEL_GAS)
    gas_names = gas_info['Gas'].tolist()
    str_estacoes = ""
    tableCharacteristics = """ "id_rede" INT, "id_estacao" INT, "id_gas" VARCHAR(255), "date" timestamp, "value" FLOAT"""

    count_download_excels = 0

    information = {
        'gas_info': gas_info,
        'gas_names': gas_names,
        'str_estacoes': "",
    }

    # for index, row in estacoes_info.iterrows():
    for input_info in inputs_information:
        if year is None:
            years = extract_year_for_station(input_info[0], input_info[1])
            # Use a separate loop variable so the outer "year is None" check is
            # not clobbered for the remaining stations.
            for station_year in years:
                count_download_excels += 1
                extract_and_saved_excel(input_info[0], input_info[1],
                                        station_year, information,
                                        tableCharacteristics)
        else:
            count_download_excels += 1
            extract_and_saved_excel(input_info[0], input_info[1], year,
                                    information, tableCharacteristics)

    if year is None:
        year = "todos os anos possíveis"

    if len(information['str_estacoes'].split(", ")) != count_download_excels:
        log_logs_file("Sucesso ano {}".format(year))
        return "Sucesso ano {}".format(year)

    log_logs_file(
        "Não foi possível atualizar os dados do ano {}.".format(year))
    return "Não foi possível atualizar os dados do ano {}.".format(year)
Example #3
def copy_excel_database(directory,
                        fileName,
                        Database,
                        tableName,
                        tableCharacteristics,
                        functionInsert,
                        db_user,
                        db_password,
                        db_host,
                        db_port,
                        sheet_name=None,
                        matrix=False,
                        delete=True,
                        create=True,
                        information={},
                        cv=False):
    """
        Copy the data from the excel to a table in the database

        Steps:
            - Verify if the table exists
            - Open the Excel
            - Create the sql statements to upload the data
            - Execute the sql statements
    """

    if check_table(Database, tableName, tableCharacteristics, db_user,
                   db_password, db_host, db_port, delete, create):
        path = directory + fileName
        if cv:
            # CSV files have no sheets; pandas.read_csv does not accept a
            # sheet_name argument, so it is ignored here.
            dataframe = pd.read_csv(path)

        else:
            if sheet_name is not None:
                dataframe = pd.read_excel(path, sheet_name=sheet_name)
            else:
                dataframe = pd.read_excel(path)

        if matrix:
            dataframe = dataframe.to_numpy()

        sql = functionInsert(tableName, dataframe, fileName, information)

        if sql != '':
            resp = sqlExecute(Database, db_user, db_password, db_host, db_port,
                              sql, False)

            if not resp['success']:
                log_logs_file('Error: {}'.format(resp['data']))

            return resp
        else:
            log_logs_file("Sql statement is empty")

            return None
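
# A sketch of the functionInsert callback expected by copy_excel_database above
# (hypothetical column names): it receives the table name, the DataFrame read
# from the file, the file name and the extra "information" dict, and must return
# a single SQL string ('' to skip execution).
def _example_insert_builder(tableName, dataframe, fileName, information):
    statements = []
    for _, row in dataframe.iterrows():
        statements.append(
            """INSERT INTO "{0}" ("id_estacao", "value") VALUES ({1}, {2});""".format(
                tableName, row['id_estacao'], row['value']))
    return "".join(statements)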
Example #4
def createView(Database, db_user, db_password, db_host, db_port,
               foreignOptions):
    """
        Create a view that connects two or more tables
    """

    tableName = foreignOptions[0]['table']

    from_args = '"%s"' % (tableName)
    select_args = ''

    columnNames = {}

    foreignTables = []
    for opts in foreignOptions:
        f_Name = opts['f_table']
        foreignTables.append(f_Name)
        aux = columnNamesOfTable(Database, f_Name, db_user, db_password,
                                 db_host, db_port)
        aux.remove(opts['f_col'])
        columnNames[f_Name] = aux
        from_args += ' INNER JOIN "{1}" ON "{0}"."{2}" = "{1}"."{3}"'.format(
            tableName, f_Name, opts['col'], opts['f_col'])

    aux = columnNamesOfTable(Database, foreignOptions[0]['table'], db_user,
                             db_password, db_host, db_port)

    for key in columnNames:
        f_keys = foreignkeyTable(Database, key, db_user, db_password, db_host,
                                 db_port)
        for f_key in f_keys:
            if f_key['f_table'] in foreignTables:
                columnNames[key].remove(f_key['col'])
        for val in columnNames[key]:
            select_args += '"%s"."%s",' % (key, val)

    for val in aux:
        select_args += '"%s"."%s",' % (tableName, val)

    sql = """CREATE VIEW %s_view as SELECT %s FROM %s """ % (
        tableName, select_args[:-1], from_args)

    # print(sql)

    rec = sqlExecute(Database, db_user, db_password, db_host, db_port, sql,
                     False)

    if rec['success']:
        return "%s_view" % (tableName)

    return tableName
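
# A sketch of the foreignOptions structure consumed by createView: one dict per
# foreign key, naming the local table/column and the referenced table/column.
# The table and column names below are only illustrative.
def _example_create_station_view():
    foreignOptions = [{
        'table': 'measurements',      # base table of the view
        'col': 'id_estacao',          # local column holding the foreign key
        'f_table': 'stations',        # referenced table
        'f_col': 'id_estacao',        # referenced column
    }]
    return createView(DB_NAME, db_user, db_password, db_host, db_port,
                      foreignOptions)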
Example #5
def get_gasValues(id_estacao, id_gas, resolution, date):
    """
        Return an HTML table containing all the values for a specific station and gas at a specific temporal resolution, using date as the reference
    """
    sql = sql_GasValues(id_estacao, id_gas, resolution, date)
    html = ""
    resp = sqlExecute(DB_NAME, db_user, db_password, db_host, db_port, sql,
                      True)
    if resp['success']:
        html, data_array = makeHtmlTable(resp['data'])

    return HttpResponse(content_type='text/json',
                        content=json.dumps({
                            "htmlTable": html,
                            "data_array": data_array,
                        }))
Example #6
def datumCode(name, Database, db_user, db_password, db_host, db_port):
    """
        Get the EPSG Datum Code for a specific name from the epsg database
    """
    sql = """e.coord_ref_sys_code from epsg_coordinatereferencesystem as e 
        INNER JOIN spatial_ref_sys as s ON e.coord_ref_sys_code=s.srid WHERE e.coord_ref_sys_name='%s'""" % (
        name)

    rec = sqlExecute(Database, db_user, db_password, db_host, db_port, sql,
                     True)

    if rec['success']:
        data = rec['data'][0][0]

        return {'success': True, 'data': data}

    return rec
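
# A small usage sketch: resolve one of the names returned by retrieveDatumNames
# (defined further below) to its EPSG code, using the module-level connection
# settings.
def _example_lookup_datum_code(name='WGS 84'):
    rec = datumCode(name, DB_NAME, db_user, db_password, db_host, db_port)
    if rec['success']:
        return rec['data']            # e.g. 4326 for 'WGS 84'
    return None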
Example #7
def get_ListGas(id_estacao):
    """
        Return all the gas that the station has information
    """
    table_gas_info = global_settings.GAS_INFORMATION_TABLE
    sql = '''SELECT DISTINCT id_gas, "Nome" FROM "{0}" inner join "{1}" on id_gas = "ID_APA" WHERE id_estacao = {2}'''.format(
        TABLENAME, table_gas_info, id_estacao)
    resp = sqlExecute(DB_NAME, db_user, db_password, db_host, db_port, sql,
                      True)
    listGas = []
    if resp['success']:
        for id_gas in resp['data']:
            listGas.append({'Name': id_gas[1], 'Id': id_gas[0]})
    return HttpResponse(content_type='text/json',
                        content=json.dumps({
                            "listGas": listGas,
                        }))
Example #8
def createMappingTable(data, tableName, coordenatePoints, excelColumns,
                       excelForeignCols, datum_code, tableView, Database,
                       db_user, db_password, db_host, db_port):
    """
        Create the rule to later be more easier and quickly to upload the excle with the configorations to the database
    """
    my_data = {}

    my_data["tableView"] = tableView
    my_data["coordenatePoints"] = False
    my_data["coordenateInfo"] = {}
    my_data["variables"] = {}

    if coordenatePoints:
        aux_json = {}
        my_data["coordenatePoints"] = True
        name = data["nameCoord"]
        if name == "":
            name = "location"
        aux_json["colName"] = name
        aux_json["latName"] = data["latName"]
        aux_json["longName"] = data['longName']
        aux_json["latDegDec"] = data['latDegDec']
        aux_json["longDegDec"] = data['longDegDec']
        aux_json["datum"] = datum_code
        my_data["coordenateInfo"] = aux_json

    for name in excelColumns:
        if name == data['latName'] or name == data['longName']:
            continue

        dataType = data[name]
        if dataType == 'None':
            if name in excelForeignCols.keys():
                dataType = excelForeignCols[name]['type']

        my_data["variables"][name] = dataType

    sql = """INSERT INTO mapping_json(tableName, JSON) VALUES ('%s', '%s')""" % (
        tableName, json.dumps(my_data))

    resp_sql = sqlExecute(Database, db_user, db_password, db_host, db_port,
                          sql, False)

    return resp_sql
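
# A hedged sketch of the "data" form dict that createMappingTable reads
# (the column names and types below are only illustrative):
def _example_mapping_rule(datum_code):
    excelColumns = ['Latitude', 'Longitude', 'Temperatura']
    data = {
        'nameCoord': '',              # empty -> the point column is named "location"
        'latName': 'Latitude',        # excel column holding the latitude
        'longName': 'Longitude',      # excel column holding the longitude
        'latDegDec': 'decimal',
        'longDegDec': 'decimal',
        'Temperatura': 'FLOAT',       # every non-coordinate column -> its SQL type
    }
    return createMappingTable(data, 'my_table', True, excelColumns, {},
                              datum_code, 'my_table_view', DB_NAME, db_user,
                              db_password, db_host, db_port)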
Example #9
def update_bbox_geoserver(workspace, datastore, tableName, location_col_name,
                          database, db_user, db_pass, db_host, db_port):
    """
        Recompute the bounding box of a table from its location column and
        update the corresponding GeoServer feature type through the REST API
    """
    headers_xml = {'Content-Type': 'application/xml; charset=UTF-8'}
    headers_json_content = {'Content-Type': 'application/json'}
    headers_json_accept = {'accept': 'application/json'}

    r = requests.get('{0}/workspaces/{1}/datastores/{2}/featuretypes/{3}'\
                    .format(gs_rest_url, workspace, datastore, tableName),
                                auth=HTTPBasicAuth(user, pwd),
                                headers=headers_json_accept
                                )

    if r.status_code == 200:
        json_content = r.json()

        sql = '''SELECT ST_Extent("{0}"::geometry) as a from "{1}"'''.format(
            location_col_name, tableName)
        rec = sqlExecute(database, db_user, db_pass, db_host, db_port, sql,
                         True)
        if rec['success']:
            bbox = rec['data'][0][0]
            bbox = bbox.split('BOX(')[1].split(')')[0].split(',')
            mins = bbox[0].split(' ')
            maxs = bbox[1].split(' ')

            json_content['featureType']['nativeBoundingBox']['minx'] = mins[0]
            json_content['featureType']['nativeBoundingBox']['miny'] = mins[1]
            json_content['featureType']['nativeBoundingBox']['maxx'] = maxs[0]
            json_content['featureType']['nativeBoundingBox']['maxy'] = maxs[1]

            json_content['featureType']['latLonBoundingBox']['minx'] = mins[0]
            json_content['featureType']['latLonBoundingBox']['miny'] = mins[1]
            json_content['featureType']['latLonBoundingBox']['maxx'] = maxs[0]
            json_content['featureType']['latLonBoundingBox']['maxy'] = maxs[1]

        r = requests.put('{0}/workspaces/{1}/datastores/{2}/featuretypes/{3}'\
                    .format(gs_rest_url, workspace, datastore, tableName),
                                auth=HTTPBasicAuth(user, pwd),
                                json=json_content,
                                headers=headers_json_content
                                )

        print(r.text, r.status_code)
Example #10
def retrieveDatumNames(Database, db_user, db_password, db_host, db_port):
    """
        Get all the EPSG Datum Names from the epsg database
    """
    sql = """SELECT e.coord_ref_sys_name from epsg_coordinatereferencesystem as e 
        INNER JOIN spatial_ref_sys as s ON e.coord_ref_sys_code=s.srid"""

    rec = sqlExecute(Database, db_user, db_password, db_host, db_port, sql,
                     True)

    if rec['success']:
        data = []

        for aux in rec['data']:
            if aux[0] is not None:
                data.append(aux[0])

        data = sorted(data)
        return {'success': True, 'data': data}

    else:
        return rec
Example #11
def getMappingTable(database, user, password, host, port, tableName):
    """
        Get the map to convert the excel to a new entry on a specific table
    """

    sql = """SELECT json FROM mapping_json WHERE tableName = '%s'""" % (
        tableName)

    rec = sqlExecute(database, user, password, host, port, sql, True)

    if rec['success']:
        if len(rec['data']) < 1:
            return {
                'success': False,
                'error': ("There is no entry with table_name = %s, or the entry "
                          "doesn't have a value in the JSON column") % (tableName)
            }
        return {'success': True, 'data': rec['data'][0][0]}

    return rec
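
# Usage sketch: depending on how the JSON column is typed, the stored mapping may
# come back as a string, so it can still need json.loads before being inspected
# (assumes the module-level connection settings).
def _example_read_mapping(tableName='my_table'):
    rec = getMappingTable(DB_NAME, db_user, db_password, db_host, db_port,
                          tableName)
    if not rec['success']:
        return None
    mapping = rec['data']
    if isinstance(mapping, str):      # json/jsonb columns may already be decoded
        mapping = json.loads(mapping)
    return mapping.get('variables', {})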
Example #12
def searchTablesByDatabase(database,
                           user,
                           password,
                           host,
                           port,
                           mapping_json=False):
    """
        Get the list of tables from : 
            - the database if mapping_json == False
            - the table named mapping_json, otherwise
    """
    if mapping_json:
        sql = """SELECT tableName FROM mapping_json"""
    else:
        sql = """SELECT table_name FROM information_schema.tables
                WHERE table_schema = 'public' and table_type='BASE TABLE'"""

    rec = sqlExecute(database, user, password, host, port, sql, True)

    if rec['success']:
        data = []

        if mapping_json:
            for aux in rec['data']:
                data.append(aux[0])
        else:
            for aux in rec['data']:
                if not ('spatial_ref_sys' in aux[0]
                        or 'mapping_json' in aux[0]):
                    data.append(aux[0])

        data = sorted(data)
        return {'success': True, 'data': data}

    else:
        return rec
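
# Usage sketch: with mapping_json=False the public BASE TABLEs are listed (minus
# spatial_ref_sys and mapping_json), while mapping_json=True returns only the
# tables that already have an upload rule registered.
def _example_tables_without_rule():
    all_tables = searchTablesByDatabase(DB_NAME, db_user, db_password, db_host,
                                        db_port)
    mapped = searchTablesByDatabase(DB_NAME, db_user, db_password, db_host,
                                    db_port, mapping_json=True)
    if all_tables['success'] and mapped['success']:
        return [t for t in all_tables['data'] if t not in mapped['data']]
    return []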
Example #13
def create_table_excel_general(tableName, columns, data_df, keys, data,
                               coordenatePoints, datum_code, foreignData,
                               Database, db_user, db_password, db_host,
                               db_port):
    """
        Create a table according to the form and format of the excel
    """

    timeDimensionAttr = ''

    #Creating table as per requirement
    sql = '''CREATE TABLE "%s"( ''' % (tableName)
    for name in columns:
        dataType = data[name]

        if name == data['latName'] or name == data['longName']:
            continue

        # Compare the information of selecting data keys with the information from choosing a foreign key
        if dataType == 'None':
            if name in foreignData.keys():
                dataType = foreignData[name]['type']
            else:
                return [
                    None, None, None,
                    "The column %s doesn't have a data type" % (name)
                ]
        else:
            if name in foreignData.keys():
                if dataType != foreignData[name]['type']:
                    return [
                        None, None, None,
                        "The data type of the column %s you choose is different from the foreign table"
                        % (name)
                    ]

        if 'timestamp' in dataType.lower():
            timeDimensionAttr = name

        sql += """ "%s" %s""" % (name, dataType)
        if name == data['table2primaryKey']:
            sql += ' PRIMARY KEY'
        sql += ','
        keys[name] = name

    if coordenatePoints:
        name = data['nameCoord']
        if name == '':
            name = 'location'

        if 'coord' in foreignData.keys():
            if foreignData['coord']['type'] != 'geography(POINT)':
                return [
                    None, None, None,
                    "The data type of the column coord should be GEOGRAPHY(POINT) but it is a foreign key to a foreign column with different data type"
                ]

        sql += """ "%s" geography(POINT)""" % (name)
        if 'coord' == data['table2primaryKey']:
            sql += ' PRIMARY KEY'
        sql += ","
        keys['coord'] = name

    foreignOptions = []

    for name in foreignData:
        auxname = name
        if auxname == 'coord':
            auxname = data['nameCoord']
            if auxname == '':
                auxname = 'location'
        sql += """ FOREIGN KEY ("%s") REFERENCES %s (%s),""" % (
            auxname, foreignData[name]['table'], foreignData[name]['column'])
        foreignOptions.append({
            'f_col': foreignData[name]['column'],
            'col': auxname,
            'f_table': foreignData[name]['table'],
            'table': tableName
        })

    sql = sql[:len(sql) - 1] + ')'

    resp_sql = sqlExecute(Database, db_user, db_password, db_host, db_port,
                          sql, False)

    return [resp_sql, foreignOptions, timeDimensionAttr, '']
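
# Usage sketch for the four-element return value above: [sqlExecute response,
# foreignOptions for createView, name of the timestamp column, error message].
# The arguments mirror the ones described for createMappingTable.
def _example_create_table(tableName, columns, data_df, data, foreignData,
                          datum_code):
    keys = {}
    resp, foreignOptions, time_attr, error = create_table_excel_general(
        tableName, columns, data_df, keys, data, True, datum_code, foreignData,
        DB_NAME, db_user, db_password, db_host, db_port)
    if error != '' or resp is None or not resp['success']:
        return None
    if foreignOptions:
        # Link the new table to its foreign tables through a view.
        return createView(DB_NAME, db_user, db_password, db_host, db_port,
                          foreignOptions)
    return tableName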