Example #1
def ui_interface(request, template='WebAPA/ui_interface.html'):
    """
        GET - Renders the interface where the user enters the year used to retrieve data from the APA site
        POST - For the given year parameter, uploads the data from the APA site
    """

    if request.method == 'GET':
        return render(request,
                      template,
                      context={
                          "path_update": reverse("APA_ui_interface"),
                          "path_verification": reverse("Verify_data_year"),
                      })

    if request.method == 'POST':
        try:
            year = request.POST['year']
        except KeyError:  # no 'year' supplied: update all available years
            log_logs_file("Start the update APA DATA all years available")
            msg = save_APA_Data()
            log_logs_file("End the update APA DATA all years available")
            return HttpResponse(content_type="text/plain", content=msg)

        msg = upload_data(year)
        return HttpResponse(content_type="text/plain", content=msg)

    return HttpResponse(status=404)
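The reverse() calls above assume URL patterns named APA_ui_interface and Verify_data_year. A minimal sketch of how such a urls.py might be wired, with the module path and the verification view name as assumptions, is:

from django.urls import path

from WebAPA import views

urlpatterns = [
    # Names must match the reverse() calls in ui_interface; views.verify_data_year
    # is a hypothetical name for the verification endpoint.
    path('webapa/', views.ui_interface, name='APA_ui_interface'),
    path('webapa/verify/', views.verify_data_year, name='Verify_data_year'),
]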
Example #2
def check_table_specific(year):
    """
        Verify that the table already exists in the database.\n
        It also checks whether the table already has data for the given "year".
    """
    tableCharacteristics = """ "id_rede" INT, "id_estacao" INT, "id_gas" VARCHAR(255), "date" timestamp, "value" FLOAT"""
    check = check_table(DB_NAME,
                        TABLENAME,
                        tableCharacteristics,
                        db_user,
                        db_password,
                        db_host,
                        db_port,
                        delete=False)

    if check:
        sql = """select count(id_estacao) from "%s" where EXTRACT(year from date) = %s """ % (
            TABLENAME, year)
        resp = sqlExecute(DB_NAME, db_user, db_password, db_host, db_port, sql,
                          True)
        if resp['success']:
            if resp['data'][0][0] == 0:
                return None
            else:
                msg = """The database already have the year {0}.\n
                Use this command: DELETE from "tablename" where EXTRACT(year from date) = {0}.""".format(
                    year)
                log_logs_file(msg)
                return msg

    return "Check the Logs"
Example #3
def copy_excel_database(directory,
                        fileName,
                        Database,
                        tableName,
                        tableCharacteristics,
                        functionInsert,
                        db_user,
                        db_password,
                        db_host,
                        db_port,
                        sheet_name=None,
                        matrix=False,
                        delete=True,
                        create=True,
                        information={},
                        cv=False):
    """
        Copy the data from an Excel (or CSV) file to a table in the database

        Steps:
            - Verify that the table exists
            - Open the Excel/CSV file
            - Build the SQL statements that upload the data
            - Execute the SQL statements
    """

    if check_table(Database, tableName, tableCharacteristics, db_user,
                   db_password, db_host, db_port, delete, create):
        path = directory + fileName
        if cv:
            # CSV files have no sheets, so sheet_name is not used here
            dataframe = pd.read_csv(path)
        else:
            if sheet_name is not None:
                dataframe = pd.read_excel(path, sheet_name=sheet_name)
            else:
                dataframe = pd.read_excel(path)

        if matrix:
            dataframe = dataframe.to_numpy()

        sql = functionInsert(tableName, dataframe, fileName, information)

        if sql != '':
            resp = sqlExecute(Database, db_user, db_password, db_host, db_port,
                              sql, False)

            if not resp['success']:
                log_logs_file('Error: {}'.format(resp['data']))

            return resp
        else:
            log_logs_file("Sql statement is empty")

            return None
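functionInsert is expected to accept (tableName, dataframe, fileName, information) and return a string of SQL statements (the real callback used later is save_to_database). A minimal hypothetical callback and call, assuming a sheet with "name" and "value" columns, might be:

def insert_name_value(tableName, dataframe, fileName, information):
    """Hypothetical functionInsert for a sheet with "name" and "value" columns."""
    sql = ''
    for _, row in dataframe.iterrows():
        sql += 'INSERT INTO "%s"("name", "value") VALUES (\'%s\', %f); ' % (
            tableName, row['name'], float(row['value']))
    return sql

# Hypothetical usage:
# copy_excel_database('/tmp/', 'data.xlsx', Database, 'my_table',
#                     '"name" VARCHAR(255), "value" FLOAT',
#                     insert_name_value, db_user, db_password, db_host, db_port)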
Example #4
def stations_loc(path):
    """
        Upload the station location data to the database and GeoServer
    """

    tableName = "stations_location"
    copy_excel_database(
        '', path, Database, tableName,
        """ "Id_rede" INT, "Id_estacao" INT, "Name" VARCHAR(255), "Location" geography(POINT), "Tipo" VARCHAR(255)""",
        create_sql_stations_loc, db_user, db_password, db_host, db_port)

    xml = '<featureType><name>%s</name></featureType>' % (tableName)
    workspace = 'APA_EMEP_Data'
    datastore = 'Database'

    database_configuration = {
        "dataStore": {
            "name": "{}".format(datastore),
            "connectionParameters": {
                "entry": [{
                    "@key": "host",
                    "$": "{}".format(global_settings.POSTGRESQL_HOST)
                }, {
                    "@key": "port",
                    "$": "{}".format(global_settings.POSTGRESQL_PORT)
                }, {
                    "@key": "database",
                    "$": "{}".format(Database)
                }, {
                    "@key": "user",
                    "$": "{}".format(global_settings.POSTGRESQL_USERNAME)
                }, {
                    "@key": "passwd",
                    "$": "{}".format(global_settings.POSTGRESQL_PASSWORD)
                }, {
                    "@key": "dbtype",
                    "$": "postgis"
                }]
            }
        }
    }

    if checkWorkspace(workspace, gs_rest_url, user, pwd, False):
        if verify_datastore(workspace, datastore, gs_rest_url, user, pwd,
                            database_configuration):
            if checkFeature(workspace, datastore, tableName, gs_rest_url, user,
                            pwd):
                r = requests.post('{0}/workspaces/{1}/datastores/{2}/featuretypes'\
                    .format(gs_rest_url, workspace,datastore),
                                auth=HTTPBasicAuth(user, pwd),
                                data=xml,
                                headers=headers_xml
                                )
                log_logs_file("Upload Layer {}".format(r.status_code))
Example #5
def update_data_all_years(request):
    """
        Builds a year interval from the request parameters and uploads the APA data from the APA site for every year in it
    """

    if request.method == 'GET':
        try:
            min_year = int(request.GET['min_year'])
            max_year = int(request.GET['max_year'])
        except Exception as e:
            log_error_file(str(e))
            log_logs_file("Start the update APA DATA all years available")
            save_APA_Data()
            log_logs_file("End the update APA DATA all years available")
            return HttpResponse(content_type="text/plain", content="Success")

        steps = max_year - min_year
        if steps >= 0:
            log_logs_file("Start the update APA DATA between {} and {}".format(
                min_year, max_year))
            for i in range(steps + 1):
                upload_data(min_year + i)

            log_logs_file("End the update APA DATA between {} and {}".format(
                min_year, max_year))
            return HttpResponse(content_type="text/plain", content="Success")

    return HttpResponse(status=404)
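Assuming this view is routed at a path such as /update_data_all_years/ (the actual URL pattern is not shown), the interval update could be triggered like this:

import requests

# Hypothetical route and host; only min_year/max_year come from the view above.
resp = requests.get('http://localhost:8000/update_data_all_years/',
                    params={'min_year': 2018, 'max_year': 2021})
print(resp.status_code, resp.text)  # "Success" is returned for a valid interval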
Example #6
def save_APA_Data(year=None):
    """
        For a specific year, save the data from all stations to the database named "APA_EMEP_DATA".\n
        All the data is saved in the same table, "<TABLENAME>".\n
        The data is retrieved from https://qualar1.apambiente.pt/qualar/excel_new.php?excel=1
    """
    if year is None:
        sql = "drop table {}".format(global_settings.APA_TABLE_NAME)
        sqlExecute(DB_NAME, db_user, db_password, db_host, db_port, sql, False)

    inputs_information = extract_all_information()
    gas_info = pd.read_excel(global_settings.PATH_EXCEL_GAS)
    gas_names = gas_info['Gas'].tolist()
    str_estacoes = ""
    tableCharacteristics = """ "id_rede" INT, "id_estacao" INT, "id_gas" VARCHAR(255), "date" timestamp, "value" FLOAT"""

    count_download_excels = 0

    information = {
        'gas_info': gas_info,
        'gas_names': gas_names,
        'str_estacoes': "",
    }

    # for index, row in estacoes_info.iterrows():
    for input_info in inputs_information:
        if year is None:
            years = extract_year_for_station(input_info[0], input_info[1])
            # Use a separate loop variable so the "year is None" check still
            # holds for the remaining stations.
            for station_year in years:
                count_download_excels += 1
                extract_and_saved_excel(input_info[0], input_info[1],
                                        station_year, information,
                                        tableCharacteristics)
        else:
            count_download_excels += 1
            extract_and_saved_excel(input_info[0], input_info[1], year,
                                    information, tableCharacteristics)

    if year is None:
        year = "todos os anos possíveis"

    # Stations that failed are collected in information['str_estacoes'];
    # report success unless every downloaded excel failed.
    failed_stations = [s for s in information['str_estacoes'].split(", ") if s]
    if len(failed_stations) != count_download_excels:
        log_logs_file("Success for year {}".format(year))
        return "Success for year {}".format(year)

    log_logs_file(
        "It was not possible to update the data for year {}.".format(year))
    return "It was not possible to update the data for year {}.".format(year)
Example #7
def save_to_database(tableName, dataframe, fileName, information):
    """
        Build the SQL INSERT statements that save the dataframe to the database
    """

    gas_info = information['gas_info']
    Id_rede = information['Id_rede']
    Id_estacao = information['Id_estacao']
    gas_names = information['gas_names']

    sql = ''

    if len(dataframe.columns) < 2:
        information['str_estacoes'] += '{}'.format(Id_estacao) + ', '
        return

    key_value = '''"id_rede", "id_estacao" , "id_gas", "date", "value"'''
    for column in dataframe.columns:
        if 'Data' != column:
            try:
                if column[-1] == ' ':
                    index = gas_names.index(column[:-1])
                else:
                    index = gas_names.index(column)
            except Exception as e:
                log_logs_file(str(e))
                continue
            for i in range(len(dataframe[column])):
                if pd.isnull(dataframe[column][i]) or pd.isnull(
                        dataframe['Data'][i]):
                    continue
                if is_number_tryexcept(dataframe[column][i]):
                    value = "%s, %s, '%s','%s', %f" % (
                        Id_rede, Id_estacao, gas_info['Id'][index],
                        dataframe['Data'][i],
                        float(dataframe[column][i].replace(',', '.')))
                    sql += '''INSERT INTO "%s"(%s) VALUES (%s); ''' % (
                        tableName, key_value, value)

    if sql == "":
        information['str_estacoes'] += '{}'.format(Id_estacao) + ', '

    return sql
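is_number_tryexcept is referenced but not defined in these examples. Given that accepted values are then converted with float(...replace(',', '.')), a plausible sketch is:

def is_number_tryexcept(value):
    """Sketch: True when the cell can be parsed as a number, accepting a
    decimal comma (assumption based on the conversion used above)."""
    try:
        float(str(value).replace(',', '.'))
        return True
    except (TypeError, ValueError):
        return False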
Example #8
def publish_resource_geoserver(workspace, datastore, database, tableName,
                               timeDimesionAttr):
    headers_xml = {'Content-Type': 'application/xml; charset=UTF-8'}
    xml = '<featureType><name>%s</name></featureType>' % (tableName)

    database_configuration = {
        "dataStore": {
            "name": "{}".format(datastore),
            "connectionParameters": {
                "entry": [{
                    "@key": "host",
                    "$": "{}".format(global_settings.POSTGRESQL_HOST)
                }, {
                    "@key": "port",
                    "$": "{}".format(global_settings.POSTGRESQL_PORT)
                }, {
                    "@key": "database",
                    "$": "{}".format(database)
                }, {
                    "@key": "user",
                    "$": "{}".format(global_settings.POSTGRESQL_USERNAME)
                }, {
                    "@key": "passwd",
                    "$": "{}".format(global_settings.POSTGRESQL_PASSWORD)
                }, {
                    "@key": "dbtype",
                    "$": "postgis"
                }, {
                    "@key": "Expose primary keys",
                    "$": "true"
                }]
            }
        }
    }

    if verify_datastore(workspace, datastore, gs_rest_url, user, pwd,
                        database_configuration):

        r = requests.post('{0}/workspaces/{1}/datastores/{2}/featuretypes'\
            .format(gs_rest_url, workspace,datastore),
                        auth=HTTPBasicAuth(user, pwd),
                        data=xml,
                        headers=headers_xml
                        )
        if r.status_code == 201:
            log_logs_file(
                "Success uploading table {} to geoserver".format(tableName))

            if timeDimesionAttr != '':
                data_xml = '<featureType>\
                        <enabled>true</enabled>\
                        <metadata><entry key="time">\
                        <dimensionInfo>\
                        <enabled>true</enabled>\
                        <attribute>%s</attribute>\
                        <presentation>LIST</presentation>\
                        <units>ISO8601</units>\
                        <defaultValue>\
                        <strategy>MINIMUM</strategy>\
                        </defaultValue>\
                        </dimensionInfo>\
                        </entry></metadata>\
                        </featureType>' % (timeDimesionAttr)

                r = requests.put('{0}/workspaces/{1}/datastores/{2}/featuretypes/{3}'\
                    .format(gs_rest_url, workspace, datastore, tableName),
                                auth=HTTPBasicAuth(user, pwd),
                                data=data_xml,
                                headers=headers_xml
                                )
        else:
            log_logs_file(
                "Error uploading table {} to geoserver".format(tableName))
Example #9
def addNewEntries(request, template='GeoExcel/error_page.html'):
    """
        Add new entries to a specific table
    """
    if request.method == 'POST':
        Database = global_settings.GENERIC_SAVE_EXCEL_DB
        db_user = global_settings.POSTGRESQL_USERNAME
        db_password = global_settings.POSTGRESQL_PASSWORD
        db_host = global_settings.POSTGRESQL_HOST
        db_port = global_settings.POSTGRESQL_PORT

        path = request.POST['path']
        tableName = request.POST['tableName']
        rec = getMappingTable(Database, db_user, db_password, db_host, db_port,
                              tableName)

        if not rec['success']:
            return render(request, template, {
                "action": reverse('layer_upload'),
                "error": "%s" % (rec['data'])
            })
        mapping = rec['data']
        data_df = pandas.read_csv(path)
        columns = data_df.columns.ravel()

        data = mapping['variables']

        data.update(mapping['coordenateInfo'])

        keys = {}

        if mapping['coordenatePoints']:
            datum_code = data["datum"]
            data[data['latName']] = 'None'
            data[data['longName']] = 'None'
            keys['coord'] = data['colName']
            location_col_name = data['colName']
        else:
            datum_code = 0
            location_col_name = None

        for name in columns:
            if mapping['coordenatePoints']:
                if name == data['latName'] or name == data['longName']:
                    continue
            keys[name] = name

        resp_sql = InsertValues(path, keys, data, mapping['coordenatePoints'],
                                datum_code, tableName, Database, db_user,
                                db_password, db_host, db_port)

        update_layer(tableName, location_col_name)

        #Remove the tmp file
        os.remove(path)

        if not resp_sql['success']:
            error_msg = 'Error Insert Values: {}'.format(resp_sql['data'])
            log_logs_file(error_msg)

            return render(request, template, {
                "action": reverse('layer_upload'),
                "error": error_msg
            })

        return redirect('layer_upload')

    return render(
        request, template, {
            "action": reverse('layer_upload'),
            "error": "Wrong method to access %s" % (reverse('addNewEntries'))
        })
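The mapping record returned by getMappingTable is not shown; based on the keys read above, its data payload presumably has roughly this shape:

# Hypothetical shape of rec['data'], inferred only from the keys used above.
mapping = {
    'coordenatePoints': True,      # whether the table stores point geometries
    'variables': {                 # column name -> declared data type
        'temperature': 'FLOAT',
    },
    'coordenateInfo': {            # only meaningful when coordenatePoints is True
        'latName': 'lat',
        'longName': 'lon',
        'colName': 'Location',     # geography column in the target table
        'datum': '4326',
    },
}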
Example #10
def submitTable(request, template='GeoExcel/error_page.html'):
    """
        Convert the Excel file to a table and upload it to the database
    """
    if request.method == 'POST':
        coordenatePoints = False
        data_UI = request.POST
        keys = {}

        path = data_UI['path']
        tableName = data_UI['tableName']
        foreignKeys = int(data_UI['foreignKeys'])
        foreignData = {}
        data_dataframe = pandas.read_csv(path)
        columns = data_dataframe.columns.ravel()

        Database = global_settings.GENERIC_SAVE_EXCEL_DB
        db_user = global_settings.POSTGRESQL_USERNAME
        db_password = global_settings.POSTGRESQL_PASSWORD
        db_host = global_settings.POSTGRESQL_HOST
        db_port = global_settings.POSTGRESQL_PORT

        if tableName == '' or path == '':
            return render(
                request, template, {
                    "action": reverse('layer_upload'),
                    "error": "One of the inputs is None"
                })

        tables_database_dict = searchTablesByDatabase(
            global_settings.GENERIC_SAVE_EXCEL_DB, db_user, db_password,
            db_host, db_port)

        if tables_database_dict['success']:
            if tableName in tables_database_dict['data']:
                return render(
                    request, template, {
                        "action": reverse('layer_upload'),
                        "error": "Table {} already exists".format(tableName)
                    })

        if ('None' != data_UI['latName'] and 'None'
                == data_UI['longName']) or ('None' == data_UI['latName']
                                            and 'None' != data_UI['longName']):
            return render(
                request, template, {
                    "action":
                    reverse('layer_upload'),
                    "error":
                    "One of the fields of the 1st Step is None while the other field is not None"
                })

        datum_code = '4326'
        if data_UI['datum'] != 'None':
            datum = datumCode(data_UI['datum'], "epsg", db_user, db_password,
                              db_host, db_port)
            if datum['success']:
                datum_code = datum['data']

        for i in range(1, foreignKeys + 1):
            if ( not (data_UI['column3Step%s'%(i)] != 'None' and data_UI['tableForeign%s'%(i)] != 'None' and data_UI['columnForeign%s'%(i)] != 'None')) and \
                ( not (data_UI['column3Step%s'%(i)] == 'None' and data_UI['tableForeign%s'%(i)] == 'None' and data_UI['columnForeign%s'%(i)] == 'None')):

                return render(
                    request, template, {
                        "action":
                        reverse('layer_upload'),
                        "error":
                        "One of the fields of the 3st Step is None while the other field is not None"
                    })

        if ('None' != data_UI['latName'] and 'None' != data_UI['longName']):
            coordenatePoints = True

        # Build a dictionary with the information about the foreign keys; if the input is inconsistent, return an error
        for i in range(1, foreignKeys + 1):
            if (data_UI['column3Step%s' % (i)] == 'None'
                    or data_UI['tableForeign%s' % (i)] == 'None'
                    or data_UI['columnForeign%s' % (i)] == 'None'
                    or data_UI['dataType%s' % (i)] == 'None'):
                continue
            # If we only want each column to have a single foreign key
            if data_UI['column3Step%s' % (i)] in foreignData.keys():
                return render(
                    request, template, {
                        "action": reverse('layer_upload'),
                        "error":
                        "Choose the same column to be multiple column "
                    })
            else:
                foreignData[data_UI['column3Step%s' % (i)]] = {
                    'type': data_UI['dataType%s' % (i)],
                    'table': data_UI['tableForeign%s' % (i)],
                    'column': data_UI['columnForeign%s' % (i)]
                }

        #Creating table as per requirement
        resp_sql, foreignOptions, timeDimesionAttr, error_msg = create_table_excel_general(
            tableName, columns, data_dataframe, keys, data_UI,
            coordenatePoints, datum_code, foreignData, Database, db_user,
            db_password, db_host, db_port)
        if error_msg != "":
            return render(request, template, {
                "action": reverse('layer_upload'),
                "error": error_msg
            })

        if not resp_sql['success']:
            error_msg = 'Error Creating Table: {}'.format(resp_sql['data'])
            log_logs_file(error_msg)

            return render(request, template, {
                "action": reverse('layer_upload'),
                "error": error_msg
            })

        if len(foreignOptions) > 0:
            tableView = createView(Database, db_user, db_password, db_host,
                                   db_port, foreignOptions)
        else:
            tableView = 'None'

        resp_sql = createMappingTable(data_UI, tableName, coordenatePoints,
                                      columns, foreignData, datum_code,
                                      tableView, Database, db_user,
                                      db_password, db_host, db_port)

        if not resp_sql['success']:
            error_msg = 'Error Insert Values: {}'.format(resp_sql['data'])
            log_logs_file(error_msg)

            return render(request, template, {
                "action": reverse('layer_upload'),
                "error": error_msg
            })

        resp_sql = InsertValues(path, keys, data_UI, coordenatePoints,
                                datum_code, tableName, Database, db_user,
                                db_password, db_host, db_port, foreignData)

        if resp_sql is None:
            error_msg = 'Error Insert Values: the generated SQL statement is empty.'
            log_logs_file(error_msg)

            error_msg += '\nCaution: Check if the data type is chosen correctly.'
            return render(request, template, {
                "action": reverse('layer_upload'),
                "error": error_msg
            })

        if not resp_sql['success']:
            error_msg = 'Error Insert Values: {}'.format(resp_sql['data'])
            log_logs_file(error_msg)

            return render(request, template, {
                "action": reverse('layer_upload'),
                "error": error_msg
            })

        #Remove the tmp file
        os.remove(path)

        if tableView == 'None':
            save_layer(tableName, timeDimesionAttr)
        else:
            save_layer(tableView, timeDimesionAttr)

        return redirect('layer_upload')

    return render(
        request, template, {
            "action": reverse('layer_upload'),
            "error": "Wrong method to access %s" % (reverse('submitTable'))
        })