Example #1
def folderShp_Intersection(inFolder, intFeatures, outFolder):
    """
    Intersect all feature classes in a folder with the feature classes
    listed in the argument intFeatures (path to the file).
    """

    import os

    import arcpy

    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.oss.ops import create_folder

    # Environment
    arcpy.env.overwriteOutput = True
    # Workspace
    arcpy.env.workspace = inFolder

    if not isinstance(intFeatures, list):
        intFeatures = [intFeatures]

    if not os.path.exists(outFolder):
        create_folder(outFolder)

    # List feature classes in inFolder
    fc_infld = arcpy.ListFeatureClasses()

    # Create Layer objects
    lyr_infld = [feat_lyr(os.path.join(inFolder, str(fc))) for fc in fc_infld]
    lyr_intFeat = [feat_lyr(fc) for fc in intFeatures]

    # Intersect things
    # NOTE: intersect is not imported in this excerpt; it is expected to
    # come from this module's ArcGIS overlay helpers
    for i in range(len(lyr_infld)):
        intersect([lyr_infld[i]] + lyr_intFeat,
                  os.path.join(outFolder, os.path.basename(str(fc_infld[i]))))
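
A minimal usage sketch (paths are hypothetical; requires an ArcGIS/arcpy environment):

# Intersect every shapefile in a folder with a reference roads layer
folderShp_Intersection('/data/shps', '/data/ref/roads.shp', '/data/out')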
Example #2
def foldershp_to_foldershp(inFld,
                           outFld,
                           destiny_file_format,
                           file_format='.shp',
                           useApi='ogr'):
    """
    Execute shp_to_shp for every file in inFld (path to folder)
    
    useApi options:
    * ogr;
    """

    import os
    from gasp.oss import list_files, get_filename
    # Imported here so the excerpt is self-contained
    from gasp.to.shp import shp_to_shp

    if not os.path.exists(outFld):
        from gasp.oss.ops import create_folder
        create_folder(outFld)

    geo_files = list_files(inFld, file_format=file_format)

    for f in geo_files:
        # Ensure the destination extension carries exactly one leading dot
        shp_to_shp(f, os.path.join(outFld, '{}{}'.format(
            get_filename(f),
            destiny_file_format if destiny_file_format[0] == '.'
            else '.' + destiny_file_format
        )), gisApi=useApi)

    return outFld
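
A minimal usage sketch with hypothetical folders; the output extension may be passed with or without the leading dot:

# Convert every .shp in /data/shps to GeoJSON using the OGR API
foldershp_to_foldershp('/data/shps', '/data/geojson', '.geojson', useApi='ogr')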
Example #3
def bash_matrix_od(origins, destinationShp, network, costCol, oneway, grsWork,
                   output):
    """
    Produce matrix OD using GRASS GIS - BASH MODE
    """

    import os

    from gasp.session import run_grass
    from gasp.oss import get_filename
    from gasp.oss.ops import create_folder
    from gasp.mng.split import splitShp_by_range
    from gasp.mng.gen import merge_feat

    # SPLIT ORIGINS IN PARTS
    originsFld = create_folder(os.path.join(grsWork, 'origins_parts'))

    originsList = splitShp_by_range(origins, 100, originsFld)

    # Open a GRASS GIS Session
    # NOTE: grsLoc was undefined in this excerpt; a base location name
    # is assumed here
    grsLoc = 'grs_loc'
    gbase = run_grass(grsWork,
                      grassBIN="grass76",
                      location=grsLoc,
                      srs=network)

    import grass.script as grass
    import grass.script.setup as gsetup

    RESULTS = []
    R_FOLDER = create_folder(os.path.join(grsWork, 'res_parts'))

    for e in range(len(originsList)):
        gsetup.init(gbase, grsWork, "grs_loc_{}".format(e), 'PERMANENT')

        from gasp.to.shp.grs import shp_to_grs, grs_to_shp

        # Add Data to GRASS GIS
        rdvMain = shp_to_grs(network, get_filename(network, forceLower=True))

        # Produce Matrix
        # (prod_matrix is expected to be defined elsewhere in this module)
        result_part = prod_matrix(originsList[e], destinationShp, rdvMain,
                                  costCol, oneway)

        # Export Result
        shp = grs_to_shp(result_part,
                         os.path.join(R_FOLDER, result_part + '.shp'),
                         geom_type="line",
                         lyrN=3)

        RESULTS.append(shp)

    merge_feat(RESULTS, output, api='pandas')

    return output
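
A minimal usage sketch with hypothetical inputs (requires GRASS GIS 7.6; the cost and oneway columns must exist in the network table):

bash_matrix_od('/data/origins.shp', '/data/destinations.shp',
               '/data/network.shp', 'cost_min', 'oneway',
               '/data/grswork', '/data/matrix_od.shp')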
Example #4
File: exct.py Project: zonakre/gasp
def clip_by_feature(inputFeatures,
                    clipFeatures,
                    folderOutputs,
                    base_name,
                    clip_feat_id='FID'):
    """
    Clip inputFeatures for each feature in the clipFeatures layer
    Store all produced layers in the folderOutputs.
    """

    import os

    import arcpy

    from gasp.oss.ops import create_folder
    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.cpu.arcg.mng.fld import type_fields

    # select_by_attr and clip are defined elsewhere in this file (exct.py)

    # ########### #
    # Environment #
    # ########### #
    arcpy.env.overwriteOutput = True

    # ################ #
    # Now, it's for real #
    # ################ #
    inputLyr = feat_lyr(inputFeatures)
    clipLyr = feat_lyr(clipFeatures)

    if not os.path.exists(folderOutputs):
        create_folder(folderOutputs)

    wTmp = create_folder(os.path.join(folderOutputs, 'tmp_clip'))

    # Get id's field type
    fld_type = type_fields(clipLyr, field=str(clip_feat_id))

    expression = '{fld}=\'{_id}\'' if str(fld_type) == 'String' else \
        '{fld}={_id}'

    c = arcpy.SearchCursor(clipLyr)
    l = c.next()
    while l:
        fid = str(l.getValue(clip_feat_id))

        selection = select_by_attr(
            clipLyr, expression.format(fld=clip_feat_id, _id=fid),
            os.path.join(wTmp, 'clp_{}.shp'.format(fid)))

        clip_f = clip(
            inputLyr, selection,
            os.path.join(folderOutputs, '{}_{}.shp'.format(base_name, fid)))

        l = c.next()
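
A minimal usage sketch with hypothetical paths; one clipped shapefile is produced per feature in the clip layer:

clip_by_feature('/data/landuse.shp', '/data/municipalities.shp',
                '/data/clips', 'landuse', clip_feat_id='FID')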
Example #5
def copy_fromdb_todb2(conFrom, conTo, tables):
    """
    Send PGSQL Tables from one database to another using
    pg_dump and pg_restore
    """

    import os
    from gasp import goToList
    from gasp.oss.ops import create_folder, del_folder
    from gasp.sql.mng.tbl import dump_table
    from gasp.sql.mng.tbl import restore_table

    tmpFolder = create_folder(os.path.dirname(os.path.abspath(__file__)),
                              randName=True)

    tables = goToList(tables)

    for table in tables:
        # Dump
        sqlScript = dump_table(conFrom, table,
                               os.path.join(tmpFolder, table + ".sql"))

        # Restore
        tblname = restore_table(conTo, sqlScript, table)

    del_folder(tmpFolder)
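
A minimal usage sketch; the connection dicts below are placeholders for real PostgreSQL credentials:

con_from = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
            'PASSWORD': '***', 'DATABASE': 'db_source'}
con_to = dict(con_from, DATABASE='db_target')
copy_fromdb_todb2(con_from, con_to, ['roads', 'points'])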
Example #6
def summarize_table_fields(table,
                           outFld,
                           fld_name_fld_name=None,
                           __upper=False):
    """
    Summarize all fields in a table
    """

    import os

    from gasp import exec_cmd
    from gasp.oss.ops import create_folder

    # List table fields (lst_fld is expected to be available in this module)
    fields = lst_fld(table)

    # For each field, query data to summarize the values in the field
    cmd = 'ogr2ogr {o} {i} -dialect sqlite -sql "{s};"'

    if not os.path.exists(outFld):
        create_folder(outFld)

    for field in fields:
        outTbl = os.path.join(outFld, '{}.dbf'.format(field))

        outcmd = exec_cmd(
            cmd.format(i=table,
                       o=outTbl,
                       s='SELECT {f_}{f} FROM {t} GROUP BY {f}'.format(
                           f=field,
                           t=os.path.splitext(os.path.basename(table))[0],
                           f_='' if not fld_name_fld_name else
                           '{}, '.format(fld_name_fld_name))))
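
A minimal usage sketch (hypothetical table); one .dbf with the grouped values is written per field:

summarize_table_fields('/data/parcels.shp', '/data/summaries')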
Example #7
File: lyrs.py Project: zonakre/gasp
def publish_raster_layer(layername,
                         datastore,
                         workspace,
                         epsg_code,
                         conf={
                             'USER': '******',
                             'PASSWORD': '******',
                             'HOST': 'localhost',
                             'PORT': '8888'
                         },
                         protocol='http'):
    """
    Publish a Raster layer
    """

    import os
    import requests
    from gasp.to.Xml import write_xml_tree
    from gasp import random_str
    from gasp.oss.ops import create_folder, del_folder
    from gasp.prop.prj import epsg_to_wkt

    url = ('{pro}://{host}:{port}/geoserver/rest/workspaces/{work}/'
           'coveragestores/{storename}/coverages').format(host=conf['HOST'],
                                                          port=conf['PORT'],
                                                          work=workspace,
                                                          storename=datastore,
                                                          pro=protocol)

    # Create obj with data to be written in the xml
    xmlTree = {
        "coverage": {
            "name": layername,
            "title": layername,
            "nativeCRS": str(epsg_to_wkt(epsg_code)),
            "srs": 'EPSG:{}'.format(str(epsg_code)),
        }
    }

    # Write XML
    wTmp = create_folder(
        os.path.join(os.path.dirname(os.path.abspath(__file__)),
                     random_str(7)))

    xml_file = write_xml_tree(xmlTree, os.path.join(wTmp, 'rst_lyr.xml'))

    # Create layer
    with open(xml_file, 'rb') as f:
        r = requests.post(url,
                          data=f,
                          headers={'content-type': 'text/xml'},
                          auth=(conf['USER'], conf['PASSWORD']))

    del_folder(wTmp)

    return r
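
A minimal usage sketch; the workspace, store, EPSG code, and credentials are placeholders for a real GeoServer instance:

resp = publish_raster_layer(
    'dem', 'dem_store', 'my_workspace', 3763,
    conf={'USER': 'admin', 'PASSWORD': '***',
          'HOST': 'localhost', 'PORT': '8080'})
print(resp.status_code)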
Example #8
File: lyrs.py Project: zonakre/gasp
def publish_postgis_layer(workspace,
                          store,
                          pg_table,
                          title=None,
                          gs_con={
                              'USER': '******',
                              'PASSWORD': '******',
                              'HOST': 'localhost',
                              'PORT': '8888'
                          },
                          protocol='http'):
    """
    Publish PostGIS table in geoserver
    """

    import os
    import requests

    from gasp.oss.ops import create_folder, del_folder
    from gasp import random_str
    from gasp.to.Xml import write_xml_tree

    # Create folder to write xml
    wTmp = create_folder(
        os.path.join(os.path.dirname(os.path.abspath(__file__)),
                     random_str(7)))

    # Create obj with data to be written in the xml
    lyr_title = "Title {}".format(pg_table) if not title else title
    elements = {"featureType": {"name": pg_table, "title": lyr_title}}

    # Write the xml
    xml_file = write_xml_tree(elements,
                              os.path.join(wTmp, '{}.xml'.format(pg_table)))

    # Create Geoserver Layer
    url = ('{pro}://{host}:{port}/geoserver/rest/workspaces/{wname}/'
           'datastores/{store_name}/featuretypes').format(host=gs_con['HOST'],
                                                          port=gs_con['PORT'],
                                                          wname=workspace,
                                                          store_name=store,
                                                          pro=protocol)

    with open(xml_file, 'rb') as __xml:
        r = requests.post(url,
                          data=__xml,
                          headers={'content-type': 'text/xml'},
                          auth=(gs_con['USER'], gs_con['PASSWORD']))

    del_folder(wTmp)

    return r
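
A minimal usage sketch; workspace, store, table, and credentials are placeholders:

resp = publish_postgis_layer(
    'my_workspace', 'pg_store', 'roads', title='Roads',
    gs_con={'USER': 'admin', 'PASSWORD': '***',
            'HOST': 'localhost', 'PORT': '8080'})
print(resp.status_code)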
Example #9
def clip_each_feature(rst, shp, feature_id, work, out_basename):
    """
    Clip a raster dataset for each feature in a feature class
    """

    import arcpy
    import os

    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.cpu.arcg.lyr import rst_lyr
    from gasp.cpu.arcg.anls.exct import select_by_attr
    from gasp.oss.ops import create_folder

    # ########### #
    # Environment #
    # ########### #
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = work

    # ###### #
    # Do it! #
    # ###### #
    # Open feature class
    lyr_shp = feat_lyr(shp)
    lyr_rst = rst_lyr(rst)

    # Create folder for some temporary files
    wTmp = create_folder(os.path.join(work, 'tmp'))

    # Get id's field type
    fld_type = None
    fields = arcpy.ListFields(lyr_shp)
    for f in fields:
        if str(f.name) == str(feature_id):
            fld_type = f.type
            break

    expression = '{fld}=\'{_id}\'' if str(fld_type) == 'String' else \
        '{fld}={_id}'

    del fields, f

    # Run the clip tool for each feature in the shp input
    # (clip_raster is expected to come from this module's raster helpers)
    c = arcpy.SearchCursor(lyr_shp)
    l = c.next()
    while l:
        fid = str(l.getValue(feature_id))
        selection = select_by_attr(
            lyr_shp, expression.format(fld=feature_id, _id=fid),
            os.path.join(wTmp, 'each_{}.shp'.format(fid)))

        clip_rst = clip_raster(lyr_rst, selection,
                               '{b}_{_id}.tif'.format(b=out_basename, _id=fid))

        l = c.next()
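
A minimal usage sketch with hypothetical data; one clipped raster is produced per feature:

clip_each_feature('/data/dem.tif', '/data/basins.shp',
                  'FID', '/data/work', 'dem_basin')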
Example #10
def copy_fromdb_todb(conFromDb, conToDb, tables, qForTbl=None, api='pandas'):
    """
    Send PGSQL Tables from one database to other
    """

    from gasp import goToList

    api = 'pandas' if api not in ('pandas', 'psql') else api

    tables = goToList(tables)

    if api == 'pandas':
        from gasp.fm.sql import query_to_df
        from gasp.to.sql import df_to_db

        for table in tables:
            if not qForTbl:
                tblDf = query_to_df(conFromDb,
                                    "SELECT * FROM {}".format(table),
                                    db_api='psql')

            else:
                if table not in qForTbl:
                    tblDf = query_to_df(conFromDb,
                                        "SELECT * FROM {}".format(table),
                                        db_api='psql')

                else:
                    tblDf = query_to_df(conFromDb,
                                        qForTbl[table],
                                        db_api='psql')

            df_to_db(conToDb, tblDf, table, api='psql')

    else:
        import os
        from gasp.oss.ops import create_folder, del_folder
        from gasp.sql.mng.tbl import dump_table
        from gasp.sql.mng.tbl import restore_table

        tmpFolder = create_folder(os.path.dirname(os.path.abspath(__file__)),
                                  randName=True)

        for table in tables:
            # Dump
            sqlScript = dump_table(conFromDb, table,
                                   os.path.join(tmpFolder, table + ".sql"))

            # Restore
            tblname = restore_table(conToDb, sqlScript, table)

        del_folder(tmpFolder)
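
A minimal usage sketch; the connection dicts are placeholders, and qForTbl can override the default SELECT for specific tables:

con_from = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
            'PASSWORD': '***', 'DATABASE': 'db_source'}
con_to = dict(con_from, DATABASE='db_target')
q = {'roads': "SELECT gid, geom FROM roads WHERE oneway = 'yes'"}
copy_fromdb_todb(con_from, con_to, ['roads', 'points'], qForTbl=q,
                 api='pandas')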
Example #11
def identify_groups(folder, splitStr, groupPos, outFolder):
    """
    Identifica o grupo a que um ficheiro pertence e envia-o para uma nova
    pasta com os ficheiros que pertencem a esse grupo.
    
    Como e que o grupo e identificado?
    * O nome do ficheiro e partido em dois em funcao de splitStr;
    * O groupPos identifica qual e a parte (primeira ou segunda) que 
    corresponde ao grupo.
    """

    import os

    from gasp.oss import list_files
    from gasp.oss.ops import create_folder
    from gasp.oss.ops import copy_file

    files = list_files(folder)

    # List groups and relate files with groups:
    groups = {}
    for _file in files:
        # Split filename
        filename = os.path.splitext(os.path.basename(_file))[0]
        fileForm = os.path.splitext(os.path.basename(_file))[1]
        group = filename.split(splitStr)[groupPos]
        namePos = 1 if not groupPos else 0

        if group not in groups:
            groups[group] = [[filename.split(splitStr)[namePos], fileForm]]
        else:
            groups[group].append([filename.split(splitStr)[namePos], fileForm])

    # Create one folder for each group and put there the files related
    # with that group.
    for group in groups:
        group_folder = create_folder(os.path.join(outFolder, group))

        for filename in groups[group]:
            copy_file(
                os.path.join(
                    folder, '{a}{b}{c}{d}'.format(a=filename[0],
                                                  b=splitStr,
                                                  c=group,
                                                  d=filename[1])),
                os.path.join(group_folder, '{a}{b}'.format(a=filename[0],
                                                           b=filename[1])))
Example #12
File: num.py Project: zonakre/gasp
def osm2lulc(osmdata,
             nomenclature,
             refRaster,
             lulcRst,
             epsg=3857,
             overwrite=None,
             dataStore=None,
             roadsAPI='SQLITE'):
    """
    Convert OSM data into Land Use/Land Cover Information
    
    A matrix based approach
    
    roadsAPI Options:
    * SQLITE
    * POSTGIS
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import os
    import numpy
    import datetime
    import json
    from threading import Thread
    from osgeo import gdal
    # ************************************************************************ #
    # Dependencies #
    # ************************************************************************ #
    from gasp.fm.rst import rst_to_array
    from gasp.prop.rst import get_cellsize
    from gasp.oss.ops import create_folder, copy_file
    if roadsAPI == 'POSTGIS':
        from gasp.sql.mng.db import create_db
        from gasp.osm2lulc.utils import osm_to_pgsql
        from gasp.osm2lulc.mod2 import pg_num_roads
    else:
        from gasp.osm2lulc.utils import osm_to_sqdb
        from gasp.osm2lulc.mod2 import num_roads
    from gasp.osm2lulc.utils import osm_project, add_lulc_to_osmfeat
    from gasp.osm2lulc.mod1 import num_selection
    from gasp.osm2lulc.m3_4 import num_selbyarea
    from gasp.osm2lulc.mod5 import num_base_buffer
    from gasp.osm2lulc.mod6 import num_assign_builds
    from gasp.to.rst import array_to_raster
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    if not os.path.exists(os.path.dirname(lulcRst)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcRst)))

    conPGSQL = json.load(
        open(
            os.path.join(os.path.dirname(os.path.abspath(__file__)),
                         'con-postgresql.json'),
            'r')) if roadsAPI == 'POSTGIS' else None

    time_a = datetime.datetime.now().replace(microsecond=0)
    from gasp.osm2lulc.var import osmTableData, PRIORITIES

    workspace = os.path.join(os.path.dirname(lulcRst),
                             'num_osmto') if not dataStore else dataStore

    # Check if workspace exists:
    if os.path.exists(workspace):
        if overwrite:
            create_folder(workspace, overwrite=True)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        create_folder(workspace, overwrite=None)

    CELLSIZE = get_cellsize(refRaster, xy=False, gisApi='gdal')
    time_b = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Convert OSM file to SQLITE DB or to POSTGIS DB #
    # ************************************************************************ #
    if roadsAPI == 'POSTGIS':
        conPGSQL["DATABASE"] = create_db(conPGSQL,
                                         os.path.splitext(
                                             os.path.basename(osmdata))[0],
                                         overwrite=True)
        osm_db = osm_to_pgsql(osmdata, conPGSQL)

    else:
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(conPGSQL if roadsAPI == 'POSTGIS' else osm_db,
                        osmTableData,
                        nomenclature,
                        api=roadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        conPGSQL if roadsAPI == 'POSTGIS' else osm_db,
        epsg,
        api=roadsAPI,
        isGlobeLand=None if nomenclature != "GLOBE_LAND_30" else True)
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    mergeOut = {}
    timeCheck = {}
    RULES = [1, 2, 3, 4, 5, 7]

    def run_rule(ruleID):
        time_start = datetime.datetime.now().replace(microsecond=0)
        _osmdb = copy_file(
            osm_db,
            os.path.splitext(osm_db)[0] +
            '_r{}.sqlite'.format(ruleID)) if roadsAPI == 'SQLITE' else None
        # ******************************************************************** #
        # 1 - Selection Rule #
        # ******************************************************************** #
        if ruleID == 1:
            res, tm = num_selection(conPGSQL if not _osmdb else _osmdb,
                                    osmTableData['polygons'],
                                    workspace,
                                    CELLSIZE,
                                    epsg,
                                    refRaster,
                                    api=roadsAPI)
        # ******************************************************************** #
        # 2 - Get Information About Roads Location #
        # ******************************************************************** #
        elif ruleID == 2:
            res, tm = num_roads(
                _osmdb, nomenclature, osmTableData['lines'],
                osmTableData['polygons'], workspace, CELLSIZE, epsg,
                refRaster) if _osmdb else pg_num_roads(
                    conPGSQL, nomenclature, osmTableData['lines'],
                    osmTableData['polygons'], workspace, CELLSIZE, epsg,
                    refRaster)

        # ******************************************************************** #
        # 3 - Area Greater Than #
        # ******************************************************************** #
        elif ruleID == 3:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_selbyarea(conPGSQL if not _osmdb else _osmdb,
                                        osmTableData['polygons'],
                                        workspace,
                                        CELLSIZE,
                                        epsg,
                                        refRaster,
                                        UPPER=True,
                                        api=roadsAPI)
            else:
                return

        # ******************************************************************** #
        # 4 - Area Less Than #
        # ******************************************************************** #
        elif ruleID == 4:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_selbyarea(conPGSQL if not _osmdb else _osmdb,
                                        osmTableData['polygons'],
                                        workspace,
                                        CELLSIZE,
                                        epsg,
                                        refRaster,
                                        UPPER=False,
                                        api=roadsAPI)
            else:
                return

        # ******************************************************************** #
        # 5 - Get data from lines table (railway | waterway) #
        # ******************************************************************** #
        elif ruleID == 5:
            res, tm = num_base_buffer(conPGSQL if not _osmdb else _osmdb,
                                      osmTableData['lines'],
                                      workspace,
                                      CELLSIZE,
                                      epsg,
                                      refRaster,
                                      api=roadsAPI)
        # ******************************************************************** #
        # 7 - Assign untagged Buildings to tags #
        # ******************************************************************** #
        elif ruleID == 7:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_assign_builds(conPGSQL if not _osmdb else _osmdb,
                                            osmTableData['points'],
                                            osmTableData['polygons'],
                                            workspace,
                                            CELLSIZE,
                                            epsg,
                                            refRaster,
                                            apidb=roadsAPI)

            else:
                return

        time_end = datetime.datetime.now().replace(microsecond=0)
        mergeOut[ruleID] = res
        timeCheck[ruleID] = {'total': time_end - time_start, 'detailed': tm}

    thrds = []
    for r in RULES:
        thrds.append(
            Thread(name="to_{}".format(str(r)), target=run_rule, args=(r, )))

    for t in thrds:
        t.start()
    for t in thrds:
        t.join()

    # Merge all results into one Raster
    compileResults = {}
    for rule in mergeOut:
        for cls in mergeOut[rule]:
            if cls not in compileResults:
                if isinstance(mergeOut[rule][cls], list):
                    compileResults[cls] = mergeOut[rule][cls]
                else:
                    compileResults[cls] = [mergeOut[rule][cls]]

            else:
                if isinstance(mergeOut[rule][cls], list):
                    compileResults[cls] += mergeOut[rule][cls]
                else:
                    compileResults[cls].append(mergeOut[rule][cls])

    time_m = datetime.datetime.now().replace(microsecond=0)
    # All Rasters to Array
    arrayRst = {}
    for cls in compileResults:
        for raster in compileResults[cls]:
            if not raster:
                continue

            array = rst_to_array(raster)

            if cls not in arrayRst:
                arrayRst[cls] = [array.astype(numpy.uint8)]

            else:
                arrayRst[cls].append(array.astype(numpy.uint8))
    time_n = datetime.datetime.now().replace(microsecond=0)

    # Sum Rasters of each class
    for cls in arrayRst:
        if len(arrayRst[cls]) == 1:
            sumArray = arrayRst[cls][0]

        else:
            sumArray = arrayRst[cls][0]

            for i in range(1, len(arrayRst[cls])):
                sumArray = sumArray + arrayRst[cls][i]

        arrayRst[cls] = sumArray

    time_o = datetime.datetime.now().replace(microsecond=0)

    # Apply priority rule
    __priorities = PRIORITIES[nomenclature + "_NUMPY"]

    for lulcCls in __priorities:
        __lulcCls = 1222 if lulcCls == 98 else 1221 if lulcCls == 99 else \
            802 if lulcCls == 82 else 801 if lulcCls == 81 else lulcCls
        if __lulcCls not in arrayRst:
            continue
        else:
            numpy.place(arrayRst[__lulcCls], arrayRst[__lulcCls] > 0, lulcCls)

    for i in range(len(__priorities)):
        lulc_i = 1222 if __priorities[i] == 98 else 1221 \
            if __priorities[i] == 99 else 802 if __priorities[i] == 82 \
            else 801 if __priorities[i] == 81 else __priorities[i]
        if lulc_i not in arrayRst:
            continue

        else:
            for e in range(i + 1, len(__priorities)):
                lulc_e = 1222 if __priorities[e] == 98 else 1221 \
                    if __priorities[e] == 99 else \
                    802 if __priorities[e] == 82 else 801 \
                    if __priorities[e] == 81 else __priorities[e]
                if lulc_e not in arrayRst:
                    continue

                else:
                    numpy.place(arrayRst[lulc_e],
                                arrayRst[lulc_i] == __priorities[i], 0)

    time_p = datetime.datetime.now().replace(microsecond=0)

    # Merge all rasters
    startCls = None
    for i in range(len(__priorities)):
        lulc_i = 1222 if __priorities[i] == 98 else 1221 \
            if __priorities[i] == 99 else 802 if __priorities[i] == 82 \
            else 801 if __priorities[i] == 81 else __priorities[i]
        if lulc_i in arrayRst:
            resultSum = arrayRst[lulc_i]
            startCls = i
            break

    if startCls is None:
        return 'NoResults'

    for i in range(startCls + 1, len(__priorities)):
        lulc_i = 1222 if __priorities[i] == 98 else 1221 \
            if __priorities[i] == 99 else 802 if __priorities[i] == 82 \
            else 801 if __priorities[i] == 81 else __priorities[i]
        if lulc_i not in arrayRst:
            continue

        resultSum = resultSum + arrayRst[lulc_i]

    # Save Result
    numpy.place(resultSum, resultSum == 0, 1)
    array_to_raster(resultSum,
                    lulcRst,
                    refRaster,
                    epsg,
                    gdal.GDT_Byte,
                    noData=1,
                    gisApi='gdal')

    time_q = datetime.datetime.now().replace(microsecond=0)

    return lulcRst, {
        0: ('set_settings', time_b - time_a),
        1: ('osm_to_sqdb', time_c - time_b),
        2: ('cls_in_sqdb', time_d - time_c),
        3: ('proj_data', time_e - time_d),
        4: ('rule_1', timeCheck[1]['total'], timeCheck[1]['detailed']),
        5: ('rule_2', timeCheck[2]['total'], timeCheck[2]['detailed']),
        6:
        None if 3 not in timeCheck else
        ('rule_3', timeCheck[3]['total'], timeCheck[3]['detailed']),
        7:
        None if 4 not in timeCheck else
        ('rule_4', timeCheck[4]['total'], timeCheck[4]['detailed']),
        8: ('rule_5', timeCheck[5]['total'], timeCheck[5]['detailed']),
        9:
        None if 7 not in timeCheck else
        ('rule_7', timeCheck[7]['total'], timeCheck[7]['detailed']),
        10: ('rst_to_array', time_n - time_m),
        11: ('sum_cls', time_o - time_n),
        12: ('priority_rule', time_p - time_o),
        13: ('merge_rst', time_q - time_p)
    }
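
A minimal usage sketch with hypothetical inputs; GLOBE_LAND_30 is the only nomenclature named in the code above, so it is used here:

lulc, times = osm2lulc('/data/region.osm.xml', 'GLOBE_LAND_30',
                       '/data/ref.tif', '/data/lulc_num.tif',
                       epsg=3857, roadsAPI='SQLITE')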
Example #13
def raster_based(osmdata,
                 nomenclature,
                 refRaster,
                 lulcRst,
                 overwrite=None,
                 dataStore=None,
                 roadsAPI='SQLITE'):
    """
    Convert OSM Data into Land Use/Land Cover Information
    
    A raster-based approach.
    
    TODO: Add detailed description
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import datetime
    import os
    import pandas
    import json
    # ************************************************************************ #
    # Gasp dependencies #
    # ************************************************************************ #
    from gasp.oss.ops import create_folder
    from gasp.prop.rst import get_epsg_raster
    from gasp.session import run_grass
    if roadsAPI == 'POSTGIS':
        from gasp.sql.mng.db import create_db
        from gasp.osm2lulc.utils import osm_to_pgsql
        from gasp.osm2lulc.mod2 import roads_sqdb
    else:
        from gasp.osm2lulc.utils import osm_to_sqdb
        from gasp.osm2lulc.mod2 import grs_rst_roads
    from gasp.osm2lulc.utils import osm_project, add_lulc_to_osmfeat
    from gasp.osm2lulc.mod1 import grs_rst
    from gasp.osm2lulc.m3_4 import rst_area
    from gasp.osm2lulc.mod5 import basic_buffer
    from gasp.osm2lulc.mod6 import rst_pnt_to_build
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    if not os.path.exists(os.path.dirname(lulcRst)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcRst)))

    # Get EPSG of Reference Raster
    epsg = get_epsg_raster(refRaster)
    if not epsg:
        raise ValueError('Cannot get epsg code of ref raster')

    # Get Parameters to connect to PostgreSQL
    conPGSQL = json.load(
        open(
            os.path.join(os.path.dirname(os.path.abspath(__file__)),
                         'con-postgresql.json'),
            'r')) if roadsAPI == 'POSTGIS' else None

    time_a = datetime.datetime.now().replace(microsecond=0)
    from gasp.osm2lulc.var import PRIORITIES, osmTableData

    workspace = os.path.join(os.path.dirname(lulcRst),
                             'osmtolulc') if not dataStore else dataStore

    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            create_folder(workspace)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        create_folder(workspace)

    __priorities = PRIORITIES[nomenclature]
    time_b = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Convert OSM file to SQLITE DB or to POSTGIS DB #
    # ************************************************************************ #
    if roadsAPI == 'POSTGIS':
        conPGSQL["DATABASE"] = create_db(conPGSQL,
                                         os.path.splitext(
                                             os.path.basename(osmdata))[0],
                                         overwrite=True)
        osm_db = osm_to_pgsql(osmdata, conPGSQL)
    else:
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(conPGSQL if roadsAPI == 'POSTGIS' else osm_db,
                        osmTableData,
                        nomenclature,
                        api=roadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(conPGSQL if roadsAPI == 'POSTGIS' else osm_db,
                               epsg,
                               api=roadsAPI)
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Start a GRASS GIS Session #
    # ************************************************************************ #
    grass_base = run_grass(workspace,
                           grassBIN='grass76',
                           location='grloc',
                           srs=epsg)
    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')

    # ************************************************************************ #
    # IMPORT SOME GASP MODULES FOR GRASS GIS #
    # ************************************************************************ #
    from gasp.to.rst import rst_to_grs, grs_to_rst
    from gasp.cpu.grs.spanlst import mosaic_raster
    from gasp.prop.grs import rst_to_region

    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(refRaster, 'extent_raster')
    rst_to_region(extRst)
    time_f = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # MapResults #
    mergeOut = {}
    # ************************************************************************ #
    # ************************************************************************ #
    # 1 - Selection Rule #
    # ************************************************************************ #
    """
    selOut = {
        cls_code : rst_name, ...
    }
    """
    selOut, timeCheck1 = grs_rst(conPGSQL if roadsAPI == 'POSTGIS' else osm_db,
                                 osmTableData['polygons'],
                                 api=roadsAPI)
    for cls in selOut:
        mergeOut[cls] = [selOut[cls]]

    time_g = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 2 - Get Information About Roads Location #
    # ************************************************************************ #
    """
    roads = {
        cls_code : rst_name, ...
    }
    """

    if roadsAPI != 'POSTGIS':
        roads, timeCheck2 = grs_rst_roads(
            osm_db, osmTableData['lines'], osmTableData['polygons'], workspace,
            1221 if nomenclature != "GLOBE_LAND_30" else 801)
    else:
        roadCls = 1221 if nomenclature != "GLOBE_LAND_30" else 801

        roads, timeCheck2 = roads_sqdb(conPGSQL,
                                       osmTableData['lines'],
                                       osmTableData['polygons'],
                                       apidb='POSTGIS',
                                       asRst=roadCls)

        roads = {roadCls: roads}

    for cls in roads:
        if cls not in mergeOut:
            mergeOut[cls] = [roads[cls]]
        else:
            mergeOut[cls].append(roads[cls])

    time_h = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 3 - Area Greater Than #
    # ************************************************************************ #
    """
    auOut = {
        cls_code : rst_name, ...
    }
    """

    auOut, timeCheck3 = rst_area(conPGSQL if roadsAPI == 'POSTGIS' else osm_db,
                                 osmTableData['polygons'],
                                 UPPER=True,
                                 api=roadsAPI)
    for cls in auOut:
        if cls not in mergeOut:
            mergeOut[cls] = [auOut[cls]]
        else:
            mergeOut[cls].append(auOut[cls])

    time_l = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 4 - Area Less Than #
    # ************************************************************************ #
    """
    alOut = {
        cls_code : rst_name, ...
    }
    """

    alOut, timeCheck4 = rst_area(conPGSQL if roadsAPI == 'POSTGIS' else osm_db,
                                 osmTableData['polygons'],
                                 UPPER=None,
                                 api=roadsAPI)
    for cls in alOut:
        if cls not in mergeOut:
            mergeOut[cls] = [alOut[cls]]
        else:
            mergeOut[cls].append(alOut[cls])

    time_j = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 5 - Get data from lines table (railway | waterway) #
    # ************************************************************************ #
    """
    bfOut = {
        cls_code : rst_name, ...
    }
    """

    bfOut, timeCheck5 = basic_buffer(
        conPGSQL if roadsAPI == 'POSTGIS' else osm_db,
        osmTableData['lines'],
        workspace,
        apidb=roadsAPI)
    for cls in bfOut:
        if cls not in mergeOut:
            mergeOut[cls] = [bfOut[cls]]
        else:
            mergeOut[cls].append(bfOut[cls])

    time_m = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 7 - Assign untagged Buildings to tags #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        buildsOut, timeCheck7 = rst_pnt_to_build(
            conPGSQL if roadsAPI == 'POSTGIS' else osm_db,
            osmTableData['points'],
            osmTableData['polygons'],
            api_db=roadsAPI)

        for cls in buildsOut:
            if cls not in mergeOut:
                mergeOut[cls] = buildsOut[cls]
            else:
                mergeOut[cls] += buildsOut[cls]

        time_n = datetime.datetime.now().replace(microsecond=0)

    else:
        timeCheck7 = None
        time_n = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Produce LULC Map  #
    # ************************************************************************ #
    """
    Merge all results for one cls into one raster
    mergeOut = {
        cls_code : [rst_name, rst_name, ...], ...
    }
    into
    mergeOut = {
        cls_code : patched_raster, ...
    }
    """

    for cls in mergeOut:
        if len(mergeOut[cls]) == 1:
            mergeOut[cls] = mergeOut[cls][0]

        else:
            mergeOut[cls] = mosaic_raster(mergeOut[cls],
                                          'mosaic_{}'.format(str(cls)),
                                          asCmd=True)

    time_o = datetime.datetime.now().replace(microsecond=0)
    """
    Merge all Class Raster using a priority rule
    """

    __priorities = PRIORITIES[nomenclature]
    lst_rst = []
    for cls in __priorities:
        if cls not in mergeOut:
            continue
        else:
            lst_rst.append(mergeOut[cls])

    outGrs = mosaic_raster(lst_rst,
                           os.path.splitext(os.path.basename(lulcRst))[0],
                           asCmd=True)
    time_p = datetime.datetime.now().replace(microsecond=0)

    grs_to_rst(outGrs, lulcRst, as_cmd=True)
    time_q = datetime.datetime.now().replace(microsecond=0)

    return lulcRst, {
        0: ('set_settings', time_b - time_a),
        1: ('osm_to_sqdb', time_c - time_b),
        2: ('cls_in_sqdb', time_d - time_c),
        3: ('proj_data', time_e - time_d),
        4: ('set_grass', time_f - time_e),
        5: ('rule_1', time_g - time_f, timeCheck1),
        6: ('rule_2', time_h - time_g, timeCheck2),
        7: ('rule_3', time_l - time_h, timeCheck3),
        8: ('rule_4', time_j - time_l, timeCheck4),
        9: ('rule_5', time_m - time_j, timeCheck5),
        10: None if not timeCheck7 else
        ('rule_7', time_n - time_m, timeCheck7),
        11: ('merge_rst', time_o - time_n),
        12: ('priority_rule', time_p - time_o),
        13: ('export_rst', time_q - time_p)
    }
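
A minimal usage sketch with hypothetical inputs; the EPSG code is read from the reference raster:

lulc, times = raster_based('/data/region.osm.xml', 'GLOBE_LAND_30',
                           '/data/ref.tif', '/data/lulc_rst.tif',
                           roadsAPI='SQLITE')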
Example #14
def check_shape_diff(SHAPES_TO_COMPARE,
                     OUT_FOLDER,
                     REPORT,
                     conPARAM,
                     DB,
                     SRS_CODE,
                     GIS_SOFTWARE="GRASS",
                     GRASS_REGION_TEMPLATE=None):
    """
    Script to check differences between pairs of Feature Classes
    
    Suppose we have several Feature Classes (FC), each of which has a
    given attribute; imagine also that, considering every possible
    pair of these FC, we want to compare the differences in the
    distribution of that attribute's values for each pair.
    
    * Dependencies:
    - ArcGIS;
    - GRASS;
    - PostgreSQL;
    - PostGIS.
    
    * GIS_SOFTWARE Options:
    - ARCGIS;
    - GRASS.
    """

    import datetime
    import os
    import pandas
    from gasp.fm.sql import query_to_df
    from gasp.sql.mng.tbl import tbls_to_tbl
    from gasp.sql.mng.geom import fix_geom, check_geomtype_in_table
    from gasp.sql.mng.geom import select_main_geom_type
    from gasp.sql.mng.qw import ntbl_by_query
    from gasp.prop.ff import check_isRaster
    from gasp.oss import get_filename
    from gasp.sql.mng.db import create_db
    from gasp.to.sql import shp_to_psql, df_to_db
    from gasp.to.shp import rst_to_polyg
    from gasp.to.shp import shp_to_shp, psql_to_shp
    from gasp.to import db_to_tbl
    # db_to_xls (used at the end of this function) is assumed to be
    # importable from gasp.to as well
    from gasp.to import db_to_xls

    # Check if folder exists, if not create it
    if not os.path.exists(OUT_FOLDER):
        from gasp.oss.ops import create_folder
        create_folder(OUT_FOLDER, overwrite=None)
    else:
        raise ValueError('{} already exists!'.format(OUT_FOLDER))

    # Start GRASS GIS Session if GIS_SOFTWARE == GRASS
    if GIS_SOFTWARE == "GRASS":
        if not GRASS_REGION_TEMPLATE:
            raise ValueError(
                'To use GRASS GIS you need to specify GRASS_REGION_TEMPLATE')

        from gasp.session import run_grass

        gbase = run_grass(OUT_FOLDER,
                          grassBIN='grass76',
                          location='shpdif',
                          srs=GRASS_REGION_TEMPLATE)

        import grass.script as grass
        import grass.script.setup as gsetup

        gsetup.init(gbase, OUT_FOLDER, 'shpdif', 'PERMANENT')

        from gasp.mng.grstbl import rename_col
        from gasp.to.shp.grs import shp_to_grs, grs_to_shp
        from gasp.to.rst import rst_to_grs
        from gasp.mng.fld import rename_column

    # Convert to SHAPE if file is Raster
    # Import to GRASS GIS if GIS SOFTWARE == GRASS
    i = 0
    _SHP_TO_COMPARE = {}
    for s in SHAPES_TO_COMPARE:
        isRaster = check_isRaster(s)

        if isRaster:
            if GIS_SOFTWARE == "ARCGIS":
                d = rst_to_polyg(s,
                                 os.path.join(os.path.dirname(s),
                                              get_filename(s) + '.shp'),
                                 gisApi='arcpy')

                _SHP_TO_COMPARE[d] = "gridcode"

            elif GIS_SOFTWARE == "GRASS":
                # To GRASS
                rstName = get_filename(s)
                inRst = rst_to_grs(s, "rst_" + rstName, as_cmd=True)
                # To Raster
                d = rst_to_polyg(inRst,
                                 rstName,
                                 rstColumn="lulc_{}".format(i),
                                 gisApi="grasscmd")

                # Export Shapefile
                shp = grs_to_shp(d, os.path.join(OUT_FOLDER, d + '.shp'),
                                 "area")

                _SHP_TO_COMPARE[shp] = "lulc_{}".format(i)

        else:
            if GIS_SOFTWARE == "ARCGIS":
                _SHP_TO_COMPARE[s] = SHAPES_TO_COMPARE[s]

            elif GIS_SOFTWARE == "GRASS":
                # To GRASS
                grsV = shp_to_grs(s, get_filename(s), asCMD=True)

                # Change name of column with comparing value
                rename_col(grsV,
                           SHAPES_TO_COMPARE[s],
                           "lulc_{}".format(i),
                           as_cmd=True)

                # Export
                shp = grs_to_shp(grsV,
                                 os.path.join(OUT_FOLDER, grsV + '_rn.shp'),
                                 "area")

                _SHP_TO_COMPARE[shp] = "lulc_{}".format(i)

        i += 1

    SHAPES_TO_COMPARE = _SHP_TO_COMPARE
    if GIS_SOFTWARE == "ARCGIS":
        from gasp.cpu.arcg.mng.fld import calc_fld
        from gasp.cpu.arcg.mng.wspace import create_geodb
        from gasp.mng.gen import copy_feat

        # Sanitize data and Add new field
        __SHAPES_TO_COMPARE = {}
        i = 0

        # Create GeoDatabase
        geodb = create_geodb(OUT_FOLDER, 'geo_sanitize')
        """ Sanitize Data """
        for k in SHAPES_TO_COMPARE:
            # Send data to GeoDatabase only to sanitize
            newFc = shp_to_shp(k,
                               os.path.join(geodb, get_filename(k)),
                               gisApi='arcpy')

            # Create a copy to change
            newShp = copy_feat(newFc,
                               os.path.join(OUT_FOLDER, os.path.basename(k)),
                               gisApi='arcpy')

            # Sanitize field name with interest data
            NEW_FLD = "lulc_{}".format(i)
            calc_fld(newShp,
                     NEW_FLD,
                     "[{}]".format(SHAPES_TO_COMPARE[k]),
                     isNewField={
                         "TYPE": "INTEGER",
                         "LENGTH": 5,
                         "PRECISION": ""
                     })

            __SHAPES_TO_COMPARE[newShp] = NEW_FLD

            i += 1

    else:
        __SHAPES_TO_COMPARE = SHAPES_TO_COMPARE

    # Create database
    conPARAM["DATABASE"] = create_db(conPARAM, DB)
    """ Union SHAPEs """

    UNION_SHAPE = {}
    FIX_GEOM = {}

    def fix_geometry(shp):
        # Send data to PostgreSQL
        nt = shp_to_psql(conPARAM, shp, SRS_CODE, api='shp2pgsql')

        # Fix data
        corr_tbl = fix_geom(conPARAM,
                            nt,
                            "geom",
                            "corr_{}".format(nt),
                            colsSelect=['gid', __SHAPES_TO_COMPARE[shp]])

        # Check if we have multiple geometries
        geomN = check_geomtype_in_table(conPARAM, corr_tbl)

        if geomN > 1:
            corr_tbl = select_main_geom_type(conPARAM, corr_tbl,
                                             "corr2_{}".format(nt))

        # Export data again
        newShp = psql_to_shp(conPARAM,
                             corr_tbl,
                             os.path.join(OUT_FOLDER, corr_tbl + '.shp'),
                             api='pgsql2shp',
                             geom_col='geom')

        return newShp

    SHPS = list(__SHAPES_TO_COMPARE.keys())
    for i in range(len(SHPS)):
        for e in range(i + 1, len(SHPS)):
            if GIS_SOFTWARE == 'ARCGIS':
                # Try the union thing (union is expected to come from
                # gasp's overlay helpers; it is not imported in this excerpt)
                unShp = union(SHPS[i],
                              SHPS[e],
                              os.path.join(OUT_FOLDER,
                                           "un_{}_{}.shp".format(i, e)),
                              api_gis="arcpy")

                # See if the union went all right
                if not os.path.exists(unShp):
                    # Union went not well

                    # See if geometry was already fixed
                    if SHPS[i] not in FIX_GEOM:
                        # Fix SHPS[i] geometry
                        FIX_GEOM[SHPS[i]] = fix_geometry(SHPS[i])

                    if SHPS[e] not in FIX_GEOM:
                        FIX_GEOM[SHPS[e]] = fix_geometry(SHPS[e])

                    # Run Union again
                    unShp = union(FIX_GEOM[SHPS[i]],
                                  FIX_GEOM[SHPS[e]],
                                  os.path.join(OUT_FOLDER,
                                               "un_{}_{}_f.shp".format(i, e)),
                                  api_gis="arcpy")

            elif GIS_SOFTWARE == "GRASS":
                # Optimized Union
                print "Union between {} and {}".format(SHPS[i], SHPS[e])
                time_a = datetime.datetime.now().replace(microsecond=0)
                __unShp = optimized_union_anls(
                    SHPS[i],
                    SHPS[e],
                    os.path.join(OUT_FOLDER, "un_{}_{}.shp".format(i, e)),
                    GRASS_REGION_TEMPLATE,
                    SRS_CODE,
                    os.path.join(OUT_FOLDER, "work_{}_{}".format(i, e)),
                    multiProcess=True)
                time_b = datetime.datetime.now().replace(microsecond=0)
                print time_b - time_a

                # Rename cols
                unShp = rename_column(
                    __unShp, {
                        "a_" + __SHAPES_TO_COMPARE[SHPS[i]]:
                        __SHAPES_TO_COMPARE[SHPS[i]],
                        "b_" + __SHAPES_TO_COMPARE[SHPS[e]]:
                        __SHAPES_TO_COMPARE[SHPS[e]]
                    }, os.path.join(OUT_FOLDER, "un_{}_{}_rn.shp".format(i,
                                                                         e)))

            UNION_SHAPE[(SHPS[i], SHPS[e])] = unShp

    # Send data one more time to postgresql
    SYNTH_TBL = {}

    for uShp in UNION_SHAPE:
        # Send data to PostgreSQL
        union_tbl = shp_to_psql(conPARAM,
                                UNION_SHAPE[uShp],
                                SRS_CODE,
                                api='shp2pgsql')

        # Produce table with % of area equal in both maps
        areaMapTbl = ntbl_by_query(
            conPARAM,
            "{}_syn".format(union_tbl),
            ("SELECT CAST('{lulc_1}' AS text) AS lulc_1, "
             "CAST('{lulc_2}' AS text) AS lulc_2, "
             "round("
             "CAST(SUM(g_area) / 1000000 AS numeric), 4"
             ") AS agree_area, round("
             "CAST((SUM(g_area) / MIN(total_area)) * 100 AS numeric), 4"
             ") AS agree_percentage, "
             "round("
             "CAST(MIN(total_area) / 1000000 AS numeric), 4"
             ") AS total_area FROM ("
             "SELECT {map1_cls}, {map2_cls}, ST_Area(geom) AS g_area, "
             "CASE "
             "WHEN {map1_cls} = {map2_cls} "
             "THEN 1 ELSE 0 "
             "END AS isthesame, total_area FROM {tbl}, ("
             "SELECT SUM(ST_Area(geom)) AS total_area FROM {tbl}"
             ") AS foo2"
             ") AS foo WHERE isthesame = 1 "
             "GROUP BY isthesame").format(
                 lulc_1=get_filename(uShp[0]),
                 lulc_2=get_filename(uShp[1]),
                 map1_cls=__SHAPES_TO_COMPARE[uShp[0]],
                 map2_cls=__SHAPES_TO_COMPARE[uShp[1]],
                 tbl=union_tbl),
            api='psql')

        # Produce confusion matrix for the pair in comparison
        lulcCls = query_to_df(
            conPARAM,
            ("SELECT fcol FROM ("
             "SELECT CAST({map1_cls} AS text) AS fcol FROM {tbl} "
             "GROUP BY {map1_cls} "
             "UNION ALL SELECT CAST({map2_cls} AS text) FROM {tbl} "
             "GROUP BY {map2_cls}"
             ") AS foo GROUP BY fcol ORDER BY fcol").format(
                 tbl=union_tbl,
                 map1_cls=__SHAPES_TO_COMPARE[uShp[0]],
                 map2_cls=__SHAPES_TO_COMPARE[uShp[1]]),
            db_api='psql').fcol.tolist()

        matrixTbl = ntbl_by_query(
            conPARAM,
            "{}_matrix".format(union_tbl),
            ("SELECT * FROM crosstab('"
             "SELECT CASE "
             "WHEN foo.{map1_cls} IS NOT NULL "
             "THEN foo.{map1_cls} ELSE jtbl.flyr "
             "END AS lulc1_cls, CASE "
             "WHEN foo.{map2_cls} IS NOT NULL "
             "THEN foo.{map2_cls} ELSE jtbl.slyr "
             "END AS lulc2_cls, CASE "
             "WHEN foo.garea IS NOT NULL "
             "THEN round(CAST(foo.garea / 1000000 AS numeric)"
             ", 3) ELSE 0 "
             "END AS garea FROM ("
             "SELECT CAST({map1_cls} AS text) AS {map1_cls}, "
             "CAST({map2_cls} AS text) AS {map2_cls}, "
             "SUM(ST_Area(geom)) AS garea "
             "FROM {tbl} GROUP BY {map1_cls}, {map2_cls}"
             ") AS foo FULL JOIN ("
             "SELECT f.flyr, s.slyr FROM ("
             "SELECT CAST({map1_cls} AS text) AS flyr "
             "FROM {tbl} GROUP BY {map1_cls}"
             ") AS f, ("
             "SELECT CAST({map2_cls} AS text) AS slyr "
             "FROM {tbl} GROUP BY {map2_cls}"
             ") AS s"
             ") AS jtbl "
             "ON foo.{map1_cls} = jtbl.flyr AND "
             "foo.{map2_cls} = jtbl.slyr "
             "ORDER BY 1,2"
             "') AS ct("
             "lulc_cls text, {crossCols}"
             ")").format(crossCols=", ".join(
                 ["cls_{} numeric".format(c) for c in lulcCls]),
                         tbl=union_tbl,
                         map1_cls=__SHAPES_TO_COMPARE[uShp[0]],
                         map2_cls=__SHAPES_TO_COMPARE[uShp[1]]),
            api='psql')

        SYNTH_TBL[uShp] = {"TOTAL": areaMapTbl, "MATRIX": matrixTbl}

    # UNION ALL TOTAL TABLES
    total_table = tbls_to_tbl(conPARAM,
                              [SYNTH_TBL[k]["TOTAL"] for k in SYNTH_TBL],
                              'total_table')

    # Create table with % of agreement between each pair of maps
    mapsNames = query_to_df(
        conPARAM,
        ("SELECT lulc FROM ("
         "SELECT lulc_1 AS lulc FROM {tbl} GROUP BY lulc_1 "
         "UNION ALL "
         "SELECT lulc_2 AS lulc FROM {tbl} GROUP BY lulc_2"
         ") AS lu GROUP BY lulc ORDER BY lulc").format(tbl=total_table),
        db_api='psql').lulc.tolist()

    FLDS_TO_PIVOT = ["agree_percentage", "total_area"]

    Q = ("SELECT * FROM crosstab('"
         "SELECT CASE "
         "WHEN foo.lulc_1 IS NOT NULL THEN foo.lulc_1 ELSE jtbl.tmp1 "
         "END AS lulc_1, CASE "
         "WHEN foo.lulc_2 IS NOT NULL THEN foo.lulc_2 ELSE jtbl.tmp2 "
         "END AS lulc_2, CASE "
         "WHEN foo.{valCol} IS NOT NULL THEN foo.{valCol} ELSE 0 "
         "END AS agree_percentage FROM ("
         "SELECT lulc_1, lulc_2, {valCol} FROM {tbl} UNION ALL "
         "SELECT lulc_1, lulc_2, {valCol} FROM ("
         "SELECT lulc_1 AS lulc_2, lulc_2 AS lulc_1, {valCol} "
         "FROM {tbl}"
         ") AS tst"
         ") AS foo FULL JOIN ("
         "SELECT lulc_1 AS tmp1, lulc_2 AS tmp2 FROM ("
         "SELECT lulc_1 AS lulc_1 FROM {tbl} GROUP BY lulc_1 "
         "UNION ALL "
         "SELECT lulc_2 AS lulc_1 FROM {tbl} GROUP BY lulc_2"
         ") AS tst_1, ("
         "SELECT lulc_1 AS lulc_2 FROM {tbl} GROUP BY lulc_1 "
         "UNION ALL "
         "SELECT lulc_2 AS lulc_2 FROM {tbl} GROUP BY lulc_2"
         ") AS tst_2 WHERE lulc_1 = lulc_2 GROUP BY lulc_1, lulc_2"
         ") AS jtbl ON foo.lulc_1 = jtbl.tmp1 AND foo.lulc_2 = jtbl.tmp2 "
         "ORDER BY lulc_1, lulc_2"
         "') AS ct("
         "lulc_map text, {crossCols}"
         ")")

    TOTAL_AGREE_TABLE = None
    TOTAL_AREA_TABLE = None
    for f in FLDS_TO_PIVOT:
        # First field ("agree_percentage") fills the agreement table;
        # the second ("total_area") fills the area table
        if not TOTAL_AGREE_TABLE:
            TOTAL_AGREE_TABLE = ntbl_by_query(
                conPARAM,
                "agreement_table",
                Q.format(tbl=total_table,
                         valCol=f,
                         crossCols=", ".join([
                             "{} numeric".format(map_) for map_ in mapsNames
                         ])),
                api='psql')

        else:
            TOTAL_AREA_TABLE = ntbl_by_query(
                conPARAM,
                "area_table",
                Q.format(tbl=total_table,
                         valCol=f,
                         crossCols=", ".join([
                             "{} numeric".format(map_) for map_ in mapsNames
                         ])),
                api='psql')

    # Union Mapping
    UNION_MAPPING = pandas.DataFrame(
        [[
            get_filename(k[0]),
            get_filename(k[1]),
            get_filename(UNION_SHAPE[k])
        ] for k in UNION_SHAPE],
        columns=['shp_a', 'shp_b', 'union_shp'
                 ]) if GIS_SOFTWARE == "ARCGIS" else pandas.DataFrame(
                     [[k[0], k[1], get_filename(UNION_SHAPE[k])]
                      for k in UNION_SHAPE],
                     columns=['shp_a', 'shp_b', 'union_shp'])

    UNION_MAPPING = df_to_db(conPARAM, UNION_MAPPING, 'union_map', api='psql')

    # Export Results
    TABLES = [UNION_MAPPING, TOTAL_AGREE_TABLE, TOTAL_AREA_TABLE
              ] + [SYNTH_TBL[x]["MATRIX"] for x in SYNTH_TBL]

    SHEETS = ["union_map", "agreement_percentage", "area_with_data_km"] + [
        "{}_{}".format(get_filename(x[0])[:15],
                       get_filename(x[1])[:15]) for x in SYNTH_TBL
    ]

    db_to_xls(conPARAM, ["SELECT * FROM {}".format(x) for x in TABLES],
              REPORT,
              sheetsNames=SHEETS,
              dbAPI='psql')

    return REPORT
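
The crosstab() calls used throughout this example come from PostgreSQL's tablefunc extension, which has to be enabled once per database. A minimal sketch of doing that from Python, assuming a psycopg2 connection built from the same parameters dict as conPARAM (names here are illustrative):

import psycopg2

def ensure_tablefunc(con_param):
    """Enable tablefunc so the crosstab() queries above can run."""
    con = psycopg2.connect(
        host=con_param['HOST'], port=con_param['PORT'],
        user=con_param['USER'], password=con_param['PASSWORD'],
        dbname=con_param['DATABASE'])
    con.autocommit = True
    with con.cursor() as cur:
        # No-op if the extension is already installed
        cur.execute("CREATE EXTENSION IF NOT EXISTS tablefunc;")
    con.close()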
Example #15
0
def clip_several_each_feature(rst_folder,
                              shp,
                              feature_id,
                              work,
                              template=None,
                              rst_file_format='.tif'):
    """
    Clip a folder of rasters by each feature in a feature class

    The rasters clipped for a feature will be in an individual folder
    """

    import arcpy
    import os

    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.cpu.arcg.lyr import rst_lyr
    from gasp.cpu.arcg.anls.exct import select_by_attr
    # clip_raster (used below) is not imported in the original snippet;
    # it is assumed to be available from the gasp.cpu.arcg namespace
    from gasp.cpu.arcg.mng.fld import type_fields
    from gasp.oss.ops import create_folder
    from gasp.oss import list_files

    # ########### #
    # Environment #
    # ########### #
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = work

    # ###### #
    # Do it! #
    # ###### #
    # Open feature class
    lyr_shp = feat_lyr(shp)

    # Create folder for some temporary files
    wTmp = create_folder(os.path.join(work, 'tmp'))

    # Split feature class in parts
    c = arcpy.SearchCursor(lyr_shp)
    l = c.next()
    features = {}

    # Get id's field type
    fld_type = type_fields(lyr_shp, field=feature_id)

    expression = '{fld}=\'{_id}\'' if str(fld_type) == 'String' else \
        '{fld}={_id}'

    while l:
        fid = str(l.getValue(feature_id))

        selection = select_by_attr(
            lyr_shp, expression.format(fld=feature_id, _id=fid),
            os.path.join(wTmp, 'each_{}.shp'.format(fid)))

        f_lyr = feat_lyr(selection)
        features[fid] = f_lyr

        l = c.next()

    rasters = list_files(rst_folder, file_format=rst_file_format)

    for raster in rasters:
        r_lyr = rst_lyr(raster)
        for feat in features:
            # Ensure the individual folder for this feature exists
            outFolder = create_folder(os.path.join(work, feat))
            clip_rst = clip_raster(
                r_lyr, features[feat],
                os.path.join(outFolder, os.path.basename(raster)), template)
Example #16
0
def create_psqlstore(store,
                     workspace,
                     pg_con,
                     gs_con={
                         'USER': '******',
                         'PASSWORD': '******',
                         'HOST': 'localhost',
                         'PORT': '8888'
                     },
                     protocol='http'):
    """
    Create a store for PostGIS data
    """

    import os
    import requests

    from gasp.oss.ops import create_folder, del_folder
    from gasp import random_str
    from gasp.to.Xml import write_xml_tree

    # Create folder to write xml
    wTmp = create_folder(
        os.path.join(os.path.dirname(os.path.abspath(__file__)),
                     random_str(7)))

    # Create obj with data to be written in the xml
    tree_order = {
        "dataStore": [
            "name", "type", "enabled", "workspace", "connectionParameters",
            "__default"
        ],
        "connection:Parameters": [("entry", "key", "port"),
                                  ("entry", "key", "user"),
                                  ("entry", "key", "passwd"),
                                  ("entry", "key", "dbtype"),
                                  ("entry", "key", "host"),
                                  ("entry", "key", "database"),
                                  ("entry", "key", "schema")]
    }

    xml_tree = {
        "dataStore": {
            "name": store,
            "type": "PostGIS",
            "enabled": "true",
            "workspace": {
                "name": workspace
            },
            "connectionParameters": {
                ("entry", "key", "port"): pg_con["PORT"],
                ("entry", "key", "user"): pg_con["USER"],
                ("entry", "key", "passwd"): pg_con["PASSWORD"],
                ("entry", "key", "dbtype"): "postgis",
                ("entry", "key", "host"): pg_con["HOST"],
                ("entry", "key", "database"): pg_con["DATABASE"],
                ("entry", "key", "schema"): "public"
            },
            "__default": "false"
        }
    }

    # Write xml
    xml_file = write_xml_tree(xml_tree,
                              os.path.join(wTmp, 'pgrest.xml'),
                              nodes_order=tree_order)

    # Create Geoserver Store
    url = ('{pro}://{host}:{port}/geoserver/rest/workspaces/{wname}/'
           'datastores.xml').format(host=gs_con['HOST'],
                                    port=gs_con['PORT'],
                                    wname=workspace,
                                    pro=protocol)

    with open(xml_file, 'rb') as f:
        r = requests.post(url,
                          data=f,
                          headers={'content-type': 'text/xml'},
                          auth=(gs_con['USER'], gs_con['PASSWORD']))

    del_folder(wTmp)

    return r
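
A sketch of a call, with placeholder credentials and database names (GeoServer's stock admin account is used here only as an example):

pg = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
      'PASSWORD': 'admin', 'DATABASE': 'gisdb'}
gs = {'HOST': 'localhost', 'PORT': '8080', 'USER': 'admin',
      'PASSWORD': 'geoserver'}

r = create_psqlstore('gisdb_store', 'my_workspace', pg, gs_con=gs)
print(r.status_code)  # GeoServer REST answers 201 Created on success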
Example #17
0
def infovalue(landslides, variables, iv_rst, dataEpsg):
    """
    Informative Value using GDAL Library
    """

    import os
    import math
    import numpy
    from osgeo import gdal
    from gasp.fm.rst import rst_to_array
    from gasp.fm import tbl_to_obj
    from gasp.prop.feat import get_geom_type
    from gasp.prop.rst import rst_shape
    from gasp.prop.rst import count_cells
    from gasp.prop.rst import get_cellsize
    from gasp.stats.rst import frequencies
    from gasp.oss.ops import create_folder
    from gasp.to.rst import array_to_raster

    # Create Workspace for temporary files
    workspace = create_folder(os.path.join(os.path.dirname(landslides), 'tmp'))

    # Get Variables Raster Shape and see if there is any difference
    varShapes = rst_shape(variables, gisApi='gdal')
    for i in range(1, len(variables)):
        if varShapes[variables[i - 1]] != varShapes[variables[i]]:
            raise ValueError(
                ('All rasters must have the same dimension! '
                 'Raster {} and Raster {} have not the same shape!').format(
                     variables[i - 1], variables[i]))

    # See if landslides are raster or not
    # Try to open as raster
    try:
        land_rst = rst_to_array(landslides)
    except:
        land_rst = None

    if land_rst is not None:
        lrows, lcols = land_rst.shape

        if [lrows, lcols] != varShapes[variables[0]]:
            raise ValueError(
                ("Raster with Landslides ({}) has to have the same "
                 "dimension that Raster Variables").format(landslides))

        # Keep the path for the frequencies() call below
        land_raster = landslides

    else:
        # Landslides are not Raster
        # Open as Feature Class
        # See if is Point or Polygon
        land_df = tbl_to_obj(landslides)
        geomType = get_geom_type(land_df, geomCol="geometry", gisApi='pandas')

        if geomType == 'Polygon' or geomType == 'MultiPolygon':
            # it will be converted to raster below
            land_poly = landslides

        elif geomType == 'Point' or geomType == 'MultiPoint':
            # Do a Buffer
            from gasp.anls.prox.bf import geodf_buffer_to_shp
            land_poly = geodf_buffer_to_shp(
                land_df, 100, os.path.join(workspace, 'landslides_buffer.shp'))

        # Convert To Raster
        from gasp.to.rst import shp_to_raster
        land_raster = shp_to_raster(land_poly,
                                    None,
                                    get_cellsize(variables[0], gisApi='gdal'),
                                    -9999,
                                    os.path.join(workspace,
                                                 'landslides_rst.tif'),
                                    rst_template=variables[0],
                                    api='gdal')

        land_rst = rst_to_array(land_raster)

    # Get total number of cells and number of cells
    # with landslides
    landsldCells = frequencies(land_raster)[1]
    totalCells = count_cells(variables[0])

    # Get number of cells by classe in variable
    freqVar = {r: frequencies(r) for r in variables}

    for rst in freqVar:
        for cls in freqVar[rst]:
            if cls == 0:
                freqVar[rst][-1] = freqVar[rst][cls]
                del freqVar[rst][cls]

            else:
                continue

    # Get cell number with landslides by class
    varArray = {r: rst_to_array(r) for r in variables}

    for r in varArray:
        numpy.place(varArray[r], varArray[r] == 0, -1)

    landArray = {r: land_rst * varArray[r] for r in varArray}
    freqLndVar = {r: frequencies(landArray[r]) for r in landArray}

    # Estimate VI for each class on every variable
    vi = {}
    for var in freqVar:
        vi[var] = {}
        for cls in freqVar[var]:
            if cls in freqLndVar[var]:
                vi[var][cls] = math.log10(
                    (float(freqLndVar[var][cls]) / freqVar[var][cls]) /
                    (float(landsldCells) / totalCells))

            else:
                vi[var][cls] = 9999

    # Replace Classes without VI, from 9999 to minimum VI
    vis = []
    for d in vi.values():
        vis += d.values()

    min_vi = min(vis)

    for r in vi:
        for cls in vi[r]:
            if vi[r][cls] == 9999:
                vi[r][cls] = min_vi
            else:
                continue

    # Replace cls by vi in rst_arrays
    resultArrays = {v: numpy.zeros(varArray[v].shape) for v in varArray}
    for v in varArray:
        numpy.place(resultArrays[v], resultArrays[v] == 0, -128)

    for v in varArray:
        for cls in vi[v]:
            numpy.place(resultArrays[v], varArray[v] == cls, vi[v][cls])

    # Sum all arrays and save the result as raster
    vi_rst = resultArrays[variables[0]] + resultArrays[variables[1]]
    for v in range(2, len(variables)):
        vi_rst = vi_rst + resultArrays[variables[v]]

    numpy.place(vi_rst, vi_rst == len(variables) * -128, -128)

    result = array_to_raster(vi_rst,
                             iv_rst,
                             variables[0],
                             dataEpsg,
                             gdal.GDT_Float32,
                             noData=-128,
                             gisApi='gdal')

    return iv_rst
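
The score computed above is the standard Informative Value: IV_i = log10((Si/Ni) / (S/N)), where Si is the number of landslide cells in class i, Ni the number of cells in class i, S the total landslide cells and N the total cells. A self-contained sketch of that arithmetic:

import math

def informative_value(si, ni, s, n):
    """IV_i = log10((Si/Ni) / (S/N)); positive values flag classes
    more landslide-prone than the study area average."""
    return math.log10((float(si) / ni) / (float(s) / n))

# 40 landslide cells out of 1000 in the class, 100 out of 10000 overall
print(informative_value(40, 1000, 100, 10000))  # ~0.602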
Example #18
0
def joinLines_by_spatial_rel_raster(mainLines, mainId, joinLines,
                                    joinCol, outfile, epsg):
    """
    Join Attributes based on a spatial overlap.
    An raster based approach
    """
    
    import os
    import pandas
    from geopandas      import GeoDataFrame
    from gasp.to.geom   import regulardf_to_geodf
    from gasp.session   import run_grass
    from gasp.oss       import get_filename
    from gasp.oss.ops   import create_folder
    from gasp.mng.ext   import shpextent_to_boundary
    from gasp.mng.joins import join_dfs
    from gasp.mng.df    import df_groupBy
    from gasp.to.rst    import shp_to_raster
    from gasp.fm        import tbl_to_obj
    from gasp.to.shp    import df_to_shp
    
    workspace = create_folder(os.path.join(
        os.path.dirname(mainLines), 'tmp_dt'
    ))
    
    # Create boundary file
    boundary = shpextent_to_boundary(
        mainLines, os.path.join(workspace, "bound.shp"),
        epsg
    )
    
    boundRst = shp_to_raster(boundary, None, 5, -99, os.path.join(
        workspace, "rst_base.tif"), epsg=epsg, api='gdal')
    
    # Start GRASS GIS Session
    gbase = run_grass(workspace, location="grs_loc", srs=boundRst)
    
    import grass.script       as grass
    import grass.script.setup as gsetup
    
    gsetup.init(gbase, workspace, "grs_loc", "PERMANENT")
    
    from gasp.spanlst.local   import combine
    from gasp.cpu.grs.spanlst import get_rst_report_data
    from gasp.to.shp.grs      import shp_to_grs, grs_to_shp
    from gasp.to.rst          import shp_to_raster
    
    # Add data to GRASS GIS
    mainVector = shp_to_grs(
        mainLines, get_filename(mainLines, forceLower=True))
    joinVector = shp_to_grs(
        joinLines, get_filename(joinLines, forceLower=True))
    
    mainRst = shp_to_raster(
        mainVector, mainId, None, None, "rst_" + mainVector, api='pygrass'
    )
    joinRst = shp_to_raster(
        joinVector, joinCol, None, None, "rst_" + joinVector, api='pygrass'
    )
    
    combRst = combine(mainRst, joinRst, "combine_rst", api="pygrass")
    
    combine_data = get_rst_report_data(combRst, UNITS="c")
    
    combDf = pandas.DataFrame(combine_data, columns=[
        "comb_cat", "rst_1", "rst_2", "ncells"
    ])
    combDf = combDf[combDf["rst_2"] != '0']
    combDf["ncells"] = combDf["ncells"].astype(int)
    
    gbdata = df_groupBy(combDf, ["rst_1"], "MAX", "ncells")
    
    fTable = join_dfs(gbdata, combDf, ["rst_1", "ncells"], ["rst_1", "ncells"])
    
    fTable["rst_2"] = fTable["rst_2"].astype(int)
    fTable = df_groupBy(
        fTable, ["rst_1", "ncells"],
        STAT='MIN', STAT_FIELD="rst_2"
    )
    
    mainLinesCat = grs_to_shp(
        mainVector, os.path.join(workspace, mainVector + '.shp'), 'line')
    
    mainLinesDf = tbl_to_obj(mainLinesCat)
    
    resultDf = join_dfs(
        mainLinesDf, fTable, "cat", "rst_1",
        onlyCombinations=None
    )
    
    resultDf.rename(columns={"rst_2" : joinCol}, inplace=True)
    
    resultDf = regulardf_to_geodf(resultDf, "geometry", epsg)
    
    df_to_shp(resultDf, outfile)
    
    return outfile
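
A hypothetical call; paths, field names and the EPSG code are placeholders:

joinLines_by_spatial_rel_raster(
    '/data/roads_main.shp', 'road_id',   # lines receiving the attribute
    '/data/roads_other.shp', 'speed',    # lines providing the attribute
    '/data/roads_joined.shp', 3763       # output and working EPSG
)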
Example #19
0
def thrd_matrix_od(origins, destinationShp, network, costCol, oneway, grsWork,
                   grsLoc, output):
    """
    Produce matrix OD using GRASS GIS - Thread MODE
    
    PROBLEM:
    * O programa baralha-se todo porque ha muitas sessoes do grass a serem 
    executadas. E preciso verificar se e possivel segregar as varias sessoes
    do grass
    """

    import os
    from threading import Thread
    from gasp.session import run_grass
    from gasp.oss import get_filename
    from gasp.oss.ops import create_folder
    from gasp.mng.split import splitShp_by_range
    from gasp.mng.gen import merge_feat

    # SPLIT ORIGINS IN PARTS
    originsFld = create_folder(os.path.join(grsWork, 'origins_parts'))

    originsList = splitShp_by_range(origins, 100, originsFld)

    gbase = run_grass(grsWork,
                      grassBIN="grass74",
                      location=grsLoc,
                      srs=network)

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, grsWork, grsLoc, 'PERMANENT')

    from gasp.to.shp.grs import shp_to_grs
    from gasp.to.shp.grs import grs_to_shp
    # Add Data to GRASS GIS
    rdvMain = shp_to_grs(network,
                         get_filename(network, forceLower=True),
                         asCMD=True)

    RESULTS = []
    R_FOLDER = create_folder(os.path.join(grsWork, 'res_parts'))

    def __prod_mtxod(O, D, THRD):
        result_part = prod_matrix(O,
                                  D,
                                  rdvMain,
                                  costCol,
                                  oneway,
                                  thrdId=THRD,
                                  asCmd=True)

        shp = grs_to_shp(result_part,
                         os.path.join(R_FOLDER, result_part + '.shp'),
                         geom_type="line",
                         lyrN=3,
                         asCMD=True)

        RESULTS.append(shp)

    thrds = []
    for i in range(len(originsList)):
        thrds.append(
            Thread(name='tk-{}'.format(str(i)),
                   target=__prod_mtxod,
                   args=(originsList[i], destinationShp, str(i))))

    for t in thrds:
        t.start()

    for t in thrds:
        t.join()

    merge_feat(RESULTS, output, api='pandas')

    return output
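
GRASS GIS keeps its session state in process-global environment variables, which is why the threads above end up sharing (and corrupting) one session. The usual fix is one process and one GRASS location per chunk of work; a sketch of that pattern with placeholder paths:

import multiprocessing

def _isolated_worker(work, ref_srs, proc_id):
    # Each process initialises its own location, so sessions
    # cannot interfere with one another
    from gasp.session import run_grass
    import grass.script.setup as gsetup

    loc = "loc_{}".format(proc_id)
    gbase = run_grass(work, grassBIN="grass74", location=loc, srs=ref_srs)
    gsetup.init(gbase, work, loc, 'PERMANENT')
    # ... import data, run prod_matrix and export the part here ...

procs = [multiprocessing.Process(target=_isolated_worker,
                                 args=('/tmp/work', '/data/net.shp', i))
         for i in range(4)]
for p in procs:
    p.start()
for p in procs:
    p.join()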
Example #20
0
def vector_based(osmdata,
                 nomenclature,
                 refRaster,
                 lulcShp,
                 overwrite=None,
                 dataStore=None,
                 RoadsAPI='SQLITE'):
    """
    Convert OSM Data into Land Use/Land Cover Information
    
    A vector-based approach.
    
    TODO: Add a detailed description.
    
    RoadsAPI Options:
    * SQLITE
    * POSTGIS
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import datetime
    import os
    import json
    # ************************************************************************ #
    # GASP dependencies #
    # ************************************************************************ #
    from gasp.oss.ops import create_folder
    from gasp.prop.rst import get_epsg_raster
    from gasp.session import run_grass
    if RoadsAPI == 'POSTGIS':
        from gasp.sql.mng.db import create_db
        from gasp.osm2lulc.utils import osm_to_pgsql
    else:
        from gasp.osm2lulc.utils import osm_to_sqdb
    from gasp.osm2lulc.utils import osm_project, add_lulc_to_osmfeat
    from gasp.mng.gen import merge_feat
    from gasp.osm2lulc.mod1 import grs_vector
    if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS':
        from gasp.osm2lulc.mod2 import roads_sqdb
    else:
        from gasp.osm2lulc.mod2 import grs_vec_roads
    from gasp.osm2lulc.m3_4 import grs_vect_selbyarea
    from gasp.osm2lulc.mod5 import grs_vect_bbuffer
    from gasp.osm2lulc.mod6 import vector_assign_pntags_to_build
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    if not os.path.exists(os.path.dirname(lulcShp)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcShp)))

    # Get Parameters to connect to PostgreSQL
    conPGSQL = json.load(
        open(
            os.path.join(os.path.dirname(os.path.abspath(__file__)),
                         'con-postgresql.json'),
            'r')) if RoadsAPI == 'POSTGIS' else None

    # Get EPSG of Reference Raster
    epsg = get_epsg_raster(refRaster)
    if not epsg:
        raise ValueError('Cannot get epsg code of ref raster')

    time_a = datetime.datetime.now().replace(microsecond=0)
    from gasp.osm2lulc.var import osmTableData, PRIORITIES

    workspace = os.path.join(os.path.dirname(lulcShp),
                             'osmtolulc') if not dataStore else dataStore

    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            create_folder(workspace, overwrite=True)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        create_folder(workspace)

    __priorities = PRIORITIES[nomenclature]
    time_b = datetime.datetime.now().replace(microsecond=0)
    if RoadsAPI != 'POSTGIS':
        # ******************************************************************** #
        # Convert OSM file to SQLITE DB #
        # ******************************************************************** #
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    else:
        # Convert OSM file to POSTGRESQL DB #
        conPGSQL["DATABASE"] = create_db(conPGSQL,
                                         os.path.splitext(
                                             os.path.basename(osmdata))[0],
                                         overwrite=True)
        osm_db = osm_to_pgsql(osmdata, conPGSQL)
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
                        osmTableData,
                        nomenclature,
                        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
        epsg,
        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI)
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Start a GRASS GIS Session #
    # ************************************************************************ #
    grass_base = run_grass(workspace,
                           grassBIN='grass76',
                           location='grloc',
                           srs=epsg)
    #import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')

    # ************************************************************************ #
    # IMPORT SOME GASP MODULES FOR GRASS GIS #
    # ************************************************************************ #
    from gasp.anls.ovlay import erase
    from gasp.prop.grs import rst_to_region
    from gasp.mng.genze import dissolve
    from gasp.mng.grstbl import add_and_update, reset_table
    from gasp.to.shp.grs import shp_to_grs, grs_to_shp
    from gasp.to.rst import rst_to_grs
    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(refRaster, 'extent_raster')
    rst_to_region(extRst)
    time_f = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    osmShps = []
    # ************************************************************************ #
    # 1 - Selection Rule #
    # ************************************************************************ #
    ruleOneShp, timeCheck1 = grs_vector(
        osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
        osmTableData['polygons'],
        apidb=RoadsAPI)
    osmShps.append(ruleOneShp)

    time_g = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 2 - Get Information About Roads Location #
    # ************************************************************************ #
    ruleRowShp, timeCheck2 = roads_sqdb(
        osm_db if RoadsAPI == 'SQLITE' else conPGSQL,
        osmTableData['lines'],
        osmTableData['polygons'],
        apidb=RoadsAPI
    ) if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS' else grs_vec_roads(
        osm_db, osmTableData['lines'], osmTableData['polygons'])

    osmShps.append(ruleRowShp)
    time_h = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 3 - Area Upper than #
    # ************************************************************************ #
    ruleThreeShp, timeCheck3 = grs_vect_selbyarea(
        osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
        osmTableData['polygons'],
        UPPER=True,
        apidb=RoadsAPI)

    osmShps.append(ruleThreeShp)
    time_l = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 4 - Area Lower than #
    # ************************************************************************ #
    ruleFourShp, timeCheck4 = grs_vect_selbyarea(
        osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
        osmTableData['polygons'],
        UPPER=False,
        apidb=RoadsAPI)

    osmShps.append(ruleFourShp)
    time_j = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 5 - Get data from lines table (railway | waterway) #
    # ************************************************************************ #
    ruleFiveShp, timeCheck5 = grs_vect_bbuffer(
        osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
        osmTableData["lines"],
        api_db=RoadsAPI)

    osmShps.append(ruleFiveShp)
    time_m = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 7 - Assign untagged Buildings to tags #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleSeven11, ruleSeven12, timeCheck7 = vector_assign_pntags_to_build(
            osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
            osmTableData['points'],
            osmTableData['polygons'],
            apidb=RoadsAPI)

        if ruleSeven11:
            osmShps.append(ruleSeven11)

        if ruleSeven12:
            osmShps.append(ruleSeven12)

        time_n = datetime.datetime.now().replace(microsecond=0)

    else:
        timeCheck7 = None
        time_n = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Produce LULC Map  #
    # ************************************************************************ #
    """
    Get Shps with all geometries related with one class - One Shape per Class
    """

    from gasp.mng.gen import same_attr_to_shp

    _osmShps = []
    for i in range(len(osmShps)):
        if not osmShps[i]: continue

        _osmShps.append(
            grs_to_shp(osmShps[i],
                       os.path.join(workspace, osmShps[i] + '.shp'),
                       'auto',
                       lyrN=1,
                       asCMD=True,
                       asMultiPart=None))

    _osmShps = same_attr_to_shp(_osmShps,
                                "cat",
                                workspace,
                                "osm_",
                                resultDict=True)
    del osmShps

    time_o = datetime.datetime.now().replace(microsecond=0)
    """
    Merge all Classes into one feature class using a priority rule
    """

    osmShps = {}
    for cls in _osmShps:
        if cls == '1':
            osmShps[1221] = shp_to_grs(_osmShps[cls], "osm_1221", asCMD=True)

        else:
            osmShps[int(cls)] = shp_to_grs(_osmShps[cls],
                                           "osm_" + cls,
                                           asCMD=True)

    # Erase overlapping areas by priority
    import copy
    osmNameRef = copy.deepcopy(osmShps)

    for e in range(len(__priorities)):
        if e + 1 == len(__priorities): break

        if __priorities[e] not in osmShps:
            continue
        else:
            for i in range(e + 1, len(__priorities)):
                if __priorities[i] not in osmShps:
                    continue
                else:
                    osmShps[__priorities[i]] = erase(
                        osmShps[__priorities[i]],
                        osmShps[__priorities[e]],
                        "{}_{}".format(osmNameRef[__priorities[i]], e),
                        notTbl=True,
                        api='pygrass')

    time_p = datetime.datetime.now().replace(microsecond=0)

    # Export all classes
    lst_merge = []
    for cls in osmShps:
        if cls == __priorities[0]:
            reset_table(osmShps[cls], {'cls': 'varchar(5)'}, {'cls': str(cls)})
        else:
            add_and_update(osmShps[cls], {'cls': 'varchar(5)'},
                           {'cls': str(cls)})

        ds = dissolve(osmShps[cls],
                      'dl_{}'.format(str(cls)),
                      'cls',
                      api="grass")

        lst_merge.append(
            grs_to_shp(ds,
                       os.path.join(workspace, "lulc_{}.shp".format(str(cls))),
                       'auto',
                       lyrN=1,
                       asCMD=True,
                       asMultiPart=None))

    time_q = datetime.datetime.now().replace(microsecond=0)

    merge_feat(lst_merge, lulcShp, api='pandas')

    time_r = datetime.datetime.now().replace(microsecond=0)

    return lulcShp, {
        0: ('set_settings', time_b - time_a),
        1: ('osm_to_sqdb', time_c - time_b),
        2: ('cls_in_sqdb', time_d - time_c),
        3: ('proj_data', time_e - time_d),
        4: ('set_grass', time_f - time_e),
        5: ('rule_1', time_g - time_f, timeCheck1),
        6: ('rule_2', time_h - time_g, timeCheck2),
        7: ('rule_3', time_l - time_h, timeCheck3),
        8: ('rule_4', time_j - time_l, timeCheck4),
        9: ('rule_5', time_m - time_j, timeCheck5),
        10: None if not timeCheck7 else
        ('rule_7', time_n - time_m, timeCheck7),
        11: ('disj_cls', time_o - time_n),
        12: ('priority_rule', time_p - time_o),
        13: ('export_cls', time_q - time_p),
        14: ('merge_cls', time_r - time_q)
    }
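
A hypothetical call; paths are placeholders and the nomenclature key is assumed to be one of those defined in gasp.osm2lulc.var.PRIORITIES:

lulc_shp, times = vector_based(
    '/data/lisboa.osm',        # OSM extract
    'URBAN_ATLAS',             # assumed nomenclature key in PRIORITIES
    '/data/ref_grid.tif',      # reference raster (sets EPSG and extent)
    '/data/lulc_lisboa.shp',
    overwrite=True, RoadsAPI='SQLITE'
)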
Example #21
0
def optimized_union_anls(lyr_a,
                         lyr_b,
                         outShp,
                         ref_boundary,
                         epsg,
                         workspace=None,
                         multiProcess=None):
    """
    Optimized Union Analysis
    
    Goal: optimize v.overlay performance for Union operations
    """

    import os
    from gasp.oss import get_filename
    from gasp.mng.sample import create_fishnet
    from gasp.mng.feat import eachfeat_to_newshp
    from gasp.mng.gen import merge_feat
    from gasp.session import run_grass
    from gasp.anls.exct import split_shp_by_attr
    # clip() and union() are used below but not imported in the original
    # snippet; they are assumed to be module-level helpers from the
    # gasp.anls namespace

    if workspace:
        if not os.path.exists(workspace):
            from gasp.oss.ops import create_folder

            create_folder(workspace, overwrite=True)

    else:
        from gasp.oss.ops import create_folder

        workspace = create_folder(
            os.path.join(os.path.dirname(outShp), "union_work"))

    # Create Fishnet
    gridShp = create_fishnet(ref_boundary,
                             os.path.join(workspace, 'ref_grid.shp'),
                             rowN=4,
                             colN=4)

    # Split Fishnet in several files
    cellsShp = eachfeat_to_newshp(gridShp, workspace, epsg=epsg)

    if not multiProcess:
        # INIT GRASS GIS Session
        grsbase = run_grass(workspace, location="grs_loc", srs=ref_boundary)

        import grass.script.setup as gsetup

        gsetup.init(grsbase, workspace, "grs_loc", 'PERMANENT')

        # Add data to GRASS GIS
        from gasp.to.shp.grs import shp_to_grs
        cellsShp = [
            shp_to_grs(shp, get_filename(shp), asCMD=True) for shp in cellsShp
        ]

        LYR_A = shp_to_grs(lyr_a, get_filename(lyr_a), asCMD=True)
        LYR_B = shp_to_grs(lyr_b, get_filename(lyr_b), asCMD=True)

        # Clip Layers A and B for each CELL in fishnet
        LYRS_A = [
            clip(LYR_A, cellsShp[x], LYR_A + "_" + str(x), api_gis="grass_cmd")
            for x in range(len(cellsShp))
        ]
        LYRS_B = [
            clip(LYR_B, cellsShp[x], LYR_B + "_" + str(x), api_gis="grass_cmd")
            for x in range(len(cellsShp))
        ]

        # Union SHPS
        UNION_SHP = [
            union(LYRS_A[i], LYRS_B[i], "un_{}".format(i), api_gis="grass_cmd")
            for i in range(len(cellsShp))
        ]

        # Export Data
        from gasp.to.shp.grs import grs_to_shp
        _UNION_SHP = [
            grs_to_shp(shp, os.path.join(workspace, shp + ".shp"), "area")
            for shp in UNION_SHP
        ]

    else:

        def clip_and_union(la, lb, cell, work, ref, proc, output):
            # Start GRASS GIS Session
            grsbase = run_grass(work, location="proc_" + str(proc), srs=ref)
            import grass.script.setup as gsetup
            gsetup.init(grsbase, work, "proc_" + str(proc), 'PERMANENT')

            # Import GRASS GIS modules
            from gasp.to.shp.grs import shp_to_grs
            from gasp.to.shp.grs import grs_to_shp

            # Add data to GRASS
            a = shp_to_grs(la, get_filename(la), asCMD=True)
            b = shp_to_grs(lb, get_filename(lb), asCMD=True)
            c = shp_to_grs(cell, get_filename(cell), asCMD=True)

            # Clip
            a_clip = clip(a, c, "{}_clip".format(a), api_gis="grass_cmd")
            b_clip = clip(b, c, "{}_clip".format(b), api_gis="grass_cmd")

            # Union
            u_shp = union(a_clip,
                          b_clip,
                          "un_{}".format(c),
                          api_gis="grass_cmd")

            # Export
            o = grs_to_shp(u_shp, output, "area")

        import multiprocessing

        thrds = [
            multiprocessing.Process(
                target=clip_and_union,
                name="th-{}".format(i),
                args=(lyr_a, lyr_b, cellsShp[i],
                      os.path.join(workspace,
                                   "th_{}".format(i)), ref_boundary, i,
                      os.path.join(workspace, "uniao_{}.shp".format(i))))
            for i in range(len(cellsShp))
        ]

        for t in thrds:
            t.start()

        for t in thrds:
            t.join()

        _UNION_SHP = [
            os.path.join(workspace, "uniao_{}.shp".format(i))
            for i in range(len(cellsShp))
        ]

    # Merge all union into the same layer
    MERGED_SHP = merge_feat(_UNION_SHP, outShp, api="ogr2ogr")

    return outShp
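
A hypothetical call; with multiProcess set, each fishnet cell is clipped and unioned in its own process and GRASS location, as defined above:

optimized_union_anls(
    '/data/lulc_a.shp', '/data/lulc_b.shp',   # layers to union
    '/data/union_ab.shp',                     # output
    '/data/boundary.shp',                     # reference boundary
    3763,                                     # placeholder EPSG code
    multiProcess=True
)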
Example #22
0
def population_within_point_buffer(netDataset,
                                   rdvName,
                                   pointShp,
                                   populationShp,
                                   popField,
                                   bufferDist,
                                   epsg,
                                   output,
                                   workspace=None,
                                   bufferIsTimeMinutes=None,
                                   useOneway=None):
    """
    Assign to points the population within a certain distance (metric or time)
    
    * Creates a Service Area Polygon for each point in pointShp;
    * Intersect the Service Area Polygons with the populationShp;
    * Count the number of persons within each Service Area Polygon
    (this number is weighted by the % of the statistical unit's area
    intersected with the Service Area Polygon).
    """

    import arcpy
    import os
    from geopandas import GeoDataFrame
    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.cpu.arcg.anls.ovlay import intersect
    from gasp.mng.gen import copy_feat
    from gasp.cpu.arcg.mng.fld import add_geom_attr
    from gasp.cpu.arcg.mng.fld import add_field
    from gasp.cpu.arcg.mng.fld import calc_fld
    from gasp.mng.genze import dissolve
    from gasp.mob.arctbx.svarea import service_area_use_meters
    from gasp.mob.arctbx.svarea import service_area_polygon
    from gasp.fm import tbl_to_obj
    from gasp.to.shp import df_to_shp

    workspace = os.path.dirname(pointShp) if not workspace else workspace

    if not os.path.exists(workspace):
        from gasp.oss.ops import create_folder
        workspace = create_folder(workspace, overwrite=False)

    # Copy population layer
    populationShp = copy_feat(
        populationShp,
        os.path.join(workspace,
                     'cop_{}'.format(os.path.basename(populationShp))),
        gisApi='arcpy')

    # Create layer
    pntLyr = feat_lyr(pointShp)
    popLyr = feat_lyr(populationShp)

    # Create Service Area
    if not bufferIsTimeMinutes:
        servArea = service_area_use_meters(
            netDataset,
            rdvName,
            bufferDist,
            pointShp,
            os.path.join(workspace,
                         'servare_{}'.format(os.path.basename(pointShp))),
            OVERLAP=False,
            ONEWAY=useOneway)

    else:
        servArea = service_area_polygon(
            netDataset,
            rdvName,
            bufferDist,
            pointShp,
            os.path.join(workspace,
                         "servare_{}".format(os.path.basename(pointShp))),
            ONEWAY_RESTRICTION=useOneway,
            OVERLAP=None)

    servAreaLyr = feat_lyr(servArea)

    # Add Column with Polygons area to Feature Class population
    add_geom_attr(popLyr, "total", geom_attr="AREA")

    # Intersect buffer and Population Feature Class
    intSrc = intersect([servAreaLyr, popLyr],
                       os.path.join(workspace, "int_servarea_pop.shp"))

    intLyr = feat_lyr(intSrc)

    # Get area of intersected statistical unities with population
    add_geom_attr(intLyr, "partarea", geom_attr="AREA")

    # Get population weighted by area intersected
    calc_fld(intLyr, "population",
             "((([partarea] * 100) / [total]) * [{}]) / 100".format(popField),
             {
                 "TYPE": "DOUBLE",
                 "LENGTH": "10",
                 "PRECISION": "3"
             })

    # Dissolve service area by Facility ID
    diss = dissolve(intLyr,
                    os.path.join(workspace, 'diss_servpop.shp'),
                    "FacilityID",
                    statistics="population SUM")

    # Get original Point FID from FacilityID
    calc_fld(diss, "pnt_fid", "[FacilityID] - 1", {
        "TYPE": "INTEGER",
        "LENGTH": "5",
        "PRECISION": None
    })

    dfPnt = tbl_to_obj(pointShp)
    dfDiss = tbl_to_obj(diss)

    dfDiss.rename(columns={"SUM_popula": "n_pessoas"}, inplace=True)

    resultDf = dfPnt.merge(dfDiss,
                           how='inner',
                           left_index=True,
                           right_on="pnt_fid")

    resultDf.drop('geometry_y', axis=1, inplace=True)

    resultDf = GeoDataFrame(resultDf,
                            crs={'init': 'epsg:{}'.format(epsg)},
                            geometry='geometry_x')

    df_to_shp(resultDf, output)

    return output
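
The calc_fld expression above is plain areal interpolation: each statistical unit contributes population in proportion to the share of its area inside the service area. The same arithmetic as a standalone sketch:

def weighted_population(part_area, total_area, population):
    """Population assigned to the intersected piece, proportional to
    the share of the statistical unit covered by the service area."""
    return (float(part_area) / total_area) * population

# a unit of 2 km2 with 500 residents, 0.5 km2 inside the service area
print(weighted_population(0.5, 2.0, 500))  # 125.0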
Example #23
0
def arcg_mean_time_WByPop2(netDt,
                           rdv,
                           infraestruturas,
                           unidades,
                           conjuntos,
                           popf,
                           w,
                           output,
                           oneway=None):
    """
    Tempo medio ponderado pela populacao residente a infra-estrutura mais
    proxima (min)
    
    * netDt = Path to Network Dataset
    * infraestruturas = Points of destiny
    * unidades = BGRI; Freg; Concelhos
    * conjuntos = Freg; Concelhos; NUT - field
    * popf = Field with the population of the statistic unity
    * w = Workspace
    * output = Path to store the final output
    * rdv = Name of feature class with the streets network
    """

    import arcpy
    import os
    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.cpu.arcg.mng.feat import feat_to_pnt
    from gasp.cpu.arcg.mng.fld import add_field, calc_fld
    from gasp.cpu.arcg.mng.joins import join_table
    from gasp.mng.genze import dissolve
    from gasp.mng.gen import copy_feat
    from gasp.mob.arctbx.closest import closest_facility
    from gasp.fm import tbl_to_obj

    def get_freg_denominator(shp, groups, population, fld_time="Total_Minu"):
        cursor = arcpy.SearchCursor(shp)

        groups_sum = {}
        for lnh in cursor:
            group = lnh.getValue(groups)
            nrInd = float(lnh.getValue(population))
            time = float(lnh.getValue(fld_time))

            if group not in groups_sum.keys():
                groups_sum[group] = time * nrInd

            else:
                groups_sum[group] += time * nrInd

        del cursor, lnh

        return groups_sum

    if not os.path.exists(w):
        from gasp.oss.ops import create_folder
        w = create_folder(w, overwrite=False)

    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = w

    # Start Procedure #
    # Create a copy of the statistical units to preserve the original data
    copy_unities = copy_feat(unidades,
                             os.path.join(w, os.path.basename(unidades)),
                             gisApi='arcpy')

    # Generate centroids of the statistical units - unidades
    lyr_unidades = feat_lyr(copy_unities)
    pnt_unidades = feat_to_pnt(lyr_unidades, 'pnt_unidades.shp')

    # Network Processing - Distance between CENTROIDS and Destination points
    closest_facility(netDt,
                     rdv,
                     infraestruturas,
                     pnt_unidades,
                     os.path.join(w, "cls_table.dbf"),
                     oneway_restriction=oneway)
    add_field("cls_table.dbf", 'j', "SHORT", "6")
    calc_fld("cls_table.dbf", 'j', "[IncidentID]-1")
    join_table(lyr_unidades, "FID", "cls_table.dbf", "j", "Total_Minu")
    del lyr_unidades

    # Compute the sums per parish (group)
    # To GeoDf
    unidadesDf = tbl_to_obj(copy_unities)
    """
    groups = get_freg_denominator(lyr_unidades, conjuntos, popf)
    add_field(lyr_unidades, "tm", "FLOAT", "10", "3")
    
    cs = arcpy.UpdateCursor(lyr_unidades)
    linha = cs.next()
    while linha:
        group = linha.getValue(conjuntos)
        t = float(linha.getValue("Total_Minu"))
        p = int(linha.getValue(popf))
        total = groups[group]
        indi = ((t * p) / total) * t
        linha.setValue("tm", indi)
        cs.updateRow(linha)
        linha = cs.next()
    
    return dissolve(lyr_unidades, output, conjuntos, "tm SUM")"""
    return unidadesDf
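
Since the function currently returns the units GeoDataFrame instead of the dissolved layer, the group statistic can be finished in pandas. A sketch of a conventional population-weighted mean time per group (column names are placeholders; 'Total_Minu' follows the join above):

import pandas

def weighted_mean_time(df, group_col, pop_col, time_col='Total_Minu'):
    """Population-weighted mean minutes to the nearest facility."""
    tmp = pandas.DataFrame({
        'tp': df[time_col] * df[pop_col],  # person-minutes per unit
        'p': df[pop_col],
        'g': df[group_col]
    })
    g = tmp.groupby('g').sum()
    return g['tp'] / g['p']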
Example #24
0
def pgtables_to_layer_withStyle_by_col(pgtables,
                                       sldData,
                                       pgsql_con,
                                       workName=None,
                                       storeName=None,
                                       geoserver_con={
                                           'USER': '******',
                                           'PASSWORD': '******',
                                           'HOST': 'localhost',
                                           'PORT': '8888'
                                       },
                                       sldGeom='Polygon',
                                       DATATYPE='QUANTITATIVE',
                                       TABLE_DESIGNATION=None,
                                       COL_DESIGNATION=None,
                                       exclude_cols=None,
                                       pathToSLDfiles=None):
    """
    Create a new Geoserver Workspace, create a postgis store and one layer
    for each table in 'pgtables'. Each layer will have
    multiple styles - one style per column.
    
    Parameters:
    1) pgtables
    - List of PSQL tables to be transformed as Geoserver Layers
    
    2) sldData
    - sldData should be a xls table with the styles specifications.
    For QUANTITATIVE DATA: The table should have two sheets: one for
    colors and other for intervals:
    
    COLORS SHEET STRUCTURE (Sheet Index = 0)
    (the code below also expects an OPACITY column in this sheet):
    cls_id | R | G | B | STROKE_R | STROKE_G | STROKE_B | STROKE_W
       1   | X | X | X |    X     |    X     |    X     |    1
       2   | X | X | X |    X     |    X     |    X     |    1
       3   | X | X | X |    X     |    X     |    X     |    1
       4   | X | X | X |    X     |    X     |    X     |    1
       5   | X | X | X |    X     |    X     |    X     |    1
    
    INTERVALS SHEET STRUCTURE (Sheet Index = 1):
          | 0 | 1 |  2 |  3 |  4 |  5
    col_0 | 0 | 5 | 10 | 15 | 20 | 25
    col_1 | 0 | 6 | 12 | 18 | 24 | 30
    ...
    col_n | 0 | 5 | 10 | 15 | 20 | 25
    
    For CATEGORICAL DATA: The table should have only one sheet:
    CATEGORICAL SHEET STRUCTURE
           | R | G | B | STROKE_R | STROKE_G | STROKE_B | STROKE_W
    attr_1 | X | X | X |    X     |    X     |    X     |    1
    attr_2 | X | X | X |    X     |    X     |    X     |    1
    ...
    attr_n | X | X | X |    X     |    X     |    X     |    1
    
    3) pgsql_con
    - Dict with parameters that will be used to connect to PostgreSQL
    d = {
        'HOST' : 'localhost', 'PORT' : '5432', 'USER' : 'postgres',
        'PASSWORD' : 'admin', 'DATABASE' : 'db_name'
    }
    
    4) workName
    - String with the name of the Geoserver workspace that will be created
    
    5) storeName
    - String with the name of the Geoserver store that will be created
    
    6) geoserver_con
    - Dict with parameters to connect to Geoserver
    
    7) sldGeom
    - Data Geometry. At the moment, the options 'Polygon' and 'Line' are
    valid.
    
    8) DATATYPE='QUANTITATIVE' | 'CATEGORICAL'
    
    9) TABLE_DESIGNATION
    - Table with the association between pgtables name and the designation
    to be used to name the Geoserver Layer.
    
    10) COL_DESIGNATION 
    - xls table with the association between each column and the
    style that will be used to present the information of that column.
    The style designation must not contain blank characters.
    
    11) exclude_cols
    - One style will be created for each column in one pgtable. The columns
    in 'exclude_cols' will not have a style.
    
    12) pathToSLDfiles
    - Absolute path to the folder where the SLD (Style Layer Descriptor)
    will be stored.
    """

    import os
    from gasp import goToList
    from gasp.fm import tbl_to_obj
    from gasp.oss.ops import create_folder
    from gasp.sql.mng.fld import cols_name
    from gasp.web.geosrv.wspace import create_workspace
    from gasp.web.geosrv.stores import create_psqlstore
    from gasp.web.geosrv.lyrs import publish_postgis_layer
    from gasp.web.geosrv.styl import create_style
    from gasp.web.geosrv.styl import list_styles
    from gasp.web.geosrv.styl import del_style
    from gasp.web.geosrv.styl.assign import assign_style_to_layer
    from gasp.web.geosrv.styl.sld import write_sld

    # Sanitize PGtables
    pgtables = goToList(pgtables)

    if not pgtables:
        raise ValueError('pgtables value is not valid!')

    exclude_cols = goToList(exclude_cols)

    STY_DESIGNATION = tbl_to_obj(COL_DESIGNATION,
                                 useFirstColAsIndex=True,
                                 output='dict',
                                 colsAsArray=True) if COL_DESIGNATION else None

    LYR_DESIGNATION = tbl_to_obj(
        TABLE_DESIGNATION,
        useFirstColAsIndex=True,
        output='dict',
        colsAsArray=True) if TABLE_DESIGNATION else None

    # Get intervals and colors data
    if DATATYPE == 'QUANTITATIVE':
        if os.path.exists(sldData):
            FF = os.path.splitext(sldData)[1]
            if FF == '.xls' or FF == '.xlsx':
                colorsDict = tbl_to_obj(sldData,
                                        sheet=0,
                                        useFirstColAsIndex=True,
                                        output='dict')
                intervalsDict = tbl_to_obj(sldData,
                                           sheet=1,
                                           useFirstColAsIndex=True,
                                           output='dict')

            else:
                raise ValueError(
                    ('At the moment, for DATATYPE QUANTITATIVE, sldData '
                     'has to be a xls table'))

        else:
            raise ValueError(('{} is not a valid file').format(sldData))

    elif DATATYPE == 'CATEGORICAL':
        if os.path.exists(sldData):
            if os.path.splitext(sldData)[1] in ('.xls', '.xlsx'):
                colorsDict = tbl_to_obj(sldData,
                                        sheet=0,
                                        useFirstColAsIndex=True,
                                        output='dict')

            else:
                raise ValueError(
                    ('At the moment, for DATATYPE CATEGORICAL, sldData '
                     'has to be a xls table'))
        else:
            raise ValueError(('{} is not a valid file').format(sldData))

    else:
        raise ValueError('{} is not available at the moment'.format(DATATYPE))

    # Create Workspace
    workName = 'w_{}'.format(
        pgsql_con['DATABASE']) if not workName else workName

    create_workspace(workName, conf=geoserver_con, overwrite=True)

    # Create Store
    storeName = pgsql_con['DATABASE'] if not storeName else storeName
    create_psqlstore(storeName, workName, pgsql_con, gs_con=geoserver_con)

    # Create folder for sld's
    wTmp = create_folder(
        os.path.join(os.path.dirname(sldData),
                     'sldFiles')) if not pathToSLDfiles else pathToSLDfiles

    # List styles in geoserver
    STYLES = list_styles(conf=geoserver_con)

    # For each table in PGTABLES
    for PGTABLE in pgtables:
        # Publish Postgis table
        TITLE = None if not LYR_DESIGNATION else LYR_DESIGNATION[PGTABLE][0]
        publish_postgis_layer(workName,
                              storeName,
                              PGTABLE,
                              title=TITLE,
                              gs_con=geoserver_con)

        # List PGTABLE columns
        pgCols = cols_name(pgsql_con, PGTABLE)

        # For each column
        for col in pgCols:
            if exclude_cols and col in exclude_cols:
                continue

            STYLE_NAME = '{}_{}'.format(
                PGTABLE, STY_DESIGNATION[col]
                [0]) if STY_DESIGNATION else '{}_{}'.format(PGTABLE, col)

            if STYLE_NAME in STYLES:
                del_style(STYLE_NAME, geoserver_con)

            # Create Object with association between colors and intervals
            d = {}
            OPACITY = str(colorsDict[1]['OPACITY'])
            for i in colorsDict:
                d[i] = {
                    'R': colorsDict[i]['R'],
                    'G': colorsDict[i]['G'],
                    'B': colorsDict[i]['B']
                }

                if DATATYPE == 'QUANTITATIVE':
                    d[i]['min'] = intervalsDict[col][i - 1]
                    d[i]['max'] = intervalsDict[col][i]

                if 'STROKE_R' in colorsDict[i] and \
                   'STROKE_G' in colorsDict[i] and \
                   'STROKE_B' in colorsDict[i]:
                    d[i]['STROKE'] = {
                        'R': colorsDict[i]['STROKE_R'],
                        'G': colorsDict[i]['STROKE_G'],
                        'B': colorsDict[i]['STROKE_B']
                    }

                    if 'STROKE_W' in colorsDict[i]:
                        d[i]['STROKE']['WIDTH'] = colorsDict[i]['STROKE_W']

            # Create SLD
            sldFile = write_sld(col,
                                d,
                                os.path.join(wTmp, '{}.sld'.format(col)),
                                geometry=sldGeom,
                                DATA=DATATYPE,
                                opacity=OPACITY)

            # Create Style
            create_style(STYLE_NAME, sldFile, conf=geoserver_con)

            # Apply SLD
            assign_style_to_layer(STYLE_NAME, PGTABLE, geoserver_con)
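
A hypothetical call covering the QUANTITATIVE path; table names, the xls path and both connections are placeholders:

pgtables_to_layer_withStyle_by_col(
    ['pop_2011', 'pop_2021'],
    '/data/styles.xls',            # sheet 0 = colors, sheet 1 = intervals
    {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
     'PASSWORD': 'admin', 'DATABASE': 'gisdb'},
    workName='demografia', storeName='gisdb_store',
    geoserver_con={'USER': 'admin', 'PASSWORD': 'geoserver',
                   'HOST': 'localhost', 'PORT': '8080'},
    sldGeom='Polygon', DATATYPE='QUANTITATIVE',
    exclude_cols=['ogc_fid', 'geom']
)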
Example #25
0
def cost_surface(dem,
                 lulc,
                 cls_lulc,
                 prod_lulc,
                 roads,
                 kph,
                 barr,
                 grass_location,
                 output,
                 grass_path=None):
    """
    Tool for make a cost surface based on the roads, slope, land use and
    physical barriers. ach cell has a value that represents the resistance to
    the movement.
    """

    import os

    from gasp.oss.ops import create_folder
    from gasp.os import os_name
    from gasp.session import run_grass
    from gasp.prop.rst import get_cellsize
    from gasp.prop.rst import rst_distinct

    from .constants import lulc_weight
    from .constants import get_slope_categories
    """
    Auxiliar Methods
    """
    def edit_lulc(shp, fld_cls, new_cls):
        # FT_TF_GRASS is assumed to be a module-level format-conversion
        # helper (vector <-> GRASS); the original snippet omits its import
        FT_TF_GRASS(shp, 'lulc', 'None')
        add_field('lulc', 'leg', 'INT')
        for key in new_cls.keys():
            l = new_cls[key]['cls']
            sql = " OR ".join([
                "{campo}='{value}'".format(campo=fld_cls, value=i) for i in l
            ])
            update_table('lulc', 'leg', int(key), sql)
        return {'shp': 'lulc', 'fld': 'leg'}

    def combine_to_cost(rst_combined, lst_rst, work, slope_weight,
                        rdv_cos_weight, cellsize, mode_movement):
        # The tool r.report doesn't work properly, so we need some
        # additional information
        l = []
        for i in lst_rst:
            FT_TF_GRASS(i, os.path.join(work, i + '.tif'), 'None')
            values = rst_distinct(os.path.join(work, i + '.tif'),
                                  gisApi='gdal')
            l.append(min(values))
        # ******
        # Now, we can procede normaly
        txt_file = os.path.join(work, 'text_combine.txt')
        raster_report(rst_combined, txt_file)
        c = 0
        dic_combine = {}
        with open(txt_file, 'r') as open_txt:
            for line in open_txt.readlines():
                try:
                    if c == 4:
                        dic_combine[0] = [str(l[0]), str(l[1])]
                    elif c >= 5:
                        pl = line.split('|')
                        cat = pl[2].split('; ')
                        cat1 = cat[0].split(' ')
                        cat2 = cat[1].split(' ')
                        dic_combine[int(pl[1])] = [cat1[1], cat2[1]]
                    c += 1
                except:
                    break

        cst_dic = {}
        for key in dic_combine.keys():
            cls_slope = int(dic_combine[key][0])
            cos_vias = int(dic_combine[key][1])
            if cos_vias >= 6:
                weight4slope = slope_weight[cls_slope]['rdv']
                if mode_movement == 'pedestrian':
                    weight4other = (3600.0 * cellsize) / (5.0 * 1000.0)
                else:
                    weight4other = (3600.0 * cellsize) / (cos_vias * 1000.0)
            else:
                weight4slope = slope_weight[cls_slope]['cos']
                weight4other = rdv_cos_weight[cos_vias]['weight']
            cst_dic[key] = (weight4slope * weight4other) * 10000000.0
        return cst_dic

    def Rules4CstSurface(dic, work):
        txt = open(os.path.join(work, 'cst_surface.txt'), 'w')
        for key in dic.keys():
            txt.write('{cat}  = {cst}\n'.format(cat=str(key),
                                                cst=str(dic[key])))
        txt.close()
        return os.path.join(work, 'cst_surface.txt')

    """
    Prepare GRASS GIS Environment
    """
    workspace = os.path.dirname(grass_location)
    location = os.path.basename(grass_location)
    # Start GRASS GIS Engine
    grass_base = run_grass(workspace, location, dem, win_path=grass_path)
    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, location, 'PERMANENT')

    # Import GRASS GIS Modules
    from gasp.cpu.grs import grass_converter
    from gasp.spanlst.surf import slope
    from gasp.spanlst.rcls import reclassify
    from gasp.spanlst.rcls import interval_rules
    from gasp.spanlst.rcls import category_rules
    from gasp.spanlst.rcls import grass_set_null
    from gasp.mng.grstbl import add_field, update_table
    from gasp.anls.ovlay import union
    from gasp.to.rst import rst_to_grs, grs_to_rst
    from gasp.to.rst import shp_to_raster
    from gasp.to.shp.grs import shp_to_grs
    from gasp.cpu.grs.spanlst import mosaic_raster
    from gasp.spanlst.local import combine
    from gasp.spanlst.algebra import rstcalc
    from gasp.cpu.grs.spanlst import raster_report
    """Global variables"""
    # Workspace for temporary files
    wTmp = create_folder(os.path.join(workspace, 'tmp'))

    # Cellsize
    cellsize = float(get_cellsize(dem), gisApi='gdal')
    # Land Use Land Cover weights
    lulcWeight = lulc_weight(prod_lulc, cellsize)
    # Slope classes and weights
    slope_cls = get_slope_categories()
    """Make Cost Surface"""
    # Generate slope raster
    rst_to_grs(dem, 'dem')
    slope('dem', 'rst_slope', api="pygrass")

    # Reclassify Slope
    rulesSlope = interval_rules(slope_cls, os.path.join(wTmp, 'slope.txt'))
    reclassify('rst_slope', 'recls_slope', rulesSlope)

    # LULC - Dissolve, union with barriers and conversion to raster
    lulc_shp = edit_lulc(lulc, cls_lulc, lulc_weight)
    shp_to_grs(barr, 'barriers')
    union(lulc_shp['shp'], 'barriers', 'barrcos', api_gis="grass")
    update_table('barrcos', 'a_' + lulc_shp['fld'], 99, 'b_cat=1')
    shp_to_raster('barrcos',
                  'a_' + lulc_shp['fld'],
                  None,
                  None,
                  'rst_barrcos',
                  api='pygrass')

    # Reclassify this raster - convert the values 99 to NULL or NODATA
    grass_set_null('rst_barrcos', 99)

    # Add the roads layer to the GRASS GIS
    shp_to_grs(roads, 'rdv')
    if kph == 'pedestrian':
        add_field('rdv', 'foot', 'INT')
        update_table('rdv', 'foot', 50, 'foot IS NULL')
        shp_to_raster('rdv', 'foot', None, None, 'rst_rdv', api='pygrass')
    else:
        shp_to_raster('rdv', kph, None, None, 'rst_rdv', api='pygrass')

    # Merge LULC/BARR and Roads
    mosaic_raster('rst_rdv', 'rst_barrcos', 'rdv_barrcos')

    # Combine LULC/BARR/ROADS with Slope
    combine('recls_slope', 'rdv_barrcos', 'rst_combine', api="pygrass")
    """
    Estimating cost for every combination at rst_combine
    The order of the rasters on the following list has to be the same of
    GRASS Combine"""
    cst = combine_to_cost('rst_combine', ['recls_slope', 'rdv_barrcos'], wTmp,
                          slope_cls, lulc_weight, cell_size, kph)

    # Reclassify combined rst
    rulesSurface = category_rules(cst, os.path.join('r_surface.txt'))
    reclassify('rst_combine', 'cst_tmp', rulesSurface)
    rstcalc('cst_tmp / 10000000.0', 'cst_surface', api='pygrass')
    grs_to_rst('cst_surface', output)
Example #26
0
def cstDistance_with_motorway(cst_surface, motorway, fld_motorway, nodes_start,
                              nodes_end, pnt_destiny, grass_location,
                              isolines):
    """
    Produce a surface representing the acumulated cost of each cell to a
    destination point considering the false intersections caused by a non
    planar graph
    """

    import os

    from gasp.oss.ops import create_folder
    from gasp.prop.ff import drv_name
    from gasp.cpu.grs.spanlst import rseries
    from gasp.spanlst.algebra import rstcalc
    from gasp.spanlst.dist import rcost
    from gasp.to.rst import rst_to_grs
    from gasp.to.rst import shp_to_raster
    from gasp.cpu.gdl.sampling import gdal_values_to_points
    from pysage.tools_thru_api.gdal.ogr import OGR_CreateNewShape
    """
    Auxiliar Methods
    """
    def dist_to_nodes(pnt_shp, cstSurface, string, w):
        nodes = ogr.GetDriverByName(drv_name(pnt_shp)).Open(pnt_shp, 0)

        nodesLyr = nodes.GetLayer()

        c = 0
        dicNodes = {}
        for pnt in nodesLyr:
            geom = pnt.GetGeometryRef()
            point = geom.ExportToWkb()
            OGR_CreateNewShape(
                OGR_GetDriverName(pnt_shp),
                os.path.join(w, '{pnt}_{o}.shp'.format(pnt=string, o=str(c))),
                ogr.wkbPoint, [point])
            FT_TF_GRASS(
                os.path.join(w, '{pnt}_{o}.shp'.format(pnt=string, o=str(c))),
                '{pnt}_{o}'.format(pnt=string, o=str(c)), 'None')
            GRASS_CostDistance(cstSurface, '{pnt}_{o}'.format(pnt=string,
                                                              o=str(c)),
                               'cst_{pnt}_{a}'.format(pnt=string, a=str(c)))
            dicNodes['{pnt}_{o}'.format(pnt=string, o=str(c))] = [
                os.path.join(w, '{pnt}_{o}.shp'.format(pnt=string, o=str(c))),
                'cst_{pnt}_{a}'.format(pnt=string, a=str(c))
            ]
            c += 1
        return dicNodes

    """GRASS GIS Configuration"""
    # Workspace for temporary files
    wTmp = create_folder(os.path.join(os.path.dirname(grass_location), 'tmp'))
    """Make Accessibility Map"""
    # Add Cost Surface to GRASS GIS
    convert(cst_surface, 'cst_surface')
    # Add Destination To GRASS
    convert(pnt_destiny, 'destination')

    # Run r.cost with only with a secundary roads network
    rcost('cst_surface', 'destination', 'cst_dist_secun')

    # We have to know if the path through motorway implies minor cost.
    # Add primary roads to grass
    convert(motorway, 'rdv_prim', 'None')

    # We need a cost surface only with the cost of motorway roads
    shp_to_raster('rdv_prim',
                  fld_motorway,
                  None,
                  None,
                  'rst_rdv',
                  api='pygrass')
    rstcalc('(3600.0 * {cs}) / (rst_rdv * 1000.0)'.format(
        cs=get_cellsize(cst_surface, gisApi='gdal')),
            'cst_motorway',
            api='grass')

    # For each node of entrance into a motorway, we need to know:
    # - the distance to the entrance node;
    # - the distance between the entrance and every exit node
    # - the distance between the exit and the destination
    # Geting the distance to the entrance node
    entranceNodes = dist_to_nodes(nodes_start, 'cst_surface', 'start', wTmp)
    # Geting the distances to all entrance nodes
    exitNodes = dist_to_nodes(nodes_end, 'cst_surface', 'exit', wTmp)

    # Getting the values needed
    for start_pnt in entranceNodes.keys():
        for exit_pnt in exitNodes.keys():
            GRASS_CostDistance(
                'cst_motorway', exit_pnt,
                'cst2exit_{a}_{b}'.format(a=str(start_pnt[-1]),
                                          b=str(exit_pnt[-1])))
            FT_TF_GRASS(
                'cst2exit_{a}_{b}'.format(a=str(start_pnt[-1]),
                                          b=str(exit_pnt[-1])),
                os.path.join(
                    wTmp, 'cst2exit_{a}_{b}.tif'.format(a=str(start_pnt[-1]),
                                                        b=str(exit_pnt[-1]))),
                'None')
            cst_start_exit = GDAL_ExtractValuesByPoint(
                entranceNodes[start_pnt][0],
                os.path.join(
                    wTmp, 'cst2exit_{a}_{b}.tif'.format(a=str(start_pnt[-1]),
                                                        b=str(exit_pnt[-1]))))
            if os.path.isfile(
                    os.path.join(wTmp,
                                 exitNodes[exit_pnt][1] + '.tif')) == False:
                FT_TF_GRASS(
                    exitNodes[exit_pnt][1],
                    os.path.join(wTmp, exitNodes[exit_pnt][1] + '.tif'),
                    'None')
            cst_exit_destination = GDAL_ExtractValuesByPoint(
                pnt_destiny, os.path.join(wTmp,
                                          exitNodes[exit_pnt][1] + '.tif'))
            GRASS_RasterCalculator(
                '{rst} + {a} + {b}'.format(rst=entranceNodes[start_pnt][1],
                                           a=str(cst_start_exit[0]),
                                           b=str(min(cst_exit_destination))),
                'cst_path_{a}_{b}'.format(a=str(start_pnt[-1]),
                                          b=str(exit_pnt[-1])))
            lst_outputs.append('cst_path_{a}_{b}'.format(a=str(start_pnt[-1]),
                                                         b=str(exit_pnt[-1])))
    lst_outputs.append('cst_dist_secun')
    rseries(lst_outputs, 'isocronas', 'minimum')