Example #1
def foldershp_to_foldershp(inFld, outFld, destiny_file_format,
                           file_format='.shp', useApi='ogr'):
    """
    Execute shp_to_shp for every file in inFld (path to folder)
    
    useApi options:
    * ogr;
    """
    
    import os
    from gasp.pyt.oss import lst_ff, fprop
    
    if not os.path.exists(outFld):
        from gasp.pyt.oss import mkdir
        mkdir(outFld)
    
    geo_files = lst_ff(inFld, file_format=file_format)
    
    for f in geo_files:
        shp_to_shp(f, os.path.join(outFld, '{}{}'.format(
            fprop(f, 'fn'),
            destiny_file_format if destiny_file_format[0] == '.'
            else '.' + destiny_file_format
        )), gisApi=useApi)
    
    return outFld
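A minimal usage sketch (not from the source; folder paths are hypothetical):

# Convert every .shp in /data/shps to GeoJSON files in /data/out
out = foldershp_to_foldershp('/data/shps', '/data/out', 'geojson',
                             file_format='.shp', useApi='ogr')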
Example #2
def thrd_lulc_by_cell(thrd_id, df_fishnet, l_lulc, result):
    import os
    from gasp.pyt.oss import mkdir, fprop

    # Create folder for this thread
    t_folder = mkdir(os.path.join(result, 'thrd_' + str(thrd_id)))

    # For each fishnet, do the job
    for idx, row in df_fishnet.iterrows():
        rf = mkdir(os.path.join(result, fprop(row.fishnet, 'fn')))

        lulc_by_cell(int(idx), row.bound, l_lulc, row.fishnet, rf, t_folder)
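A hedged dispatch sketch (not from the source): assuming df_fishnet is a pandas DataFrame with 'fishnet' and 'bound' columns, split it by rows and run one worker per thread:

import threading
import numpy as np

def run_lulc_threads(df_fishnet, l_lulc, result, n_threads=4):
    # One row-chunk of the fishnet DataFrame per thread
    chunks = np.array_split(df_fishnet, n_threads)

    thrds = [threading.Thread(
        name='th-{}'.format(str(i)), target=thrd_lulc_by_cell,
        args=(i, chunks[i], l_lulc, result)
    ) for i in range(n_threads)]

    for t in thrds:
        t.start()
    for t in thrds:
        t.join()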
Example #3
File: sm.py Project: jasp382/gasp
def summarize_table_fields(table,
                           outFld,
                           fld_name_fld_name=None,
                           __upper=False):
    """
    Summarize all fields in a table
    """

    import os
    from gasp import exec_cmd
    from gasp.pyt.oss import mkdir
    # lst_fld lists a table's field names (module path assumed)
    from gasp.gt.prop.fld import lst_fld

    # List table fields:
    fields = lst_fld(table)

    # For each field, query data to summarize the values in the field
    cmd = 'ogr2ogr {o} {i} -dialect sqlite -sql "{s};"'

    if not os.path.exists(outFld):
        mkdir(outFld)

    for field in fields:
        outTbl = os.path.join(outFld, '{}.dbf'.format(field))

        outcmd = exec_cmd(
            cmd.format(i=table,
                       o=outTbl,
                       s='SELECT {f_}{f} FROM {t} GROUP BY {f}'.format(
                           f=field,
                           t=os.path.splitext(os.path.basename(table))[0],
                           f_='' if not fld_name_fld_name else
                           '{}, '.format(fld_name_fld_name))))
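A usage sketch (hypothetical paths; assumes ogr2ogr is on the PATH and the input is an OGR-readable table, e.g. a .dbf or .shp):

# One summary .dbf per field is written to /data/summaries
summarize_table_fields('/data/parcels.shp', '/data/summaries')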
Example #4
def osm_from_geofabrik(out_folder):
    """
    Get all OSM Files in GeoFabrik
    """
    
    import os
    from gasp.to.web   import get_file
    from gasp.cons.osm import osm_files
    from gasp.pyt.oss  import mkdir

    main_url = "http://download.geofabrik.de/{}/{}-latest.osm.pbf"

    for c in osm_files:
        if c == 'russia':
            get_file(
                "http://download.geofabrik.de/russia-latest.osm.pbf",
                os.path.join(out_folder, "russia-latest.osm.pbf")
            )
            continue
        
        # Create folder for that continent
        cf = mkdir(os.path.join(out_folder, c))

        # Download files of every continent
        for _c in osm_files[c]:
            get_file(main_url.format(c, _c), os.path.join(
                cf, 
                "{}-latest.osm.pbf".format(_c.replace('/', '_'))
            ), useWget=True)

    return out_folder
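Usage is a single call (path hypothetical); the downloads cover every continent listed in osm_files and may take a long time:

osm_from_geofabrik('/data/osm_pbf')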
Example #5
def pub_pglyr(workspace, store, pg_table, title=None):
    """
    Publish PostGIS table in geoserver
    """
    
    import os
    import requests
    from gasp.pyt.char import random_str
    from gasp.pyt.Xml  import write_xml_tree
    from gasp.pyt.oss  import mkdir, del_folder
    from gasp.cons.gsrv import con_gsrv

    gs_con = con_gsrv()
    
    # Create folder to write xml
    wTmp = mkdir(
        os.path.join(
            os.path.dirname(os.path.abspath(__file__)), random_str(7)
        )
    )
    
    # Create obj with data to be written in the xml
    lyr_title = "Title {}".format(pg_table) if not title else title
    elements = {
        "featureType": {
            "name"  : pg_table,
            "title" : lyr_title
        }
    }
    
    # Write the xml
    xml_file = write_xml_tree(
        elements,
        os.path.join(wTmp, '{}.xml'.format(pg_table))
    )
    
    # Create Geoserver Layer
    url = (
        '{pro}://{host}:{port}/geoserver/rest/workspaces/{wname}/'
        'datastores/{store_name}/featuretypes'
    ).format(
        host=gs_con['HOST'], port=gs_con['PORT'], wname=workspace,
        store_name=store, pro=gs_con['PROTOCOL']
    )
    
    with open(xml_file, 'rb') as __xml:
        r = requests.post(
            url, data=__xml, headers={'content-type': 'text/xml'},
            auth=(gs_con['USER'], gs_con['PASSWORD'])
        )
    
    del_folder(wTmp)
    
    return r
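A usage sketch (names hypothetical; assumes con_gsrv() resolves a valid GeoServer connection and the workspace and PostGIS store already exist):

resp = pub_pglyr('my_workspace', 'my_pgstore', 'roads_tbl', title='Roads')
print(resp.status_code)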
Example #6
def pub_rst_lyr(layername, datastore, workspace, epsg_code):
    """
    Publish a Raster layer
    """
    
    import os
    import requests
    from gasp.pyt.char    import random_str
    from gasp.pyt.Xml     import write_xml_tree
    from gasp.pyt.oss     import mkdir, del_folder
    from gasp.gt.prop.prj import epsg_to_wkt
    from gasp.cons.gsrv   import con_gsrv

    conf = con_gsrv()
    
    url = (
        '{pro}://{host}:{port}/geoserver/rest/workspaces/{work}/'
        'coveragestores/{storename}/coverages'
    ).format(
        host=conf['HOST'], port=conf['PORT'],
        work=workspace, storename=datastore, pro=conf['PROTOCOL']
    )
    
    # Create obj with data to be written in the xml
    xmlTree = {
        "coverage" : {
            "name"      : layername,
            "title"     : layername,
            "nativeCRS" : str(epsg_to_wkt(epsg_code)),
            "srs"       : 'EPSG:{}'.format(str(epsg_code)),
        }
    }
    
    # Write XML
    wTmp = mkdir(os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        random_str(7)
    )) 
    
    xml_file = write_xml_tree(
        xmlTree, os.path.join(wTmp, 'rst_lyr.xml')
    )
    
    # Create layer
    with open(xml_file, 'rb') as f:
        r = requests.post(
            url, data=f,
            headers={'content-type': 'text/xml'},
            auth=(conf['USER'], conf['PASSWORD'])
        )
    
    del_folder(wTmp)
    
    return r
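A usage sketch (names hypothetical; the coverage store must already contain the raster):

resp = pub_rst_lyr('dem_lyr', 'dem_store', 'my_workspace', 3763)
print(resp.status_code)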
Example #7
File: to.py Project: jasp382/gasp
def tbl_fromdb_todb(from_db, to_db, tables, qForTbl=None, api='pandas'):
    """
    Send PostgreSQL tables from one database to another
    """
    
    from gasp.pyt import obj_to_lst
    
    api = 'pandas' if api not in ('pandas', 'psql') else api
    
    tables = obj_to_lst(tables)
    
    if api == 'pandas':
        from gasp.sql.fm import q_to_obj
        from gasp.sql.to import df_to_db

        for table in tables:
            if qForTbl and table in qForTbl:
                tblDf = q_to_obj(from_db, qForTbl[table], db_api='psql')

            else:
                tblDf = q_to_obj(from_db, "SELECT * FROM {}".format(
                    table), db_api='psql')

            df_to_db(to_db, tblDf, table, api='psql')
    
    else:
        import os
        from gasp.pyt.oss import mkdir, del_folder
        from gasp.sql.fm  import dump_tbls
        from gasp.sql.to  import restore_tbls
        
        tmpFolder = mkdir(
            os.path.dirname(os.path.abspath(__file__)), randName=True
        )
        
        # Dump 
        sqlScript = dump_tbls(from_db, tables, os.path.join(
            tmpFolder, "tables_data.sql"
        ))
            
        # Restore
        restore_tbls(to_db, sqlScript, tables)
        
        del_folder(tmpFolder)
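A usage sketch (database and table names hypothetical):

# Copy two tables via pandas, overriding the query for one of them
tbl_fromdb_todb('src_db', 'dst_db', ['tbl_a', 'tbl_b'],
                qForTbl={'tbl_a': 'SELECT id, geom FROM tbl_a'},
                api='pandas')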
Example #8
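This inner worker is a fragment of a larger script: os, pd (pandas), mkdir, make_dem and the names demFormat, ref_folder, masksFolder, countours_folder, dem_folder, attr and cellsize are bound in the enclosing scope.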
    def prod_dem(_df):
        for idx, row in _df.iterrows():
            # Get DEM name
            dem_f = 'dem_{}{}'.format(str(row.fid), demFormat)

            # Get GRASS GIS Workspace
            gw = mkdir(os.path.join(ref_folder, 'gw_{}'.format(str(row.fid))),
                       overwrite=True)

            # Get mask
            msk = None if not masksFolder else None if pd.isna(row.masks)\
                else os.path.join(masksFolder, row.masks)

            # Produce DEM
            make_dem(gw,
                     os.path.join(countours_folder, row.countours),
                     attr,
                     os.path.join(dem_folder, dem_f),
                     os.path.join(ref_folder, row.ref),
                     method="CONTOUR",
                     cell_size=cellsize,
                     mask=msk)
Example #9
def shp_diff_fm_ref(refshp, refcol, shps, out_folder, refrst, db=None):
    """
    Check differences between each shp in shps and one reference shape

    Dependencies:
    - GRASS;
    - PostgreSQL with Postgis or GeoPandas;
    """

    import os
    from gasp.gt.prop.ff import check_isRaster
    from gasp.gt.wenv.grs import run_grass
    from gasp.pyt.oss import fprop
    from gasp.gt.tbl.tomtx import tbl_to_areamtx
    # optimized_union_anls is used below (module path assumed)
    from gasp.gt.gop.ovlay import optimized_union_anls

    # Check if folder exists, if not create it
    if not os.path.exists(out_folder):
        from gasp.pyt.oss import mkdir
        mkdir(out_folder)

    # Start GRASS GIS Session
    gbase = run_grass(out_folder,
                      grassBIN='grass78',
                      location='shpdif',
                      srs=refrst)

    import grass.script.setup as gsetup

    gsetup.init(gbase, out_folder, 'shpdif', 'PERMANENT')

    from gasp.gt.toshp.cff import shp_to_grs, grs_to_shp
    from gasp.gt.torst import rst_to_grs
    from gasp.gt.tbl.fld import rn_cols
    from gasp.gt.toshp.rst import rst_to_polyg

    # Convert to SHAPE if file is Raster
    # Rename interest columns
    i = 0
    lstff = [refshp] + list(shps.keys())
    __shps = {}
    for s in lstff:
        is_rst = check_isRaster(s)

        if is_rst:
            # To GRASS
            rname = fprop(s, 'fn')
            inrst = rst_to_grs(s, "rst_" + rname, as_cmd=True)

            # To vector
            d = rst_to_polyg(inrst,
                             rname,
                             rstColumn="lulc_{}".format(str(i)),
                             gisApi="grass")

        else:
            # To GRASS
            d = shp_to_grs(s, fprop(s, 'fn'), asCMD=True)

            # Change name of interest column
            rn_cols(d, {shps[s] if i else refcol: "lulc_{}".format(str(i))},
                    api="grass")

        # Export To Shapefile
        if not i:
            refshp = grs_to_shp(d, os.path.join(out_folder, d + '.shp'),
                                'area')
            refcol = "lulc_{}".format(str(i))

        else:
            shp = grs_to_shp(d, os.path.join(out_folder, d + '.shp'), 'area')
            __shps[shp] = "lulc_{}".format(str(i))

        i += 1

    # Union Shapefiles
    union_shape = {}

    for shp in __shps:
        # Optimized Union
        sname = fprop(shp, 'fn')
        union_shape[shp] = optimized_union_anls(
            shp,
            refshp,
            os.path.join(out_folder, sname + '_un.shp'),
            refrst,
            os.path.join(out_folder, "wk_" + sname),
            multiProcess=True)

        # Produce confusion matrices
        mtxf = tbl_to_areamtx(union_shape[shp],
                              "a_" + __shps[shp],
                              'b_' + refcol,
                              os.path.join(out_folder, sname + '.xlsx'),
                              db=db,
                              with_metrics=True)

    return out_folder
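A usage sketch (paths hypothetical; shps maps each comparison shapefile to its class column):

out = shp_diff_fm_ref(
    '/data/ref_lulc.shp', 'lulc_cls',
    {'/data/map_a.shp': 'cls_a', '/data/map_b.shp': 'cls_b'},
    '/data/diff_out', '/data/ref_raster.tif', db='difdb')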
Example #10
def create_pgstore(store, workspace, pg_con):
    """
    Create a store for PostGIS data
    """

    import os
    import requests
    from gasp.pyt.char import random_str
    from gasp.pyt.Xml import write_xml_tree
    from gasp.pyt.oss import mkdir, del_folder
    from gasp.cons.gsrv import con_gsrv

    gs_con = con_gsrv()

    # Create folder to write xml
    wTmp = mkdir(
        os.path.join(os.path.dirname(os.path.abspath(__file__)),
                     random_str(7)))

    # Create obj with data to be written in the xml
    tree_order = {
        "dataStore": [
            "name", "type", "enabled", "workspace", "connectionParameters",
            "__default"
        ],
        "connection:Parameters": [("entry", "key", "port"),
                                  ("entry", "key", "user"),
                                  ("entry", "key", "passwd"),
                                  ("entry", "key", "dbtype"),
                                  ("entry", "key", "host"),
                                  ("entry", "key", "database"),
                                  ("entry", "key", "schema")]
    }

    xml_tree = {
        "dataStore": {
            "name": store,
            "type": "PostGIS",
            "enabled": "true",
            "workspace": {
                "name": workspace
            },
            "connectionParameters": {
                ("entry", "key", "port"): pg_con["PORT"],
                ("entry", "key", "user"): pg_con["USER"],
                ("entry", "key", "passwd"): pg_con["PASSWORD"],
                ("entry", "key", "dbtype"): "postgis",
                ("entry", "key", "host"): pg_con["HOST"],
                ("entry", "key", "database"): pg_con["DATABASE"],
                ("entry", "key", "schema"): "public"
            },
            "__default": "false"
        }
    }

    # Write xml
    xml_file = write_xml_tree(xml_tree,
                              os.path.join(wTmp, 'pgrest.xml'),
                              nodes_order=tree_order)

    # Create Geoserver Store
    url = ('{pro}://{host}:{port}/geoserver/rest/workspaces/{wname}/'
           'datastores.xml').format(host=gs_con['HOST'],
                                    port=gs_con['PORT'],
                                    wname=workspace,
                                    pro=gs_con['PROTOCOL'])

    with open(xml_file, 'rb') as f:
        r = requests.post(url,
                          data=f,
                          headers={'content-type': 'text/xml'},
                          auth=(gs_con['USER'], gs_con['PASSWORD']))

    del_folder(wTmp)

    return r
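A usage sketch (connection values hypothetical):

pg = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
      'PASSWORD': 'pass', 'DATABASE': 'gisdb'}
resp = create_pgstore('my_pgstore', 'my_workspace', pg)
print(resp.status_code)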
Example #11
def joinLines_by_spatial_rel_raster(mainLines, mainId, joinLines, joinCol,
                                    outfile, epsg):
    """
    Join Attributes based on a spatial overlap.
    A raster-based approach.
    """

    import os
    import pandas
    from geopandas import GeoDataFrame
    from gasp.gt.fmshp import shp_to_obj
    from gasp.gt.toshp import df_to_shp
    from gasp.gt.toshp.coord import shpext_to_boundshp
    from gasp.gt.torst import shp_to_rst
    from gasp.g.to import df_to_geodf
    from gasp.gt.wenv.grs import run_grass
    from gasp.pyt.df.joins import join_dfs
    from gasp.pyt.df.agg import df_groupBy
    from gasp.pyt.oss import fprop, mkdir

    workspace = mkdir(os.path.join(os.path.dirname(mainLines), 'tmp_dt'))

    # Create boundary file
    boundary = shpext_to_boundshp(mainLines,
                                  os.path.join(workspace, "bound.shp"), epsg)

    boundRst = shp_to_rst(boundary,
                          None,
                          5,
                          -99,
                          os.path.join(workspace, "rst_base.tif"),
                          epsg=epsg,
                          api='gdal')

    # Start GRASS GIS Session
    gbase = run_grass(workspace, location="grs_loc", srs=boundRst)

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, workspace, "grs_loc", "PERMANENT")

    from gasp.gt.nop.local import combine
    from gasp.gt.prop.rst import get_rst_report_data
    from gasp.gt.toshp.cff import shp_to_grs, grs_to_shp
    from gasp.gt.torst import shp_to_rst

    # Add data to GRASS GIS
    mainVector = shp_to_grs(mainLines, fprop(mainLines, 'fn', forceLower=True))
    joinVector = shp_to_grs(joinLines, fprop(joinLines, 'fn', forceLower=True))

    mainRst = shp_to_rst(mainVector,
                         mainId,
                         None,
                         None,
                         "rst_" + mainVector,
                         api='pygrass')
    joinRst = shp_to_rst(joinVector,
                         joinCol,
                         None,
                         None,
                         "rst_" + joinVector,
                         api='pygrass')

    combRst = combine(mainRst, joinRst, "combine_rst", api="pygrass")

    combine_data = get_rst_report_data(combRst, UNITS="c")

    combDf = pandas.DataFrame(combine_data,
                              columns=["comb_cat", "rst_1", "rst_2", "ncells"])
    combDf = combDf[combDf["rst_2"] != '0']
    combDf["ncells"] = combDf["ncells"].astype(int)

    gbdata = df_groupBy(combDf, ["rst_1"], "MAX", "ncells")

    fTable = join_dfs(gbdata, combDf, ["rst_1", "ncells"], ["rst_1", "ncells"])

    fTable["rst_2"] = fTable["rst_2"].astype(int)
    fTable = df_groupBy(fTable, ["rst_1", "ncells"],
                        STAT='MIN',
                        STAT_FIELD="rst_2")

    mainLinesCat = grs_to_shp(mainVector,
                              os.path.join(workspace, mainVector + '.shp'),
                              'line')

    mainLinesDf = shp_to_obj(mainLinesCat)

    resultDf = join_dfs(mainLinesDf,
                        fTable,
                        "cat",
                        "rst_1",
                        onlyCombinations=None)

    resultDf.rename(columns={"rst_2": joinCol}, inplace=True)

    resultDf = df_to_geodf(resultDf, "geometry", epsg)

    df_to_shp(resultDf, outfile)

    return outfile
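A usage sketch (paths and columns hypothetical; both layers must be in the given EPSG):

joinLines_by_spatial_rel_raster('/data/roads.shp', 'road_id',
                                '/data/traffic.shp', 'traffic_lvl',
                                '/data/roads_join.shp', 3763)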
Example #12
def raster_based(osmdata,
                 nomenclature,
                 refRaster,
                 lulcRst,
                 overwrite=None,
                 dataStore=None,
                 roadsAPI='POSTGIS'):
    """
    Convert OSM Data into Land Use/Land Cover Information
    
    A raster-based approach.
    
    TODO: Add detailed description
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import datetime
    import os
    import pandas
    import copy
    # ************************************************************************ #
    # Gasp dependencies #
    # ************************************************************************ #
    from gasp.pyt.oss import mkdir, fprop
    from gasp.gt.prop.ff import check_isRaster
    from gasp.gt.prop.prj import get_rst_epsg
    from gasp.gt.wenv.grs import run_grass
    if roadsAPI == 'POSTGIS':
        from gasp.sql.db import create_db
        from gasp.gql.to.osm import osm_to_psql
        from gasp.sds.osm2lulc.mod2 import roads_sqdb
        from gasp.sql.fm import dump_db
        from gasp.sql.db import drop_db
    else:
        from gasp.gt.toshp.osm import osm_to_sqdb
        from gasp.sds.osm2lulc.mod2 import grs_rst_roads
    from gasp.sds.osm2lulc.utils import osm_project, add_lulc_to_osmfeat, osmlulc_rsttbl
    from gasp.sds.osm2lulc.utils import get_ref_raster
    from gasp.sds.osm2lulc.mod1 import grs_rst
    from gasp.sds.osm2lulc.m3_4 import rst_area
    from gasp.sds.osm2lulc.mod5 import basic_buffer
    from gasp.sds.osm2lulc.mod6 import rst_pnt_to_build
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    # Check if input parameters exists!
    if not os.path.exists(os.path.dirname(lulcRst)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcRst)))

    if not os.path.exists(osmdata):
        raise ValueError(
            'File with OSM DATA ({}) does not exist!'.format(osmdata))

    if not os.path.exists(refRaster):
        raise ValueError(
            'File with reference area ({}) does not exist!'.format(refRaster))

    # Check if nomenclature is valid; default to URBAN_ATLAS otherwise
    nomenclature = "URBAN_ATLAS" if nomenclature not in (
        "URBAN_ATLAS", "CORINE_LAND_COVER", "GLOBE_LAND_30"
    ) else nomenclature

    time_a = datetime.datetime.now().replace(microsecond=0)

    workspace = os.path.join(os.path.dirname(lulcRst),
                             'osmtolulc') if not dataStore else dataStore

    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            mkdir(workspace)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        mkdir(workspace)

    # Get Ref Raster
    refRaster, epsg = get_ref_raster(refRaster, workspace, cellsize=2)

    from gasp.sds.osm2lulc import PRIORITIES, osmTableData, LEGEND

    __priorities = PRIORITIES[nomenclature]
    __legend = LEGEND[nomenclature]
    time_b = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Convert OSM file to SQLITE DB or to POSTGIS DB #
    # ************************************************************************ #
    if roadsAPI == 'POSTGIS':
        osm_db = create_db(fprop(osmdata, 'fn', forceLower=True),
                           overwrite=True)
        osm_db = osm_to_psql(osmdata, osm_db)
    else:
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(osm_db, osmTableData, nomenclature, api=roadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db,
        epsg,
        api=roadsAPI,
        isGlobeLand=None if nomenclature != 'GLOBE_LAND_30' else True)
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Start a GRASS GIS Session #
    # ************************************************************************ #
    grass_base = run_grass(workspace,
                           grassBIN='grass78',
                           location='grloc',
                           srs=epsg)
    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')

    # ************************************************************************ #
    # IMPORT SOME GASP MODULES FOR GRASS GIS #
    # ************************************************************************ #
    from gasp.gt.torst import rst_to_grs, grs_to_rst
    from gasp.gt.nop.mos import rsts_to_mosaic
    from gasp.gt.wenv.grs import rst_to_region
    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(refRaster, 'extent_raster')
    rst_to_region(extRst)
    time_f = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # MapResults #
    mergeOut = {}
    # ************************************************************************ #
    # ************************************************************************ #
    # 1 - Selection Rule #
    # ************************************************************************ #
    """
    selOut = {
        cls_code : rst_name, ...
    }
    """
    selOut, timeCheck1 = grs_rst(osm_db,
                                 osmTableData['polygons'],
                                 api=roadsAPI)

    for cls in selOut:
        mergeOut[cls] = [selOut[cls]]

    time_g = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 2 - Get Information About Roads Location #
    # ************************************************************************ #
    """
    roads = {
        cls_code : rst_name, ...
    }
    """

    if roadsAPI != 'POSTGIS':
        roads, timeCheck2 = grs_rst_roads(
            osm_db, osmTableData['lines'], osmTableData['polygons'], workspace,
            1221 if nomenclature != "GLOBE_LAND_30" else 801)
    else:
        roadCls = 1221 if nomenclature != "GLOBE_LAND_30" else 801

        roads, timeCheck2 = roads_sqdb(osm_db,
                                       osmTableData['lines'],
                                       osmTableData['polygons'],
                                       apidb='POSTGIS',
                                       asRst=roadCls)

        roads = {roadCls: roads}

    for cls in roads:
        if cls not in mergeOut:
            mergeOut[cls] = [roads[cls]]
        else:
            mergeOut[cls].append(roads[cls])

    time_h = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 3 - Area Upper than #
    # ************************************************************************ #
    """
    auOut = {
        cls_code : rst_name, ...
    }
    """

    if nomenclature != 'GLOBE_LAND_30':
        auOut, timeCheck3 = rst_area(osm_db,
                                     osmTableData['polygons'],
                                     UPPER=True,
                                     api=roadsAPI)

        for cls in auOut:
            if cls not in mergeOut:
                mergeOut[cls] = [auOut[cls]]
            else:
                mergeOut[cls].append(auOut[cls])

        time_l = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck3 = None
        time_l = None
    # ************************************************************************ #
    # 4 - Area Lower than #
    # ************************************************************************ #
    """
    alOut = {
        cls_code : rst_name, ...
    }
    """
    if nomenclature != 'GLOBE_LAND_30':
        alOut, timeCheck4 = rst_area(osm_db,
                                     osmTableData['polygons'],
                                     UPPER=None,
                                     api=roadsAPI)
        for cls in alOut:
            if cls not in mergeOut:
                mergeOut[cls] = [alOut[cls]]
            else:
                mergeOut[cls].append(alOut[cls])

        time_j = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck4 = None
        time_j = None
    # ************************************************************************ #
    # 5 - Get data from lines table (railway | waterway) #
    # ************************************************************************ #
    """
    bfOut = {
        cls_code : rst_name, ...
    }
    """

    bfOut, timeCheck5 = basic_buffer(osm_db,
                                     osmTableData['lines'],
                                     workspace,
                                     apidb=roadsAPI)
    for cls in bfOut:
        if cls not in mergeOut:
            mergeOut[cls] = [bfOut[cls]]
        else:
            mergeOut[cls].append(bfOut[cls])

    time_m = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 7 - Assign untagged Buildings to tags #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        buildsOut, timeCheck7 = rst_pnt_to_build(osm_db,
                                                 osmTableData['points'],
                                                 osmTableData['polygons'],
                                                 api_db=roadsAPI)

        for cls in buildsOut:
            if cls not in mergeOut:
                mergeOut[cls] = buildsOut[cls]
            else:
                mergeOut[cls] += buildsOut[cls]

        time_n = datetime.datetime.now().replace(microsecond=0)

    else:
        timeCheck7 = None
        time_n = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Produce LULC Map  #
    # ************************************************************************ #
    """
    Merge all results for one cls into one raster
    mergeOut = {
        cls_code : [rst_name, rst_name, ...], ...
    }
    into
    mergeOut = {
        cls_code : patched_raster, ...
    }
    """

    for cls in mergeOut:
        if len(mergeOut[cls]) == 1:
            mergeOut[cls] = mergeOut[cls][0]

        else:
            mergeOut[cls] = rsts_to_mosaic(mergeOut[cls],
                                           'mosaic_{}'.format(str(cls)),
                                           api="grass")

    time_o = datetime.datetime.now().replace(microsecond=0)
    """
    Merge all Class Raster using a priority rule
    """

    __priorities = PRIORITIES[nomenclature]
    lst_rst = []
    for cls in __priorities:
        if cls not in mergeOut:
            continue
        else:
            lst_rst.append(mergeOut[cls])

    outGrs = rsts_to_mosaic(lst_rst,
                            os.path.splitext(os.path.basename(lulcRst))[0],
                            api="grass")
    time_p = datetime.datetime.now().replace(microsecond=0)

    # Check if lulcRst has a valid raster format; if not, export as GeoTIFF
    outIsRst = check_isRaster(lulcRst)
    if not outIsRst:
        lulcRst = os.path.join(os.path.dirname(lulcRst),
                               fprop(lulcRst, 'fn') + '.tif')

    grs_to_rst(outGrs, lulcRst, as_cmd=True)
    osmlulc_rsttbl(
        nomenclature,
        os.path.join(os.path.dirname(lulcRst),
                     os.path.basename(lulcRst) + '.vat.dbf'))

    time_q = datetime.datetime.now().replace(microsecond=0)

    # Dump Database if PostGIS was used
    # Drop Database if PostGIS was used
    if roadsAPI == 'POSTGIS':
        dump_db(osm_db, os.path.join(workspace, osm_db + '.sql'), api='psql')
        drop_db(osm_db)

    return lulcRst, {
        0: ('set_settings', time_b - time_a),
        1: ('osm_to_sqdb', time_c - time_b),
        2: ('cls_in_sqdb', time_d - time_c),
        3: ('proj_data', time_e - time_d),
        4: ('set_grass', time_f - time_e),
        5: ('rule_1', time_g - time_f, timeCheck1),
        6: ('rule_2', time_h - time_g, timeCheck2),
        7:
        None if not timeCheck3 else ('rule_3', time_l - time_h, timeCheck3),
        8:
        None if not timeCheck4 else ('rule_4', time_j - time_l, timeCheck4),
        9: ('rule_5', time_m - time_j if timeCheck4 else time_m - time_h,
            timeCheck5),
        10:
        None if not timeCheck7 else ('rule_7', time_n - time_m, timeCheck7),
        11: ('merge_rst', time_o - time_n),
        12: ('priority_rule', time_p - time_o),
        13: ('export_rst', time_q - time_p)
    }
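A usage sketch (paths hypothetical; a reachable PostgreSQL/PostGIS instance is assumed for roadsAPI='POSTGIS'):

rst, times = raster_based('/data/region.osm.pbf', 'URBAN_ATLAS',
                          '/data/ref.tif', '/data/lulc.tif',
                          overwrite=True, roadsAPI='POSTGIS')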
Example #13
    """
    Set environment variables
    """

    """
    Parameters
    """

    ref_raster = '/home/jasp/mrgis/landsense_pp/rst_pnse.tif'
    osmtolulc = '/home/jasp/mrgis/landsense_pp/res_pnse'
    tmp_folder = '/home/jasp/mrgis/landsense_pp/tmp_pnse'
    results = '/home/jasp/mrgis/landsense_pp/sample_pnse'
    """
    Run Script
    """

    # Create Fishnets
    fishnets = mkdir(os.path.join(tmp_folder, 'fishnets_shp'))
    fnet = nfishnet_fm_rst(ref_raster, 500, 500, fishnets)

    # List Fishnet
    df_fnet = pd.DataFrame(fnet, columns=['fishnet'])

    # List results
    lst_lulc = lst_ff(osmtolulc, file_format='.shp')

    # Produce boundaries for each fishnet
    bf = mkdir(os.path.join(tmp_folder, 'boundaries'))

    def produce_bound(row):
        row['bound'] = shpext_to_boundshp(
            row.fishnet, os.path.join(bf, os.path.basename(row.fishnet)))
Example #14
def vector_based(osmdata,
                 nomenclature,
                 refRaster,
                 lulcShp,
                 overwrite=None,
                 dataStore=None,
                 RoadsAPI='POSTGIS'):
    """
    Convert OSM Data into Land Use/Land Cover Information
    
    A vector-based approach.
    
    TODO: Add a detailed description.
    
    RoadsAPI Options:
    * GRASS
    * SQLITE
    * POSTGIS
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import datetime
    import os
    import copy
    # ************************************************************************ #
    # GASP dependencies #
    # ************************************************************************ #
    from gasp.pyt.oss import fprop, mkdir
    from gasp.gt.wenv.grs import run_grass
    if RoadsAPI == 'POSTGIS':
        from gasp.sql.db import create_db
        from gasp.gql.to.osm import osm_to_psql
        from gasp.sql.db import drop_db
        from gasp.sql.fm import dump_db
    else:
        from gasp.gt.toshp.osm import osm_to_sqdb
    from gasp.sds.osm2lulc.utils import osm_project, add_lulc_to_osmfeat, get_ref_raster
    from gasp.gt.toshp.mtos import shps_to_shp
    from gasp.sds.osm2lulc.mod1 import grs_vector
    if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS':
        from gasp.sds.osm2lulc.mod2 import roads_sqdb
    else:
        from gasp.sds.osm2lulc.mod2 import grs_vec_roads
    from gasp.sds.osm2lulc.m3_4 import grs_vect_selbyarea
    from gasp.sds.osm2lulc.mod5 import grs_vect_bbuffer
    from gasp.sds.osm2lulc.mod6 import vector_assign_pntags_to_build
    from gasp.gt.toshp.mtos import same_attr_to_shp
    from gasp.gt.prj import def_prj
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    # Check if input parameters exists!
    if not os.path.exists(os.path.dirname(lulcShp)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcShp)))

    if not os.path.exists(osmdata):
        raise ValueError(
            'File with OSM DATA ({}) does not exist!'.format(osmdata))

    if not os.path.exists(refRaster):
        raise ValueError(
            'File with reference area ({}) does not exist!'.format(refRaster))

    # Check if nomenclature is valid; default to URBAN_ATLAS otherwise
    nomenclature = "URBAN_ATLAS" if nomenclature not in (
        "URBAN_ATLAS", "CORINE_LAND_COVER", "GLOBE_LAND_30"
    ) else nomenclature

    time_a = datetime.datetime.now().replace(microsecond=0)

    # Create workspace for temporary files
    workspace = os.path.join(os.path.dirname(lulcShp),
                             'osmtolulc') if not dataStore else dataStore

    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            mkdir(workspace)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        mkdir(workspace)

    # Get Reference Raster
    refRaster, epsg = get_ref_raster(refRaster, workspace, cellsize=10)

    from gasp.sds.osm2lulc import osmTableData, PRIORITIES, LEGEND

    __priorities = PRIORITIES[nomenclature]
    __legend = LEGEND[nomenclature]

    time_b = datetime.datetime.now().replace(microsecond=0)

    if RoadsAPI != 'POSTGIS':
        # ******************************************************************** #
        # Convert OSM file to SQLITE DB #
        # ******************************************************************** #
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    else:
        # Convert OSM file to POSTGRESQL DB #
        osm_db = create_db(fprop(osmdata, 'fn', forceLower=True),
                           overwrite=True)
        osm_db = osm_to_psql(osmdata, osm_db)
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(osm_db,
                        osmTableData,
                        nomenclature,
                        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db,
        epsg,
        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI,
        isGlobeLand=None if nomenclature != 'GLOBE_LAND_30' else True)
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Start a GRASS GIS Session #
    # ************************************************************************ #
    grass_base = run_grass(workspace,
                           grassBIN='grass78',
                           location='grloc',
                           srs=epsg)
    #import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')

    # ************************************************************************ #
    # IMPORT SOME GASP MODULES FOR GRASS GIS #
    # ************************************************************************ #
    from gasp.gt.gop.ovlay import erase
    from gasp.gt.wenv.grs import rst_to_region
    from gasp.gt.gop.genze import dissolve
    from gasp.gt.tbl.grs import add_and_update, reset_table, update_table
    from gasp.gt.tbl.fld import add_fields
    from gasp.gt.toshp.cff import shp_to_grs, grs_to_shp
    from gasp.gt.torst import rst_to_grs
    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(refRaster, 'extent_raster')
    rst_to_region(extRst)
    time_f = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    osmShps = []
    # ************************************************************************ #
    # 1 - Selection Rule #
    # ************************************************************************ #
    ruleOneShp, timeCheck1 = grs_vector(osm_db,
                                        osmTableData['polygons'],
                                        apidb=RoadsAPI)
    osmShps.append(ruleOneShp)

    time_g = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 2 - Get Information About Roads Location #
    # ************************************************************************ #
    ruleRowShp, timeCheck2 = roads_sqdb(
        osm_db,
        osmTableData['lines'],
        osmTableData['polygons'],
        apidb=RoadsAPI
    ) if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS' else grs_vec_roads(
        osm_db, osmTableData['lines'], osmTableData['polygons'])

    osmShps.append(ruleRowShp)
    time_h = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 3 - Area Upper than #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleThreeShp, timeCheck3 = grs_vect_selbyarea(osm_db,
                                                      osmTableData['polygons'],
                                                      UPPER=True,
                                                      apidb=RoadsAPI)

        osmShps.append(ruleThreeShp)
        time_l = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck3 = None
        time_l = None
    # ************************************************************************ #
    # 4 - Area Lower than #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleFourShp, timeCheck4 = grs_vect_selbyarea(osm_db,
                                                     osmTableData['polygons'],
                                                     UPPER=False,
                                                     apidb=RoadsAPI)

        osmShps.append(ruleFourShp)
        time_j = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck4 = None
        time_j = None
    # ************************************************************************ #
    # 5 - Get data from lines table (railway | waterway) #
    # ************************************************************************ #
    ruleFiveShp, timeCheck5 = grs_vect_bbuffer(osm_db,
                                               osmTableData["lines"],
                                               api_db=RoadsAPI)

    osmShps.append(ruleFiveShp)
    time_m = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 7 - Assign untagged Buildings to tags #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleSeven11, ruleSeven12, timeCheck7 = vector_assign_pntags_to_build(
            osm_db,
            osmTableData['points'],
            osmTableData['polygons'],
            apidb=RoadsAPI)

        if ruleSeven11:
            osmShps.append(ruleSeven11)

        if ruleSeven12:
            osmShps.append(ruleSeven12)

        time_n = datetime.datetime.now().replace(microsecond=0)

    else:
        timeCheck7 = None
        time_n = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Produce LULC Map  #
    # ************************************************************************ #
    """
    Get Shps with all geometries related with one class - One Shape for Classe
    """

    _osmShps = []
    for i in range(len(osmShps)):
        if not osmShps[i]: continue

        _osmShps.append(
            grs_to_shp(osmShps[i],
                       os.path.join(workspace, osmShps[i] + '.shp'),
                       'auto',
                       lyrN=1,
                       asCMD=True,
                       asMultiPart=None))

    for shp in _osmShps:
        def_prj(os.path.splitext(shp)[0] + '.prj', epsg=epsg, api='epsgio')

    _osmShps = same_attr_to_shp(_osmShps,
                                "cat",
                                workspace,
                                "osm_",
                                resultDict=True)
    del osmShps

    time_o = datetime.datetime.now().replace(microsecond=0)
    """
    Merge all Classes into one feature class using a priority rule
    """

    osmShps = {}
    for cls in _osmShps:
        if cls == '1':
            osmShps[1221] = shp_to_grs(_osmShps[cls], "osm_1221", asCMD=True)

        else:
            osmShps[int(cls)] = shp_to_grs(_osmShps[cls],
                                           "osm_" + cls,
                                           asCMD=True)

    # Erase overlapping areas by priority
    osmNameRef = copy.deepcopy(osmShps)

    for e in range(len(__priorities)):
        if e + 1 == len(__priorities): break

        if __priorities[e] not in osmShps:
            continue
        else:
            for i in range(e + 1, len(__priorities)):
                if __priorities[i] not in osmShps:
                    continue
                else:
                    osmShps[__priorities[i]] = erase(
                        osmShps[__priorities[i]],
                        osmShps[__priorities[e]],
                        "{}_{}".format(osmNameRef[__priorities[i]], e),
                        notTbl=True,
                        api='pygrass')

    time_p = datetime.datetime.now().replace(microsecond=0)

    # Export all classes
    lst_merge = []
    a = None
    for i in range(len(__priorities)):
        if __priorities[i] not in osmShps:
            continue

        if not a:
            reset_table(osmShps[__priorities[i]], {
                'cls': 'varchar(5)',
                'leg': 'varchar(75)'
            }, {
                'cls': str(__priorities[i]),
                'leg': str(__legend[__priorities[i]])
            })

            a = 1

        else:
            add_and_update(osmShps[__priorities[i]], {'cls': 'varchar(5)'},
                           {'cls': str(__priorities[i])})

        ds = dissolve(osmShps[__priorities[i]],
                      'dl_{}'.format(str(__priorities[i])),
                      'cls',
                      api="grass")

        add_fields(ds, {'leg': 'varchar(75)'}, api="grass")
        update_table(ds, 'leg', str(__legend[__priorities[i]]), 'leg is null')

        lst_merge.append(
            grs_to_shp(ds,
                       os.path.join(workspace, "lulc_{}.shp".format(
                           str(__priorities[i]))),
                       'auto',
                       lyrN=1,
                       asCMD=True,
                       asMultiPart=None))

    time_q = datetime.datetime.now().replace(microsecond=0)

    if fprop(lulcShp, 'ff') != '.shp':
        lulcShp = os.path.join(os.path.dirname(lulcShp),
                               fprop(lulcShp, 'fn') + '.shp')

    shps_to_shp(lst_merge, lulcShp, api='pandas')

    # Check if prj of lulcShp exists and create it if necessary
    prj_ff = os.path.splitext(lulcShp)[0] + '.prj'
    if not os.path.exists(prj_ff):
        def_prj(prj_ff, epsg=epsg, api='epsgio')

    time_r = datetime.datetime.now().replace(microsecond=0)

    # Dump Database if PostGIS was used
    # Drop Database if PostGIS was used
    if RoadsAPI == 'POSTGIS':
        dump_db(osm_db, os.path.join(workspace, osm_db + '.sql'), api='psql')
        drop_db(osm_db)

    return lulcShp, {
        0: ('set_settings', time_b - time_a),
        1: ('osm_to_sqdb', time_c - time_b),
        2: ('cls_in_sqdb', time_d - time_c),
        3: ('proj_data', time_e - time_d),
        4: ('set_grass', time_f - time_e),
        5: ('rule_1', time_g - time_f, timeCheck1),
        6: ('rule_2', time_h - time_g, timeCheck2),
        7:
        None if not timeCheck3 else ('rule_3', time_l - time_h, timeCheck3),
        8:
        None if not timeCheck4 else ('rule_4', time_j - time_l, timeCheck4),
        9: ('rule_5', time_m - time_j if timeCheck4 else time_m - time_h,
            timeCheck5),
        10:
        None if not timeCheck7 else ('rule_7', time_n - time_m, timeCheck7),
        11: ('disj_cls', time_o - time_n),
        12: ('priority_rule', time_p - time_o),
        13: ('export_cls', time_q - time_p),
        14: ('merge_cls', time_r - time_q)
    }
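A usage sketch mirroring the raster variant (paths hypothetical):

shp, times = vector_based('/data/region.osm.pbf', 'CORINE_LAND_COVER',
                          '/data/ref.tif', '/data/lulc.shp',
                          overwrite=True, RoadsAPI='POSTGIS')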
Example #15
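This worker is an inner fragment: os, np (numpy), pd (pandas), gdal, mkdir, del_file, create_db, df_to_db, run_grass and the obs_id column name are bound in the enclosing scope or at module level.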
    def run_viewshed_by_cpu(tid,
                            db,
                            obs,
                            dem,
                            srs,
                            vis_basename='vis',
                            maxdst=None,
                            obselevation=None):
        # Create Database
        new_db = create_db("{}_{}".format(db, str(tid)), api='psql')

        # Points to Database
        pnt_tbl = df_to_db(new_db,
                           obs,
                           'pnt_tbl',
                           api='psql',
                           epsg=srs,
                           geomType='Point',
                           colGeom='geometry')

        # Create GRASS GIS Session
        workspace = mkdir(
            os.path.join(os.path.dirname(dem), 'work_{}'.format(str(tid))))
        loc_name = 'vis_loc'
        gbase = run_grass(workspace, location=loc_name, srs=dem)

        # Start GRASS GIS Session
        import grass.script as grass
        import grass.script.setup as gsetup
        gsetup.init(gbase, workspace, loc_name, 'PERMANENT')

        from gasp.gt.torst import rst_to_grs, grs_to_rst
        from gasp.gt.nop.surf import grs_viewshed
        from gasp.gt.deldt import del_rst

        # Send DEM to GRASS GIS
        grs_dem = rst_to_grs(dem, 'grs_dem', as_cmd=True)

        # Produce Viewshed for each point in obs
        for idx, row in obs.iterrows():
            # Get Viewshed raster
            vrst = grs_viewshed(grs_dem, (row.geometry.x, row.geometry.y),
                                '{}_{}'.format(vis_basename, str(row[obs_id])),
                                max_dist=maxdst,
                                obs_elv=obselevation)

            # Export Raster to File
            frst = grs_to_rst(vrst, os.path.join(workspace, vrst + '.tif'))

            # Raster to Array
            img = gdal.Open(frst)
            num = img.ReadAsArray()

            # Two Dimension to One Dimension
            # Reshape Array
            numone = num.reshape(num.shape[0] * num.shape[1])

            # Get Indexes with visibility
            visnum = np.arange(numone.shape[0]).astype(np.uint32)
            visnum = visnum[numone == 1]

            # Get row indexes (flat index // number of columns)
            visrow = visnum // num.shape[1]
            visrow = visrow.astype(np.uint32)

            # Get cols indexes
            viscol = visnum - (visrow * num.shape[1])

            # Visibility indexes to Pandas DataFrame
            idxnum = np.full(visrow.shape, row[obs_id])
            visdf = pd.DataFrame({
                'pntid': idxnum,
                'rowi': visrow,
                'coli': viscol
            })

            # Pandas DF to database
            # Create Visibility table
            df_to_db(new_db,
                     visdf,
                     vis_basename,
                     api='psql',
                     colGeom=None,
                     append=None if not idx else True)

            # Delete all variables
            numone = None
            visnum = None
            visrow = None
            viscol = None
            idxnum = None
            visdf = None
            del img

            # Delete GRASS GIS File
            del_rst(vrst)

            # Delete TIFF File
            del_file(frst)
            frst = None
Example #16
File: rmp.py Project: jasp382/gasp
def match_cellsize_and_clip(rstBands, refRaster, outFolder,
                            clipShp=None):
    """
    Resample images to make them with the same resolution and clip
    
    Good to resample Sentinel bands with more than 10 meters.
    
    Dependencies: 
    * GRASS GIS;
    * GDAL/OGR.
    """
    
    import os
    from gasp.gt.prop.prj import get_rst_epsg
    from gasp.gt.wenv.grs import run_grass
    from gasp.pyt.oss     import fprop, mkdir
    
    # Check if outfolder exists
    if not os.path.exists(outFolder):
        mkdir(outFolder, overwrite=None)
    
    # Get EPSG from refRaster
    epsg = get_rst_epsg(refRaster, returnIsProj=None)
    
    """
    Start GRASS GIS Session
    """
    GRS_WORKSPACE = mkdir(os.path.join(outFolder, 'grswork'))
    grsb = run_grass(
        GRS_WORKSPACE, grassBIN='grass78', location='resample',
        srs=epsg
    )
    
    import grass.script as grass
    import grass.script.setup as gsetup
    
    gsetup.init(grsb, GRS_WORKSPACE, 'resample', 'PERMANENT')
    
    """
    Import packages related with GRASS GIS
    """
    from gasp.gt.torst     import rst_to_grs, grs_to_rst
    from gasp.gt.wenv.grs  import rst_to_region
    from gasp.gt.toshp.cff import shp_to_grs
    from gasp.gt.torst     import shp_to_rst, grs_to_mask
    
    # Send Ref Raster to GRASS GIS and set region
    extRst = rst_to_grs(refRaster, 'ext_rst')
    rst_to_region(extRst)
    
    # Import all bands in rstBands
    grs_bands = [rst_to_grs(i, fprop(i, 'fn')) for i in rstBands]
    
    if clipShp:
        # Add clipShp to GRASS
        grs_clip = shp_to_grs(clipShp, fprop(clipShp, 'fn'), asCMD=True)

        # SHP to Raster
        rstClip = shp_to_rst(
            grs_clip, 1, None, 0, 'rst_' + grs_clip,
            api='grass'
        )

        # Set region using
        rst_to_region(rstClip)

        # Set mask
        grs_to_mask(rstClip)
    
    # Export bands
    return [grs_to_rst(
        i, os.path.join(outFolder, i + '.tif')
    ) for i in grs_bands]
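A usage sketch (hypothetical Sentinel-2 band paths; the 10 m reference band drives the target resolution and extent):

bands = match_cellsize_and_clip(
    ['/data/s2/B05.jp2', '/data/s2/B06.jp2'],
    '/data/s2/B02.jp2', '/data/resampled',
    clipShp='/data/aoi.shp')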
Example #17
def check_shape_diff(SHAPES_TO_COMPARE, OUT_FOLDER, REPORT, DB,
                     GRASS_REGION_TEMPLATE):
    """
    Script to check differences between pairs of Feature Classes
    
    Suppose we have several Feature Classes (FC), each with a given
    attribute; consider all possible pairs of these FC and, for each
    pair, compare the differences in the distribution of that
    attribute's values.
    
    * Dependencias:
    - GRASS;
    - PostgreSQL;
    - PostGIS.
    """

    import datetime
    import os
    import pandas
    from gasp.sql.fm import q_to_obj
    from gasp.to import db_to_tbl
    from gasp.sql.to import df_to_db
    from gasp.gt.toshp.cff import shp_to_shp
    from gasp.gt.toshp.db import dbtbl_to_shp
    from gasp.gt.toshp.rst import rst_to_polyg
    from gasp.gql.to import shp_to_psql
    from gasp.gql.tomtx import tbl_to_area_mtx
    from gasp.gt.prop.ff import check_isRaster
    from gasp.pyt.oss import fprop
    from gasp.sql.db import create_db
    from gasp.sql.tbl import tbls_to_tbl
    from gasp.sql.to import q_to_ntbl
    from gasp.gql.cln import fix_geom
    # optimized_union_anls is used below (module path assumed)
    from gasp.gt.gop.ovlay import optimized_union_anls

    # Check if folder exists, if not create it
    if not os.path.exists(OUT_FOLDER):
        from gasp.pyt.oss import mkdir
        mkdir(OUT_FOLDER, overwrite=None)
    else:
        raise ValueError('{} already exists!'.format(OUT_FOLDER))

    from gasp.gt.wenv.grs import run_grass

    gbase = run_grass(OUT_FOLDER,
                      grassBIN='grass78',
                      location='shpdif',
                      srs=GRASS_REGION_TEMPLATE)

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, OUT_FOLDER, 'shpdif', 'PERMANENT')

    from gasp.gt.toshp.cff import shp_to_grs, grs_to_shp
    from gasp.gt.torst import rst_to_grs
    from gasp.gt.tbl.fld import rn_cols

    # Convert to SHAPE if file is Raster
    i = 0
    _SHP_TO_COMPARE = {}
    for s in SHAPES_TO_COMPARE:
        isRaster = check_isRaster(s)

        if isRaster:
            # To GRASS
            rstName = fprop(s, 'fn')
            inRst = rst_to_grs(s, "rst_" + rstName, as_cmd=True)
            # To Vector
            d = rst_to_polyg(inRst,
                             rstName,
                             rstColumn="lulc_{}".format(i),
                             gisApi="grass")

            # Export Shapefile
            shp = grs_to_shp(d, os.path.join(OUT_FOLDER, d + '.shp'), "area")

            _SHP_TO_COMPARE[shp] = "lulc_{}".format(i)

        else:
            # To GRASS
            grsV = shp_to_grs(s, fprop(s, 'fn'), asCMD=True)

            # Change name of column with comparing value
            ncol = "lulc_{}".format(str(i))
            rn_cols(grsV, {SHAPES_TO_COMPARE[s]: "lulc_{}".format(str(i))},
                    api="grass")

            # Export
            shp = grs_to_shp(grsV, os.path.join(OUT_FOLDER, grsV + '_rn.shp'),
                             "area")

            _SHP_TO_COMPARE[shp] = "lulc_{}".format(str(i))

        i += 1

    SHAPES_TO_COMPARE = _SHP_TO_COMPARE
    __SHAPES_TO_COMPARE = SHAPES_TO_COMPARE

    # Create database
    create_db(DB, api='psql')
    """ Union SHAPEs """

    UNION_SHAPE = {}
    FIX_GEOM = {}

    SHPS = list(__SHAPES_TO_COMPARE.keys())
    for i in range(len(SHPS)):
        for e in range(i + 1, len(SHPS)):
            # Optimized Union
            print("Union between {} and {}".format(SHPS[i], SHPS[e]))
            time_a = datetime.datetime.now().replace(microsecond=0)
            __unShp = optimized_union_anls(
                SHPS[i],
                SHPS[e],
                os.path.join(OUT_FOLDER, "un_{}_{}.shp".format(i, e)),
                GRASS_REGION_TEMPLATE,
                os.path.join(OUT_FOLDER, "work_{}_{}".format(i, e)),
                multiProcess=True)
            time_b = datetime.datetime.now().replace(microsecond=0)
            print(time_b - time_a)

            # Rename cols
            unShp = rn_cols(
                __unShp, {
                    "a_" + __SHAPES_TO_COMPARE[SHPS[i]]:
                    __SHAPES_TO_COMPARE[SHPS[i]],
                    "b_" + __SHAPES_TO_COMPARE[SHPS[e]]:
                    __SHAPES_TO_COMPARE[SHPS[e]]
                })

            UNION_SHAPE[(SHPS[i], SHPS[e])] = unShp

    # Send data to postgresql
    SYNTH_TBL = {}

    for uShp in UNION_SHAPE:
        # Send data to PostgreSQL
        union_tbl = shp_to_psql(DB, UNION_SHAPE[uShp], api='shp2pgsql')

        # Produce table with % of area equal in both maps
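        # (areas come from ST_Area in squared SRS units and are divided by
        # 1 000 000; a metric SRS is assumed, so results are in km2)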
        areaMapTbl = q_to_ntbl(
            DB,
            "{}_syn".format(union_tbl),
            ("SELECT CAST('{lulc_1}' AS text) AS lulc_1, "
             "CAST('{lulc_2}' AS text) AS lulc_2, "
             "round("
             "CAST(SUM(g_area) / 1000000 AS numeric), 4"
             ") AS agree_area, round("
             "CAST((SUM(g_area) / MIN(total_area)) * 100 AS numeric), 4"
             ") AS agree_percentage, "
             "round("
             "CAST(MIN(total_area) / 1000000 AS numeric), 4"
             ") AS total_area FROM ("
             "SELECT {map1_cls}, {map2_cls}, ST_Area(geom) AS g_area, "
             "CASE "
             "WHEN {map1_cls} = {map2_cls} "
             "THEN 1 ELSE 0 "
             "END AS isthesame, total_area FROM {tbl}, ("
             "SELECT SUM(ST_Area(geom)) AS total_area FROM {tbl}"
             ") AS foo2"
             ") AS foo WHERE isthesame = 1 "
             "GROUP BY isthesame").format(
                 lulc_1=fprop(uShp[0], 'fn'),
                 lulc_2=fprop(uShp[1], 'fn'),
                 map1_cls=__SHAPES_TO_COMPARE[uShp[0]],
                 map2_cls=__SHAPES_TO_COMPARE[uShp[1]],
                 tbl=union_tbl),
            api='psql')

        # Produce confusion matrix for the pair in comparison
        matrixTbl = tbl_to_area_mtx(DB, union_tbl,
                                    __SHAPES_TO_COMPARE[uShp[0]],
                                    __SHAPES_TO_COMPARE[uShp[1]],
                                    union_tbl + '_mtx')

        SYNTH_TBL[uShp] = {"TOTAL": areaMapTbl, "MATRIX": matrixTbl}

    # UNION ALL TOTAL TABLES
    total_table = tbls_to_tbl(DB, [SYNTH_TBL[k]["TOTAL"] for k in SYNTH_TBL],
                              'total_table')

    # Create table with % of agreement between each pair of maps
    mapsNames = q_to_obj(
        DB,
        ("SELECT lulc FROM ("
         "SELECT lulc_1 AS lulc FROM {tbl} GROUP BY lulc_1 "
         "UNION ALL "
         "SELECT lulc_2 AS lulc FROM {tbl} GROUP BY lulc_2"
         ") AS lu GROUP BY lulc ORDER BY lulc").format(tbl=total_table),
        db_api='psql').lulc.tolist()

    FLDS_TO_PIVOT = ["agree_percentage", "total_area"]

    Q = ("SELECT * FROM crosstab('"
         "SELECT CASE "
         "WHEN foo.lulc_1 IS NOT NULL THEN foo.lulc_1 ELSE jtbl.tmp1 "
         "END AS lulc_1, CASE "
         "WHEN foo.lulc_2 IS NOT NULL THEN foo.lulc_2 ELSE jtbl.tmp2 "
         "END AS lulc_2, CASE "
         "WHEN foo.{valCol} IS NOT NULL THEN foo.{valCol} ELSE 0 "
         "END AS agree_percentage FROM ("
         "SELECT lulc_1, lulc_2, {valCol} FROM {tbl} UNION ALL "
         "SELECT lulc_1, lulc_2, {valCol} FROM ("
         "SELECT lulc_1 AS lulc_2, lulc_2 AS lulc_1, {valCol} "
         "FROM {tbl}"
         ") AS tst"
         ") AS foo FULL JOIN ("
         "SELECT lulc_1 AS tmp1, lulc_2 AS tmp2 FROM ("
         "SELECT lulc_1 AS lulc_1 FROM {tbl} GROUP BY lulc_1 "
         "UNION ALL "
         "SELECT lulc_2 AS lulc_1 FROM {tbl} GROUP BY lulc_2"
         ") AS tst_1, ("
         "SELECT lulc_1 AS lulc_2 FROM {tbl} GROUP BY lulc_1 "
         "UNION ALL "
         "SELECT lulc_2 AS lulc_2 FROM {tbl} GROUP BY lulc_2"
         ") AS tst_2 WHERE lulc_1 = lulc_2 GROUP BY lulc_1, lulc_2"
         ") AS jtbl ON foo.lulc_1 = jtbl.tmp1 AND foo.lulc_2 = jtbl.tmp2 "
         "ORDER BY lulc_1, lulc_2"
         "') AS ct("
         "lulc_map text, {crossCols}"
         ")")

    TOTAL_AGREE_TABLE = None
    TOTAL_AREA_TABLE = None
    for f in FLDS_TO_PIVOT:
        if not TOTAL_AGREE_TABLE:
            TOTAL_AGREE_TABLE = q_to_ntbl(
                DB,
                "agreement_table",
                Q.format(tbl=total_table,
                         valCol=f,
                         crossCols=", ".join([
                             "{} numeric".format(map_) for map_ in mapsNames
                         ])),
                api='psql')

        else:
            TOTAL_AREA_TABLE = q_to_ntbl(DB,
                                         "area_table",
                                         Q.format(tbl=total_table,
                                                  valCol=f,
                                                  crossCols=", ".join([
                                                      "{} numeric".format(map_)
                                                      for map_ in mapsNames
                                                  ])),
                                         api='psql')

    # Union Mapping
    UNION_MAPPING = pandas.DataFrame(
        [[k[0], k[1], fprop(UNION_SHAPE[k], 'fn')] for k in UNION_SHAPE],
        columns=['shp_a', 'shp_b', 'union_shp'])

    UNION_MAPPING = df_to_db(DB, UNION_MAPPING, 'union_map', api='psql')

    # Export Results
    TABLES = [UNION_MAPPING, TOTAL_AGREE_TABLE, TOTAL_AREA_TABLE
              ] + [SYNTH_TBL[x]["MATRIX"] for x in SYNTH_TBL]

    SHEETS = ["union_map", "agreement_percentage", "area_with_data_km"] + [
        "{}_{}".format(fprop(x[0], 'fn')[:15],
                       fprop(x[1], 'fn')[:15]) for x in SYNTH_TBL
    ]

    db_to_tbl(DB, ["SELECT * FROM {}".format(x) for x in TABLES],
              REPORT,
              sheetsNames=SHEETS,
              dbAPI='psql')

    return REPORT
Example #18
0
def optimized_union_anls(lyr_a,
                         lyr_b,
                         outShp,
                         ref_boundary,
                         workspace=None,
                         multiProcess=None):
    """
    Optimized Union Analysis
    
    Goal: optimize v.overlay performance for Union operations
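    
    Example (hypothetical paths):
        optimized_union_anls('/tmp/lulc_a.shp', '/tmp/lulc_b.shp',
                             '/tmp/union_ab.shp', '/tmp/boundary.shp',
                             multiProcess=True)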
    """

    import os
    from gasp.pyt.oss import fprop, lst_ff
    from gasp.pyt.oss import cpu_cores
    from gasp.gt.sample import create_fishnet
    from gasp.gt.wenv.grs import run_grass
    from gasp.gt.toshp import eachfeat_to_newshp
    from gasp.gt.toshp.mtos import shps_to_shp
    from gasp.gt.attr import split_shp_by_attr
    from gasp.gt.torst import shpext_to_rst
    from gasp.gt.prop.ext import get_ext
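    # NOTE: clip and union are used below; in the original module they are
    # assumed to be imported at file level (module path not shown here)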

    if workspace:
        if not os.path.exists(workspace):
            from gasp.pyt.oss import mkdir

            mkdir(workspace, overwrite=True)

    else:
        from gasp.pyt.oss import mkdir

        workspace = mkdir(os.path.join(os.path.dirname(outShp), "union_work"))

    # Create Fishnet
    ncpu = cpu_cores()
    if ncpu == 12:
        nrow = 4
        ncol = 3
    elif ncpu == 8:
        nrow = 4
        ncol = 2
    else:
        nrow = 2
        ncol = 2

    ext = get_ext(ref_boundary)
    width = (ext[1] - ext[0]) / ncol
    height = (ext[3] - ext[2]) / nrow
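    # e.g. with 8 CPU cores and a 40000 x 20000 m boundary, the fishnet has
    # 4 rows x 2 columns of 20000 x 5000 m cells (illustrative values)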

    gridShp = create_fishnet(ref_boundary,
                             os.path.join(workspace, 'ref_grid.shp'),
                             width,
                             height,
                             xy_row_col=None)

    # Split Fishnet in several files
    cellsShp = eachfeat_to_newshp(gridShp, workspace)

    if not multiProcess:
        # INIT GRASS GIS Session
        grsbase = run_grass(workspace, location="grs_loc", srs=ref_boundary)

        import grass.script.setup as gsetup

        gsetup.init(grsbase, workspace, "grs_loc", 'PERMANENT')

        # Add data to GRASS GIS
        from gasp.gt.toshp.cff import shp_to_grs

        cellsShp = [
            shp_to_grs(shp, fprop(shp, 'fn'), asCMD=True) for shp in cellsShp
        ]

        LYR_A = shp_to_grs(lyr_a, fprop(lyr_a, 'fn'), asCMD=True)
        LYR_B = shp_to_grs(lyr_b, fprop(lyr_b, 'fn'), asCMD=True)

        # Clip Layers A and B for each CELL in fishnet
        LYRS_A = [
            clip(LYR_A, cellsShp[x], LYR_A + "_" + str(x), api_gis="grass")
            for x in range(len(cellsShp))
        ]
        LYRS_B = [
            clip(LYR_B, cellsShp[x], LYR_B + "_" + str(x), api_gis="grass")
            for x in range(len(cellsShp))
        ]

        # Union SHPS
        UNION_SHP = [
            union(LYRS_A[i], LYRS_B[i], "un_{}".format(i), api_gis="grass")
            for i in range(len(cellsShp))
        ]

        # Export Data
        from gasp.gt.toshp.cff import grs_to_shp

        _UNION_SHP = [
            grs_to_shp(shp, os.path.join(workspace, shp + ".shp"), "area")
            for shp in UNION_SHP
        ]

    else:

        def clip_and_union(la, lb, cell, work, proc, output):
            ref_rst = shpext_to_rst(cell,
                                    os.path.join(os.path.dirname(cell),
                                                 fprop(cell, 'fn') + '.tif'),
                                    cellsize=10)

            # Start GRASS GIS Session
            loc = "proc_" + str(proc)
            grsbase = run_grass(work, location=loc, srs=ref_rst)
            import grass.script.setup as gsetup
            gsetup.init(grsbase, work, loc, 'PERMANENT')

            # Import GRASS GIS modules
            from gasp.gt.toshp.cff import shp_to_grs, grs_to_shp
            from gasp.gt.prop.feat import feat_count

            # Add data to GRASS
            a = shp_to_grs(la, fprop(la, 'fn'), filterByReg=True, asCMD=True)
            b = shp_to_grs(lb, fprop(lb, 'fn'), filterByReg=True, asCMD=True)

            if not feat_count(a, gisApi="grass", work=work, loc=loc):
                return

            if not feat_count(b, gisApi="grass", work=work, loc=loc):
                return

            # Clip
            a_clip = clip(a,
                          None,
                          "{}_clip".format(a),
                          api_gis="grass",
                          clip_by_region=True)
            b_clip = clip(b,
                          None,
                          "{}_clip".format(b),
                          api_gis="grass",
                          clip_by_region=True)

            # Union
            u_shp = union(a_clip,
                          b_clip,
                          "un_{}".format(fprop(cell, 'fn')),
                          api_gis="grass")

            # Export the union result to the output shapefile
            grs_to_shp(u_shp, output, "area")

        import multiprocessing

        thrds = [
            multiprocessing.Process(
                target=clip_and_union,
                name="th-{}".format(i),
                args=(lyr_a, lyr_b, cellsShp[i],
                      os.path.join(workspace, "th_{}".format(i)), i,
                      os.path.join(workspace, "uniao_{}.shp".format(i))))
            for i in range(len(cellsShp))
        ]

        for t in thrds:
            t.start()

        for t in thrds:
            t.join()

        ff_shp = lst_ff(workspace, file_format='.shp')
        _UNION_SHP = []
        for i in range(len(cellsShp)):
            p = os.path.join(workspace, "uniao_{}.shp".format(i))

            if p in ff_shp:
                _UNION_SHP.append(p)
            else:
                continue

    # Merge all union into the same layer
    MERGED_SHP = shps_to_shp(_UNION_SHP, outShp, api="ogr2ogr")

    return MERGED_SHP
Example #19
0
def osm2lulc(osmdata, nomenclature, refRaster, lulcRst,
             overwrite=None, dataStore=None, roadsAPI='POSTGIS'):
    """
    Convert OSM data into Land Use/Land Cover Information
    
    A matrix based approach
    
    roadsAPI Options:
    * SQLITE
    * POSTGIS
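    
    Example (hypothetical paths):
        lulc, times = osm2lulc(
            '/tmp/lisboa-latest.osm.pbf', 'URBAN_ATLAS',
            '/tmp/ref_raster.tif', '/tmp/lulc.tif', roadsAPI='POSTGIS'
        )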
    """
    
    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import os; import numpy; import datetime
    from threading import Thread
    from osgeo     import gdal
    # ************************************************************************ #
    # Dependencies #
    # ************************************************************************ #
    from gasp.gt.fmrst              import rst_to_array
    from gasp.gt.prop.ff            import check_isRaster
    from gasp.gt.prop.rst           import get_cellsize
    from gasp.gt.prop.prj           import get_rst_epsg
    from gasp.pyt.oss               import mkdir, copy_file
    from gasp.pyt.oss               import fprop
    if roadsAPI == 'POSTGIS':
        from gasp.sql.db            import create_db
        from gasp.gql.to.osm        import osm_to_psql
        from gasp.sds.osm2lulc.mod2 import pg_num_roads
        from gasp.sql.fm            import dump_db
        from gasp.sql.db            import drop_db
    else:
        from gasp.gt.toshp.osm      import osm_to_sqdb
        from gasp.sds.osm2lulc.mod2 import num_roads
    from gasp.sds.osm2lulc.utils    import osm_project, add_lulc_to_osmfeat
    from gasp.sds.osm2lulc.utils    import osmlulc_rsttbl
    from gasp.sds.osm2lulc.utils    import get_ref_raster
    from gasp.sds.osm2lulc.mod1     import num_selection
    from gasp.sds.osm2lulc.m3_4     import num_selbyarea
    from gasp.sds.osm2lulc.mod5     import num_base_buffer
    from gasp.sds.osm2lulc.mod6     import num_assign_builds
    from gasp.gt.torst              import obj_to_rst
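    # NOTE: rstcls_map is used below; in the original module it is assumed
    # to be imported at file level (module path not shown here)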
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    # Check if input parameters exists!
    if not os.path.exists(os.path.dirname(lulcRst)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcRst)))
    
    if not os.path.exists(osmdata):
        raise ValueError('File with OSM DATA ({}) does not exist!'.format(osmdata))
    
    if not os.path.exists(refRaster):
        raise ValueError('File with reference area ({}) does not exist!'.format(refRaster))
    
    # Check if Nomenclature is valid
    nomenclature = "URBAN_ATLAS" if nomenclature != "URBAN_ATLAS" and \
        nomenclature != "CORINE_LAND_COVER" and \
        nomenclature == "GLOBE_LAND_30" else nomenclature
    
    time_a = datetime.datetime.now().replace(microsecond=0)
    
    workspace = os.path.join(os.path.dirname(
        lulcRst), 'num_osmto') if not dataStore else dataStore
    
    # Check if workspace exists:
    if os.path.exists(workspace):
        if overwrite:
            mkdir(workspace, overwrite=True)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        mkdir(workspace, overwrite=None)
    
    # Get Ref Raster and EPSG
    refRaster, epsg = get_ref_raster(refRaster, workspace, cellsize=2)
    CELLSIZE = get_cellsize(refRaster, gisApi='gdal')
        
    from gasp.sds.osm2lulc import osmTableData, PRIORITIES
    
    time_b = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Convert OSM file to SQLITE DB or to POSTGIS DB #
    # ************************************************************************ #
    if roadsAPI == 'POSTGIS':
        osm_db = create_db(fprop(
            osmdata, 'fn', forceLower=True), overwrite=True)
        osm_db = osm_to_psql(osmdata, osm_db)
    
    else:
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(osm_db, osmTableData, nomenclature, api=roadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db, epsg, api=roadsAPI,
        isGlobeLand=None if nomenclature != "GLOBE_LAND_30" else True
    )
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    mergeOut  = {}
    timeCheck = {}
    RULES = [1, 2, 3, 4, 5, 7]
    
    def run_rule(ruleID):
        time_start = datetime.datetime.now().replace(microsecond=0)
        _osmdb = copy_file(
            osm_db, os.path.splitext(osm_db)[0] + '_r{}.sqlite'.format(ruleID)
        ) if roadsAPI == 'SQLITE' else None
        # ******************************************************************** #
        # 1 - Selection Rule #
        # ******************************************************************** #
        if ruleID == 1:
            res, tm = num_selection(
                _osmdb if _osmdb else osm_db, osmTableData['polygons'], workspace,
                CELLSIZE, epsg, refRaster, api=roadsAPI
            )
        # ******************************************************************** #
        # 2 - Get Information About Roads Location #
        # ******************************************************************** #
        elif ruleID == 2:
            res, tm = num_roads(
                _osmdb, nomenclature, osmTableData['lines'],
                osmTableData['polygons'], workspace, CELLSIZE, epsg,
                refRaster
            ) if _osmdb else pg_num_roads(
                osm_db, nomenclature,
                osmTableData['lines'], osmTableData['polygons'],
                workspace, CELLSIZE, epsg, refRaster
            )
        
        # ******************************************************************** #
        # 3 - Area Upper than #
        # ******************************************************************** #
        elif ruleID == 3:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_selbyarea(
                    osm_db if not _osmdb else _osmdb,
                    osmTableData['polygons'], workspace,
                    CELLSIZE, epsg, refRaster, UPPER=True, api=roadsAPI
                )
            else:
                return
        
        # ******************************************************************** #
        # 4 - Area Lower than #
        # ******************************************************************** #
        elif ruleID == 4:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_selbyarea(
                    osm_db if not _osmdb else _osmdb,
                    osmTableData['polygons'], workspace,
                    CELLSIZE, epsg, refRaster, UPPER=False, api=roadsAPI
                )
            else:
                return
        
        # ******************************************************************** #
        # 5 - Get data from lines table (railway | waterway) #
        # ******************************************************************** #
        elif ruleID == 5:
            res, tm = num_base_buffer(
                osm_db if not _osmdb else _osmdb,
                osmTableData['lines'], workspace,
                CELLSIZE, epsg, refRaster, api=roadsAPI
            )
        # ******************************************************************** #
        # 7 - Assign untagged Buildings to tags #
        # ******************************************************************** #
        elif ruleID == 7:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_assign_builds(
                    osm_db if not _osmdb else _osmdb,
                    osmTableData['points'], osmTableData['polygons'],
                    workspace, CELLSIZE, epsg, refRaster, apidb=roadsAPI
                )
            
            else:
                return
        
        time_end = datetime.datetime.now().replace(microsecond=0)
        mergeOut[ruleID] = res
        timeCheck[ruleID] = {'total': time_end - time_start, 'detailed': tm}
    
    thrds = []
    for r in RULES:
        thrds.append(Thread(
            name="to_{}".format(str(r)), target=run_rule,
            args=(r,)
        ))
        
    
    for t in thrds: t.start()
    for t in thrds: t.join()
    
    # Merge all results into one Raster
    compileResults = {}
    for rule in mergeOut:
        for cls in mergeOut[rule]:
            rst = mergeOut[rule][cls]
            if cls not in compileResults:
                compileResults[cls] = rst if isinstance(rst, list) else [rst]
            elif isinstance(rst, list):
                compileResults[cls] += rst
            else:
                compileResults[cls].append(rst)
    
    time_m = datetime.datetime.now().replace(microsecond=0)
    # All Rasters to Array
    arrayRst = {}
    for cls in compileResults:
        for raster in compileResults[cls]:
            if not raster:
                continue
            
            array = rst_to_array(raster)
            
            if cls not in arrayRst:
                arrayRst[cls] = [array.astype(numpy.uint8)]
            
            else:
                arrayRst[cls].append(array.astype(numpy.uint8))
    time_n = datetime.datetime.now().replace(microsecond=0)
    
    # Sum Rasters of each class
    for cls in arrayRst:
        if len(arrayRst[cls]) == 1:
            sumArray = arrayRst[cls][0]
        
        else:
            sumArray = arrayRst[cls][0]
            
            for i in range(1, len(arrayRst[cls])):
                sumArray = sumArray + arrayRst[cls][i]
        
        arrayRst[cls] = sumArray
    
    time_o = datetime.datetime.now().replace(microsecond=0)
    
    # Apply priority rule
    __priorities = PRIORITIES[nomenclature + "_NUMPY"]
    
    for lulcCls in __priorities:
        __lulcCls = rstcls_map(lulcCls)

        if __lulcCls not in arrayRst:
            continue
        else:
            numpy.place(arrayRst[__lulcCls], arrayRst[__lulcCls] > 0,
                lulcCls
            )
    
    for i in range(len(__priorities)):
        lulc_i = rstcls_map(__priorities[i])

        if lulc_i not in arrayRst:
            continue
        
        else:
            for e in range(i+1, len(__priorities)):
                lulc_e = rstcls_map(__priorities[e])

                if lulc_e not in arrayRst:
                    continue
                
                else:
                    numpy.place(arrayRst[lulc_e],
                        arrayRst[lulc_i] == __priorities[i], 0
                    )
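
    # Minimal sketch of the masking above (illustrative class codes): with
    # __priorities = [12, 24], numpy.place(arrayRst[cls_24],
    # arrayRst[cls_12] == 12, 0) zeroes class 24 wherever class 12 already
    # claims the cell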
    
    time_p = datetime.datetime.now().replace(microsecond=0)
    
    # Merge all rasters
    startCls = None
    for i in range(len(__priorities)):
        lulc_i = rstcls_map(__priorities[i])
        
        if lulc_i in arrayRst:
            resultSum = arrayRst[lulc_i]
            startCls = i
            break
    
    if startCls is None:
        return 'NoResults'
    
    for i in range(startCls + 1, len(__priorities)):
        lulc_i = rstcls_map(__priorities[i])
        
        if lulc_i not in arrayRst:
            continue
        
        resultSum = resultSum + arrayRst[lulc_i]
    
    # Save Result
    outIsRst = check_isRaster(lulcRst)
    if not outIsRst:
        from gasp.pyt.oss import fprop
        
        lulcRst = os.path.join(
            os.path.dirname(lulcRst), fprop(lulcRst, 'fn') + '.tif'
        )
    
    numpy.place(resultSum, resultSum==0, 1)
    obj_to_rst(resultSum, lulcRst, refRaster, noData=1)
    
    osmlulc_rsttbl(nomenclature + "_NUMPY", os.path.join(
        os.path.dirname(lulcRst), os.path.basename(lulcRst) + '.vat.dbf'
    ))
    
    time_q = datetime.datetime.now().replace(microsecond=0)

    # Dump and drop the database if PostGIS was used
    if roadsAPI == 'POSTGIS':
        dump_db(osm_db, os.path.join(workspace, osm_db + '.sql'), api='psql')
        drop_db(osm_db)
    
    return lulcRst, {
        0  : ('set_settings', time_b - time_a),
        1  : ('osm_to_sqdb', time_c - time_b),
        2  : ('cls_in_sqdb', time_d - time_c),
        3  : ('proj_data', time_e - time_d),
        4  : ('rule_1', timeCheck[1]['total'], timeCheck[1]['detailed']),
        5  : ('rule_2', timeCheck[2]['total'], timeCheck[2]['detailed']),
        6  : None if 3 not in timeCheck else (
            'rule_3', timeCheck[3]['total'], timeCheck[3]['detailed']),
        7  : None if 4 not in timeCheck else (
            'rule_4', timeCheck[4]['total'], timeCheck[4]['detailed']),
        8  : ('rule_5', timeCheck[5]['total'], timeCheck[5]['detailed']),
        9  : None if 7 not in timeCheck else (
            'rule_7', timeCheck[7]['total'], timeCheck[7]['detailed']),
        10 : ('rst_to_array', time_n - time_m),
        11 : ('sum_cls', time_o - time_n),
        12 : ('priority_rule', time_p - time_o),
        13 : ('merge_rst', time_q - time_p)
    }
Example #20
0
def pgtables_to_layer_withStyle_by_col(pgtables,
                                       sldData,
                                       db,
                                       workName=None,
                                       storeName=None,
                                       sldGeom='Polygon',
                                       DATATYPE='QUANTITATIVE',
                                       TABLE_DESIGNATION=None,
                                       COL_DESIGNATION=None,
                                       exclude_cols=None,
                                       pathToSLDfiles=None):
    """
    Create a new Geoserver Workspace, create a postgis store and one layer
    for each table in 'pgtables'. Each layer will have
    multiple styles - one style by column in it.
    
    Parameters:
    1) pgtables
    - List of PSQL tables to be transformed as Geoserver Layers
    
    2) sldData
    - sldData should be a xls table with the styles specifications.
    For QUANTITATIVE DATA: The table should have two sheets: one for
    colors and other for intervals:
    
    COLORS SHEET STRUCTURE (Sheet Index = 0):
    cls_id | R | G | B | STROKE_R | STROKE_G | STROKE_B | STROKE_W
       1   | X | X | X |    X     |    X     |    X     |    1
       2   | X | X | X |    X     |    X     |    X     |    1
       3   | X | X | X |    X     |    X     |    X     |    1
       4   | X | X | X |    X     |    X     |    X     |    1
       5   | X | X | X |    X     |    X     |    X     |    1
    
    The colors sheet must also include an OPACITY column; the value in
    its first row is used as the fill opacity for all classes.
    
    INTERVALS SHEET STRUCTURE (Sheet Index = 1):
          | 0 | 1 |  2 |  3 |  4 |  5
    col_0 | 0 | 5 | 10 | 15 | 20 | 25
    col_1 | 0 | 6 | 12 | 18 | 24 | 30
    ...
    col_n | 0 | 5 | 10 | 15 | 20 | 25
    
    For CATEGORICAL DATA: The table should have only one sheet:
    CATEGORICAL SHEET STRUCTURE
           | R | G | B | STROKE_R | STROKE_G | STROKE_B | STROKE_W
    attr_1 | X | X | X |    X     |    X     |    X     |    1
    attr_2 | X | X | X |    X     |    X     |    X     |    1
    ...
    attr_n | X | X | X |    X     |    X     |    X     |    1
    
    3) db
    - Name of the PostgreSQL database with 'pgtables'. Connection
    parameters for PostgreSQL and Geoserver are read from the GASP
    configuration.
    
    4) workName
    - String with the name of the Geoserver workspace that will be created
    
    5) storeName
    - String with the name of the Geoserver store that will be created
    
    6) sldGeom
    - Data Geometry. At the moment, the options 'Polygon' and 'Line' are
    valid.
    
    7) DATATYPE='QUANTITATIVE' | 'CATEGORICAL'
    
    8) TABLE_DESIGNATION
    - Table with the association between each pgtable name and the
    designation to be used to name the corresponding Geoserver Layer.
    
    9) COL_DESIGNATION
    - XLS table with the association between each column and the style
    that will be used to present the information of that column.
    The style designation must not contain blank characters.
    
    10) exclude_cols
    - One style will be created for each column in each pgtable. The
    columns in 'exclude_cols' will not have a style.
    
    11) pathToSLDfiles
    - Absolute path to the folder where the SLD (Style Layer Descriptor)
    files will be stored.
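    
    Example (hypothetical values):
        pgtables_to_layer_withStyle_by_col(
            ['tbl_a', 'tbl_b'], '/tmp/styles.xls', 'mydb',
            workName='w_lulc', DATATYPE='QUANTITATIVE'
        )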
    """

    import os
    from gasp.pyt import obj_to_lst
    from gasp.fm import tbl_to_obj
    from gasp.pyt.oss import mkdir
    from gasp.sql.i import cols_name
    from gasp.web.geosrv.ws import create_ws
    from gasp.web.geosrv.stores import create_pgstore
    from gasp.web.geosrv.lyrs import pub_pglyr
    from gasp.web.geosrv.sty import create_style
    from gasp.web.geosrv.sty import lst_styles
    from gasp.web.geosrv.sty import del_style
    from gasp.web.geosrv.sty import assign_style_to_layer
    from gasp.web.geosrv.sld import write_sld

    # Sanitize PGtables
    pgtables = obj_to_lst(pgtables)

    if not pgtables:
        raise ValueError('pgtables value is not valid!')

    exclude_cols = obj_to_lst(exclude_cols)

    STY_DESIGNATION = tbl_to_obj(COL_DESIGNATION,
                                 useFirstColAsIndex=True,
                                 output='dict',
                                 colsAsArray=True) if COL_DESIGNATION else None

    LYR_DESIGNATION = tbl_to_obj(
        TABLE_DESIGNATION,
        useFirstColAsIndex=True,
        output='dict',
        colsAsArray=True) if TABLE_DESIGNATION else None

    # Get intervals and colors data
    if DATATYPE == 'QUANTITATIVE':
        if os.path.exists(sldData):
            FF = os.path.splitext(sldData)[1]
            if FF == '.xls' or FF == '.xlsx':
                colorsDict = tbl_to_obj(sldData,
                                        sheet=0,
                                        useFirstColAsIndex=True,
                                        output='dict')
                intervalsDict = tbl_to_obj(sldData,
                                           sheet=1,
                                           useFirstColAsIndex=True,
                                           output='dict')

            else:
                raise ValueError(
                    ('At the moment, for DATATYPE QUANTITATIVE, sldData '
                     'has to be an XLS table'))

        else:
            raise ValueError(('{} is not a valid file').format(sldData))

    elif DATATYPE == 'CATEGORICAL':
        if os.path.exists(sldData):
            if os.path.splitext(sldData)[1] in ('.xls', '.xlsx'):
                colorsDict = tbl_to_obj(sldData,
                                        sheet=0,
                                        useFirstColAsIndex=True,
                                        output='dict')

            else:
                raise ValueError(
                    ('At the moment, for DATATYPE CATEGORICAL, sldData '
                     'has to be an XLS table'))
        else:
            raise ValueError(('{} is not a valid file').format(sldData))

    else:
        raise ValueError('{} is not available at the moment'.format(DATATYPE))

    # Create Workspace
    workName = 'w_{}'.format(db) if not workName else workName

    create_ws(workName, overwrite=True)

    # Create Store
    storeName = db if not storeName else storeName
    create_pgstore(storeName, workName, db)

    # Create folder for sld's
    wTmp = mkdir(
        os.path.join(os.path.dirname(sldData),
                     'sldFiles')) if not pathToSLDfiles else pathToSLDfiles

    # List styles in geoserver
    STYLES = lst_styles()

    # For each table in PGTABLES
    for PGTABLE in pgtables:
        # Publish Postgis table
        TITLE = None if not LYR_DESIGNATION else LYR_DESIGNATION[PGTABLE][0]
        pub_pglyr(workName, storeName, PGTABLE, title=TITLE)

        # List PGTABLE columns
        pgCols = cols_name(db, PGTABLE)

        # For each column
        for col in pgCols:
            if exclude_cols and col in exclude_cols:
                continue

            STYLE_NAME = '{}_{}'.format(
                PGTABLE, STY_DESIGNATION[col][0]
            ) if STY_DESIGNATION else '{}_{}'.format(PGTABLE, col)

            if STYLE_NAME in STYLES:
                del_style(STYLE_NAME)

            # Create Object with association between colors and intervals
            d = {}
            OPACITY = str(colorsDict[1]['OPACITY'])
            for i in colorsDict:
                d[i] = {
                    'R': colorsDict[i]['R'],
                    'G': colorsDict[i]['G'],
                    'B': colorsDict[i]['B']
                }

                if DATATYPE == 'QUANTITATIVE':
                    d[i]['min'] = intervalsDict[col][i - 1]
                    d[i]['max'] = intervalsDict[col][i]

                if 'STROKE_R' in colorsDict[i] and \
                   'STROKE_G' in colorsDict[i] and \
                   'STROKE_B' in colorsDict[i]:
                    d[i]['STROKE'] = {
                        'R': colorsDict[i]['STROKE_R'],
                        'G': colorsDict[i]['STROKE_G'],
                        'B': colorsDict[i]['STROKE_B']
                    }

                    if 'STROKE_W' in colorsDict[i]:
                        d[i]['STROKE']['WIDTH'] = colorsDict[i]['STROKE_W']
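
            # For QUANTITATIVE data one entry of d ends up like (values
            # illustrative): {'R': 255, 'G': 230, 'B': 190, 'min': 0,
            # 'max': 5, 'STROKE': {'R': 0, 'G': 0, 'B': 0, 'WIDTH': 1}}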

            # Create SLD
            sldFile = write_sld(col,
                                d,
                                os.path.join(wTmp, '{}.sld'.format(col)),
                                geometry=sldGeom,
                                DATA=DATATYPE,
                                opacity=OPACITY)

            # Create Style
            create_style(STYLE_NAME, sldFile)

            # Apply SLD
            assign_style_to_layer(STYLE_NAME, PGTABLE)