Example #1
    def run_viewshed_by_cpu(tid,
                            obs,
                            dem,
                            output,
                            vis_basename='vis',
                            maxdst=None,
                            obselevation=None):

        # Create GRASS GIS Session
        loc_name = 'loc_' + str(tid)
        gbase = run_grass(output, location=loc_name, srs=dem)

        # Start GRASS GIS Session
        import grass.script as grass
        import grass.script.setup as gsetup
        gsetup.init(gbase, output, loc_name, 'PERMANENT')

        from gasp.gt.torst import rst_to_grs, grs_to_rst
        from gasp.gt.nop.surf import grs_viewshed

        # Send DEM to GRASS GIS
        grs_dem = rst_to_grs(dem, 'grs_dem', as_cmd=True)

        # Produce Viewshed for each point in obs
        for idx, row in obs.iterrows():
            vrst = grs_viewshed(grs_dem, (row.geometry.x, row.geometry.y),
                                '{}_{}'.format(vis_basename, str(row[obs_id])),
                                max_dist=maxdst,
                                obs_elv=obselevation)

            frst = grs_to_rst(vrst, os.path.join(output, vrst + '.tif'))
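The function above is an inner worker: os, run_grass and obs_id are expected to come from its enclosing scope. As an illustration only, a hypothetical driver could split the observer points and launch one worker per CPU (fork-based multiprocessing and all paths below are assumptions, not part of the original code):

import multiprocessing as mp
import geopandas as gpd

# hypothetical inputs
obs = gpd.read_file('/data/observers.shp')   # observer points
dem = '/data/dem.tif'                        # digital elevation model
n_cpu = 4

# split observers into one chunk per worker
chunks = [obs.iloc[i::n_cpu] for i in range(n_cpu)]

procs = [mp.Process(
    target=run_viewshed_by_cpu,
    args=(i, chunks[i], dem, '/data/vis_cpu_{}'.format(i))
) for i in range(n_cpu)]

for p in procs:
    p.start()
for p in procs:
    p.join()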
Example #2
def match_cellsize_and_clip(rstBands, refRaster, outFolder,
                            clipShp=None):
    """
    Resample rasters so they all share the same resolution and clip them
    
    Useful for resampling Sentinel-2 bands with a resolution coarser than 10 meters.
    
    Dependencies: 
    * GRASS GIS;
    * GDAL/OGR.
    """
    
    import os
    from gasp.gt.prop.prj import get_rst_epsg
    from gasp.gt.wenv.grs import run_grass
    from gasp.pyt.oss     import fprop, mkdir
    
    # Create outFolder if it does not exist
    if not os.path.exists(outFolder):
        mkdir(outFolder, overwrite=None)
    
    # Get EPSG from refRaster
    epsg = get_rst_epsg(refRaster, returnIsProj=None)
    
    """
    Start GRASS GIS Session
    """
    GRS_WORKSPACE = mkdir(os.path.join(outFolder, 'grswork'))
    grsb = run_grass(
        GRS_WORKSPACE, grassBIN='grass78', location='resample',
        srs=epsg
    )
    
    import grass.script as grass
    import grass.script.setup as gsetup
    
    gsetup.init(grsb, GRS_WORKSPACE, 'resample', 'PERMANENT')
    
    """
    Import packages related with GRASS GIS
    """
    from gasp.gt.torst     import rst_to_grs, grs_to_rst
    from gasp.gt.wenv.grs  import rst_to_region
    from gasp.gt.toshp.cff import shp_to_grs
    from gasp.gt.torst     import shp_to_rst, grs_to_mask
    
    # Send Ref Raster to GRASS GIS and set region
    extRst = rst_to_grs(refRaster, 'ext_rst')
    rst_to_region(extRst)
    
    # Import all bands in rstBands
    grs_bands = [rst_to_grs(i, fprop(i, 'fn')) for i in rstBands]
    
    if clipShp:
        # Add clipShp to GRASS
        grs_clip = shp_to_grs(clipShp, fprop(clipShp, 'fn'), asCMD=True)

        # SHP to Raster
        rstClip = shp_to_rst(
            grs_clip, 1, None, 0, 'rst_' + grs_clip,
            api='grass'
        )

        # Set region using the clip raster
        rst_to_region(rstClip)

        # Set mask
        grs_to_mask(rstClip)
    
    # Export bands
    return [grs_to_rst(
        i, os.path.join(outFolder, i + '.tif')
    ) for i in grs_bands]
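A hypothetical call, assuming 20 m Sentinel-2 bands, a 10 m reference band and an optional area-of-interest polygon (all paths are placeholders):

bands_20m = ['/data/s2/B05.tif', '/data/s2/B06.tif', '/data/s2/B07.tif']

out_bands = match_cellsize_and_clip(
    bands_20m,
    '/data/s2/B02.tif',        # 10 m band used as resolution/extent reference
    '/data/s2/resampled',      # output folder (created if missing)
    clipShp='/data/aoi.shp'    # optional clipping boundary
)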
Example #3
def bnds_to_mosaic(bands, outdata, ref_raster, loc=None):
    """
    Satellite images to mosaic
    
    bands = {
        'bnd_2' : [path_to_file, path_to_file],
        'bnd_3' : [path_to_file, path_to_file],
        'bnd_4' : [path_to_file, path_to_file],
    }
    """
    
    """
    Start GRASS GIS Session
    """
    
    import os
    from gasp.pyt.oss     import fprop
    from gasp.gt.prop.prj import get_rst_epsg
    from gasp.gt.wenv.grs import run_grass

    # Get EPSG from refRaster
    epsg = get_rst_epsg(ref_raster, returnIsProj=None)
    
    LOC = loc if loc else 'gr_loc'
    grass_base = run_grass(
        outdata, grassBIN='grass78',
        location=LOC, srs=epsg
    )
    
    import grass.script as grass
    import grass.script.setup as gsetup
    
    gsetup.init(grass_base, outdata, LOC, 'PERMANENT')
    
    # ************************************************************************ #
    # GRASS MODULES #
    # ************************************************************************ #
    from gasp.gt.torst    import rst_to_grs, grs_to_rst
    from gasp.gt.wenv.grs import rst_to_region
    # NOTE: rseries (used below to patch the bands) is not imported in this
    # snippet; it is another helper from the gasp package
    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(ref_raster, 'extent_raster')
    rst_to_region(extRst)
    # ************************************************************************ #
    # SEND DATA TO GRASS GIS #
    # ************************************************************************ #
    grs_bnds = {}
    
    for bnd in bands:
        l = []
        for b in bands[bnd]:
            bb = rst_to_grs(b, fprop(b, 'fn'))
            l.append(bb)
        
        grs_bnds[bnd] = l
    # ************************************************************************ #
    # PATCH bands and export #
    # ************************************************************************ #
    for bnd in grs_bnds:
        mosaic_band = rseries(grs_bnds[bnd], bnd, 'maximum')
        
        grs_bnds[bnd] = grs_to_rst(mosaic_band, os.path.join(
            outdata, mosaic_band + '.tif'
        ), as_cmd=True)
    
    return grs_bnds
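A hypothetical call, mosaicking the same bands coming from two adjacent tiles (all paths are placeholders):

bands = {
    'b02' : ['/data/tile_a/B02.tif', '/data/tile_b/B02.tif'],
    'b03' : ['/data/tile_a/B03.tif', '/data/tile_b/B03.tif']
}

mosaics = bnds_to_mosaic(
    bands,
    '/data/mosaic_out',      # output folder (also the GRASS workspace)
    '/data/ref_extent.tif',  # raster used to set SRS and region
    loc='mosaic_loc'
)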
Example #4
def raster_based(osmdata,
                 nomenclature,
                 refRaster,
                 lulcRst,
                 overwrite=None,
                 dataStore=None,
                 roadsAPI='POSTGIS'):
    """
    Convert OSM Data into Land Use/Land Cover Information
    
    A raster-based approach.
    
    TODO: Add detailed description
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import datetime
    import os
    import pandas
    import copy
    # ************************************************************************ #
    # Gasp dependencies #
    # ************************************************************************ #
    from gasp.pyt.oss import mkdir, fprop
    from gasp.gt.prop.ff import check_isRaster
    from gasp.gt.prop.prj import get_rst_epsg
    from gasp.gt.wenv.grs import run_grass
    if roadsAPI == 'POSTGIS':
        from gasp.sql.db import create_db
        from gasp.gql.to.osm import osm_to_psql
        from gasp.sds.osm2lulc.mod2 import roads_sqdb
        from gasp.sql.fm import dump_db
        from gasp.sql.db import drop_db
    else:
        from gasp.gt.toshp.osm import osm_to_sqdb
        from gasp.sds.osm2lulc.mod2 import grs_rst_roads
    from gasp.sds.osm2lulc.utils import osm_project, add_lulc_to_osmfeat, osmlulc_rsttbl
    from gasp.sds.osm2lulc.utils import get_ref_raster
    from gasp.sds.osm2lulc.mod1 import grs_rst
    from gasp.sds.osm2lulc.m3_4 import rst_area
    from gasp.sds.osm2lulc.mod5 import basic_buffer
    from gasp.sds.osm2lulc.mod6 import rst_pnt_to_build
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    # Check if input parameters exist!
    if not os.path.exists(os.path.dirname(lulcRst)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcRst)))

    if not os.path.exists(osmdata):
        raise ValueError(
            'File with OSM DATA ({}) does not exist!'.format(osmdata))

    if not os.path.exists(refRaster):
        raise ValueError(
            'File with reference area ({}) does not exist!'.format(refRaster))

    # Check if Nomenclature is valid
    nomenclature = "URBAN_ATLAS" if nomenclature != "URBAN_ATLAS" and \
        nomenclature != "CORINE_LAND_COVER" and \
        nomenclature == "GLOBE_LAND_30" else nomenclature

    time_a = datetime.datetime.now().replace(microsecond=0)

    workspace = os.path.join(os.path.dirname(lulcRst),
                             'osmtolulc') if not dataStore else dataStore

    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            mkdir(workspace)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        mkdir(workspace)

    # Get Ref Raster
    refRaster, epsg = get_ref_raster(refRaster, workspace, cellsize=2)

    from gasp.sds.osm2lulc import PRIORITIES, osmTableData, LEGEND

    __priorities = PRIORITIES[nomenclature]
    __legend = LEGEND[nomenclature]
    time_b = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Convert OSM file to SQLITE DB or to POSTGIS DB #
    # ************************************************************************ #
    if roadsAPI == 'POSTGIS':
        osm_db = create_db(fprop(osmdata, 'fn', forceLower=True),
                           overwrite=True)
        osm_db = osm_to_psql(osmdata, osm_db)
    else:
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(osm_db, osmTableData, nomenclature, api=roadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db,
        epsg,
        api=roadsAPI,
        isGlobeLand=None if nomenclature != 'GLOBE_LAND_30' else True)
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Start a GRASS GIS Session #
    # ************************************************************************ #
    grass_base = run_grass(workspace,
                           grassBIN='grass78',
                           location='grloc',
                           srs=epsg)
    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')

    # ************************************************************************ #
    # IMPORT SOME GASP MODULES FOR GRASS GIS #
    # ************************************************************************ #
    from gasp.gt.torst import rst_to_grs, grs_to_rst
    from gasp.gt.nop.mos import rsts_to_mosaic
    from gasp.gt.wenv.grs import rst_to_region
    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(refRaster, 'extent_raster')
    rst_to_region(extRst)
    time_f = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # MapResults #
    mergeOut = {}
    # ************************************************************************ #
    # ************************************************************************ #
    # 1 - Selection Rule #
    # ************************************************************************ #
    """
    selOut = {
        cls_code : rst_name, ...
    }
    """
    selOut, timeCheck1 = grs_rst(osm_db,
                                 osmTableData['polygons'],
                                 api=roadsAPI)

    for cls in selOut:
        mergeOut[cls] = [selOut[cls]]

    time_g = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 2 - Get Information About Roads Location #
    # ************************************************************************ #
    """
    roads = {
        cls_code : rst_name, ...
    }
    """

    if roadsAPI != 'POSTGIS':
        roads, timeCheck2 = grs_rst_roads(
            osm_db, osmTableData['lines'], osmTableData['polygons'], workspace,
            1221 if nomenclature != "GLOBE_LAND_30" else 801)
    else:
        roadCls = 1221 if nomenclature != "GLOBE_LAND_30" else 801

        roads, timeCheck2 = roads_sqdb(osm_db,
                                       osmTableData['lines'],
                                       osmTableData['polygons'],
                                       apidb='POSTGIS',
                                       asRst=roadCls)

        roads = {roadCls: roads}

    for cls in roads:
        if cls not in mergeOut:
            mergeOut[cls] = [roads[cls]]
        else:
            mergeOut[cls].append(roads[cls])

    time_h = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 3 - Area Upper than #
    # ************************************************************************ #
    """
    auOut = {
        cls_code : rst_name, ...
    }
    """

    if nomenclature != 'GLOBE_LAND_30':
        auOut, timeCheck3 = rst_area(osm_db,
                                     osmTableData['polygons'],
                                     UPPER=True,
                                     api=roadsAPI)

        for cls in auOut:
            if cls not in mergeOut:
                mergeOut[cls] = [auOut[cls]]
            else:
                mergeOut[cls].append(auOut[cls])

        time_l = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck3 = None
        time_l = None
    # ************************************************************************ #
    # 4 - Area Lower than #
    # ************************************************************************ #
    """
    alOut = {
        cls_code : rst_name, ...
    }
    """
    if nomenclature != 'GLOBE_LAND_30':
        alOut, timeCheck4 = rst_area(osm_db,
                                     osmTableData['polygons'],
                                     UPPER=None,
                                     api=roadsAPI)
        for cls in alOut:
            if cls not in mergeOut:
                mergeOut[cls] = [alOut[cls]]
            else:
                mergeOut[cls].append(alOut[cls])

        time_j = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck4 = None
        time_j = None
    # ************************************************************************ #
    # 5 - Get data from lines table (railway | waterway) #
    # ************************************************************************ #
    """
    bfOut = {
        cls_code : rst_name, ...
    }
    """

    bfOut, timeCheck5 = basic_buffer(osm_db,
                                     osmTableData['lines'],
                                     workspace,
                                     apidb=roadsAPI)
    for cls in bfOut:
        if cls not in mergeOut:
            mergeOut[cls] = [bfOut[cls]]
        else:
            mergeOut[cls].append(bfOut[cls])

    time_m = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 7 - Assign untagged Buildings to tags #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        buildsOut, timeCheck7 = rst_pnt_to_build(osm_db,
                                                 osmTableData['points'],
                                                 osmTableData['polygons'],
                                                 api_db=roadsAPI)

        for cls in buildsOut:
            if cls not in mergeOut:
                mergeOut[cls] = buildsOut[cls]
            else:
                mergeOut[cls] += buildsOut[cls]

        time_n = datetime.datetime.now().replace(microsecond=0)

    else:
        timeCheck7 = None
        time_n = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Produce LULC Map  #
    # ************************************************************************ #
    """
    Merge all results for one cls into one raster
    mergeOut = {
        cls_code : [rst_name, rst_name, ...], ...
    }
    into
    mergeOut = {
        cls_code : patched_raster, ...
    }
    """

    for cls in mergeOut:
        if len(mergeOut[cls]) == 1:
            mergeOut[cls] = mergeOut[cls][0]

        else:
            mergeOut[cls] = rsts_to_mosaic(mergeOut[cls],
                                           'mosaic_{}'.format(str(cls)),
                                           api="grass")

    time_o = datetime.datetime.now().replace(microsecond=0)
    """
    Merge all Class Raster using a priority rule
    """

    __priorities = PRIORITIES[nomenclature]
    lst_rst = []
    for cls in __priorities:
        if cls not in mergeOut:
            continue
        else:
            lst_rst.append(mergeOut[cls])

    outGrs = rsts_to_mosaic(lst_rst,
                            os.path.splitext(os.path.basename(lulcRst))[0],
                            api="grass")
    time_p = datetime.datetime.now().replace(microsecond=0)

    # Check if lulcRst has a valid raster format
    outIsRst = check_isRaster(lulcRst)
    if not outIsRst:
        from gasp.pyt.oss import fprop
        lulcRst = os.path.join(os.path.dirname(lulcRst),
                               fprop(lulcRst, 'fn') + '.tif')

    grs_to_rst(outGrs, lulcRst, as_cmd=True)
    osmlulc_rsttbl(
        nomenclature,
        os.path.join(os.path.dirname(lulcRst),
                     os.path.basename(lulcRst) + '.vat.dbf'))

    time_q = datetime.datetime.now().replace(microsecond=0)

    # Dump Database if PostGIS was used
    # Drop Database if PostGIS was used
    if roadsAPI == 'POSTGIS':
        dump_db(osm_db, os.path.join(workspace, osm_db + '.sql'), api='psql')
        drop_db(osm_db)

    return lulcRst, {
        0: ('set_settings', time_b - time_a),
        1: ('osm_to_sqdb', time_c - time_b),
        2: ('cls_in_sqdb', time_d - time_c),
        3: ('proj_data', time_e - time_d),
        4: ('set_grass', time_f - time_e),
        5: ('rule_1', time_g - time_f, timeCheck1),
        6: ('rule_2', time_h - time_g, timeCheck2),
        7: None if not timeCheck3 else ('rule_3', time_l - time_h, timeCheck3),
        8: None if not timeCheck4 else ('rule_4', time_j - time_l, timeCheck4),
        9: ('rule_5', time_m - time_j if timeCheck4 else time_m - time_h,
            timeCheck5),
        10: None if not timeCheck7 else ('rule_7', time_n - time_m, timeCheck7),
        11: ('merge_rst', time_o - time_n),
        12: ('priority_rule', time_p - time_o),
        13: ('export_rst', time_q - time_p)
    }
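A hypothetical call of the raster approach, assuming an OSM extract and a reference raster for the study area (paths are placeholders; roadsAPI='POSTGIS' would additionally require a running PostgreSQL/PostGIS instance):

lulc_rst, times = raster_based(
    '/data/osm/region.osm',    # OSM data file
    'URBAN_ATLAS',             # target nomenclature
    '/data/ref_area.tif',      # raster defining extent and SRS
    '/data/lulc/lulc_ua.tif',  # output LULC raster
    roadsAPI='SQLITE'          # avoid the PostGIS dependency
)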
Example #5
def vector_based(osmdata,
                 nomenclature,
                 refRaster,
                 lulcShp,
                 overwrite=None,
                 dataStore=None,
                 RoadsAPI='POSTGIS'):
    """
    Convert OSM Data into Land Use/Land Cover Information
    
    A vector-based approach.
    
    TODO: Add a detailed description.
    
    RoadsAPI Options:
    * GRASS
    * SQLITE
    * POSTGIS
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import datetime
    import os
    import copy
    # ************************************************************************ #
    # GASP dependencies #
    # ************************************************************************ #
    from gasp.pyt.oss import fprop, mkdir
    from gasp.gt.wenv.grs import run_grass
    if RoadsAPI == 'POSTGIS':
        from gasp.sql.db import create_db
        from gasp.gql.to.osm import osm_to_psql
        from gasp.sql.db import drop_db
        from gasp.sql.fm import dump_db
    else:
        from gasp.gt.toshp.osm import osm_to_sqdb
    from gasp.sds.osm2lulc.utils import osm_project, add_lulc_to_osmfeat, get_ref_raster
    from gasp.gt.toshp.mtos import shps_to_shp
    from gasp.sds.osm2lulc.mod1 import grs_vector
    if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS':
        from gasp.sds.osm2lulc.mod2 import roads_sqdb
    else:
        from gasp.sds.osm2lulc.mod2 import grs_vec_roads
    from gasp.sds.osm2lulc.m3_4 import grs_vect_selbyarea
    from gasp.sds.osm2lulc.mod5 import grs_vect_bbuffer
    from gasp.sds.osm2lulc.mod6 import vector_assign_pntags_to_build
    from gasp.gt.toshp.mtos import same_attr_to_shp
    from gasp.gt.prj import def_prj
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    # Check if input parameters exist!
    if not os.path.exists(os.path.dirname(lulcShp)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcShp)))

    if not os.path.exists(osmdata):
        raise ValueError(
            'File with OSM DATA ({}) does not exist!'.format(osmdata))

    if not os.path.exists(refRaster):
        raise ValueError(
            'File with reference area ({}) does not exist!'.format(refRaster))

    # Check if Nomenclature is valid
    nomenclature = "URBAN_ATLAS" if nomenclature != "URBAN_ATLAS" and \
        nomenclature != "CORINE_LAND_COVER" and \
        nomenclature == "GLOBE_LAND_30" else nomenclature

    time_a = datetime.datetime.now().replace(microsecond=0)

    # Create workspace for temporary files
    workspace = os.path.join(os.path.dirname(lulcShp),
                             'osmtolulc') if not dataStore else dataStore

    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            mkdir(workspace)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        mkdir(workspace)

    # Get Reference Raster
    refRaster, epsg = get_ref_raster(refRaster, workspace, cellsize=10)

    from gasp.sds.osm2lulc import osmTableData, PRIORITIES, LEGEND

    __priorities = PRIORITIES[nomenclature]
    __legend = LEGEND[nomenclature]

    time_b = datetime.datetime.now().replace(microsecond=0)

    if RoadsAPI != 'POSTGIS':
        # ******************************************************************** #
        # Convert OSM file to SQLITE DB #
        # ******************************************************************** #
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    else:
        # Convert OSM file to POSTGRESQL DB #
        osm_db = create_db(fprop(osmdata, 'fn', forceLower=True),
                           overwrite=True)
        osm_db = osm_to_psql(osmdata, osm_db)
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(osm_db,
                        osmTableData,
                        nomenclature,
                        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db,
        epsg,
        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI,
        isGlobeLand=None if nomenclature != 'GLOBE_LAND_30' else True)
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Start a GRASS GIS Session #
    # ************************************************************************ #
    grass_base = run_grass(workspace,
                           grassBIN='grass78',
                           location='grloc',
                           srs=epsg)
    #import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')

    # ************************************************************************ #
    # IMPORT SOME GASP MODULES FOR GRASS GIS #
    # ************************************************************************ #
    from gasp.gt.gop.ovlay import erase
    from gasp.gt.wenv.grs import rst_to_region
    from gasp.gt.gop.genze import dissolve
    from gasp.gt.tbl.grs import add_and_update, reset_table, update_table
    from gasp.gt.tbl.fld import add_fields
    from gasp.gt.toshp.cff import shp_to_grs, grs_to_shp
    from gasp.gt.torst import rst_to_grs
    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(refRaster, 'extent_raster')
    rst_to_region(extRst)
    time_f = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    osmShps = []
    # ************************************************************************ #
    # 1 - Selection Rule #
    # ************************************************************************ #
    ruleOneShp, timeCheck1 = grs_vector(osm_db,
                                        osmTableData['polygons'],
                                        apidb=RoadsAPI)
    osmShps.append(ruleOneShp)

    time_g = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 2 - Get Information About Roads Location #
    # ************************************************************************ #
    ruleRowShp, timeCheck2 = roads_sqdb(
        osm_db,
        osmTableData['lines'],
        osmTableData['polygons'],
        apidb=RoadsAPI
    ) if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS' else grs_vec_roads(
        osm_db, osmTableData['lines'], osmTableData['polygons'])

    osmShps.append(ruleRowShp)
    time_h = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 3 - Area Upper than #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleThreeShp, timeCheck3 = grs_vect_selbyarea(osm_db,
                                                      osmTableData['polygons'],
                                                      UPPER=True,
                                                      apidb=RoadsAPI)

        osmShps.append(ruleThreeShp)
        time_l = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck3 = None
        time_l = None
    # ************************************************************************ #
    # 4 - Area Lower than #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleFourShp, timeCheck4 = grs_vect_selbyarea(osm_db,
                                                     osmTableData['polygons'],
                                                     UPPER=False,
                                                     apidb=RoadsAPI)

        osmShps.append(ruleFourShp)
        time_j = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck4 = None
        time_j = None
    # ************************************************************************ #
    # 5 - Get data from lines table (railway | waterway) #
    # ************************************************************************ #
    ruleFiveShp, timeCheck5 = grs_vect_bbuffer(osm_db,
                                               osmTableData["lines"],
                                               api_db=RoadsAPI)

    osmShps.append(ruleFiveShp)
    time_m = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 7 - Assign untagged Buildings to tags #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleSeven11, ruleSeven12, timeCheck7 = vector_assign_pntags_to_build(
            osm_db,
            osmTableData['points'],
            osmTableData['polygons'],
            apidb=RoadsAPI)

        if ruleSeven11:
            osmShps.append(ruleSeven11)

        if ruleSeven12:
            osmShps.append(ruleSeven12)

        time_n = datetime.datetime.now().replace(microsecond=0)

    else:
        timeCheck7 = None
        time_n = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Produce LULC Map  #
    # ************************************************************************ #
    """
    Get Shps with all geometries related to one class - one Shape per Class
    """

    _osmShps = []
    for i in range(len(osmShps)):
        if not osmShps[i]: continue

        _osmShps.append(
            grs_to_shp(osmShps[i],
                       os.path.join(workspace, osmShps[i] + '.shp'),
                       'auto',
                       lyrN=1,
                       asCMD=True,
                       asMultiPart=None))

    for shp in _osmShps:
        def_prj(os.path.splitext(shp)[0] + '.prj', epsg=epsg, api='epsgio')

    _osmShps = same_attr_to_shp(_osmShps,
                                "cat",
                                workspace,
                                "osm_",
                                resultDict=True)
    del osmShps

    time_o = datetime.datetime.now().replace(microsecond=0)
    """
    Merge all Classes into one feature class using a priority rule
    """

    osmShps = {}
    for cls in _osmShps:
        if cls == '1':
            osmShps[1221] = shp_to_grs(_osmShps[cls], "osm_1221", asCMD=True)

        else:
            osmShps[int(cls)] = shp_to_grs(_osmShps[cls],
                                           "osm_" + cls,
                                           asCMD=True)

    # Erase overlapping areas by priority
    osmNameRef = copy.deepcopy(osmShps)

    for e in range(len(__priorities)):
        if e + 1 == len(__priorities): break

        if __priorities[e] not in osmShps:
            continue
        else:
            for i in range(e + 1, len(__priorities)):
                if __priorities[i] not in osmShps:
                    continue
                else:
                    osmShps[__priorities[i]] = erase(
                        osmShps[__priorities[i]],
                        osmShps[__priorities[e]],
                        "{}_{}".format(osmNameRef[__priorities[i]], e),
                        notTbl=True,
                        api='pygrass')

    time_p = datetime.datetime.now().replace(microsecond=0)

    # Export all classes
    lst_merge = []
    a = None
    for i in range(len(__priorities)):
        if __priorities[i] not in osmShps:
            continue

        if not a:
            reset_table(osmShps[__priorities[i]], {
                'cls': 'varchar(5)',
                'leg': 'varchar(75)'
            }, {
                'cls': str(__priorities[i]),
                'leg': str(__legend[__priorities[i]])
            })

            a = 1

        else:
            add_and_update(osmShps[__priorities[i]], {'cls': 'varchar(5)'},
                           {'cls': str(__priorities[i])})

        ds = dissolve(osmShps[__priorities[i]],
                      'dl_{}'.format(str(__priorities[i])),
                      'cls',
                      api="grass")

        add_fields(ds, {'leg': 'varchar(75)'}, api="grass")
        update_table(ds, 'leg', str(__legend[__priorities[i]]), 'leg is null')

        lst_merge.append(
            grs_to_shp(ds,
                       os.path.join(workspace, "lulc_{}.shp".format(
                           str(__priorities[i]))),
                       'auto',
                       lyrN=1,
                       asCMD=True,
                       asMultiPart=None))

    time_q = datetime.datetime.now().replace(microsecond=0)

    if fprop(lulcShp, 'ff') != '.shp':
        lulcShp = os.path.join(os.path.dirname(lulcShp),
                               fprop(lulcShp, 'fn') + '.shp')

    shps_to_shp(lst_merge, lulcShp, api='pandas')

    # Check if prj of lulcShp exists and create it if necessary
    prj_ff = os.path.splitext(lulcShp)[0] + '.prj'
    if not os.path.exists(prj_ff):
        def_prj(prj_ff, epsg=epsg, api='epsgio')

    time_r = datetime.datetime.now().replace(microsecond=0)

    # Dump Database if PostGIS was used
    # Drop Database if PostGIS was used
    if RoadsAPI == 'POSTGIS':
        dump_db(osm_db, os.path.join(workspace, osm_db + '.sql'), api='psql')
        drop_db(osm_db)

    return lulcShp, {
        0: ('set_settings', time_b - time_a),
        1: ('osm_to_sqdb', time_c - time_b),
        2: ('cls_in_sqdb', time_d - time_c),
        3: ('proj_data', time_e - time_d),
        4: ('set_grass', time_f - time_e),
        5: ('rule_1', time_g - time_f, timeCheck1),
        6: ('rule_2', time_h - time_g, timeCheck2),
        7: None if not timeCheck3 else ('rule_3', time_l - time_h, timeCheck3),
        8: None if not timeCheck4 else ('rule_4', time_j - time_l, timeCheck4),
        9: ('rule_5', time_m - time_j if timeCheck4 else time_m - time_h,
            timeCheck5),
        10: None if not timeCheck7 else ('rule_7', time_n - time_m, timeCheck7),
        11: ('disj_cls', time_o - time_n),
        12: ('priority_rule', time_p - time_o),
        13: ('export_cls', time_q - time_p),
        14: ('merge_cls', time_r - time_q)
    }
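A hypothetical call of the vector approach (paths are placeholders; the default RoadsAPI='POSTGIS' requires a running PostgreSQL/PostGIS instance):

lulc_shp, times = vector_based(
    '/data/osm/region.osm',     # OSM data file
    'CORINE_LAND_COVER',        # target nomenclature
    '/data/ref_area.tif',       # raster defining extent and SRS
    '/data/lulc/lulc_clc.shp',  # output LULC shapefile
    RoadsAPI='POSTGIS'
)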
Example #6
def shp_diff_fm_ref(refshp, refcol, shps, out_folder, refrst, db=None):
    """
    Check differences between each shp in shps and one reference shape

    Dependencies:
    - GRASS;
    - PostgreSQL with Postgis or GeoPandas;
    """

    import os
    from gasp.gt.prop.ff import check_isRaster
    from gasp.gt.wenv.grs import run_grass
    from gasp.pyt.oss import fprop
    from gasp.gt.tbl.tomtx import tbl_to_areamtx

    # Check if folder exists, if not create it
    if not os.path.exists(out_folder):
        from gasp.pyt.oss import mkdir
        mkdir(out_folder)

    # Start GRASS GIS Session
    gbase = run_grass(out_folder,
                      grassBIN='grass78',
                      location='shpdif',
                      srs=refrst)

    import grass.script.setup as gsetup

    gsetup.init(gbase, out_folder, 'shpdif', 'PERMANENT')

    from gasp.gt.toshp.cff import shp_to_grs, grs_to_shp
    from gasp.gt.torst import rst_to_grs
    from gasp.gt.tbl.fld import rn_cols
    from gasp.gt.toshp.rst import rst_to_polyg

    # Convert to SHAPE if file is Raster
    # Rename interest columns
    i = 0
    lstff = [refshp] + list(shps.keys())
    __shps = {}
    for s in lstff:
        is_rst = check_isRaster(s)

        if is_rst:
            # To GRASS
            rname = fprop(s, 'fn')
            inrst = rst_to_grs(s, "rst_" + rname, as_cmd=True)

            # To vector
            d = rst_to_polyg(inrst,
                             rname,
                             rstColumn="lulc_{}".format(str(i)),
                             gisApi="grass")

        else:
            # To GRASS
            d = shp_to_grs(s, fprop(s, 'fn'), asCMD=True)

            # Change name of interest column
            rn_cols(d, {shps[s] if i else refcol: "lulc_{}".format(str(i))},
                    api="grass")

        # Export To Shapefile
        if not i:
            refshp = grs_to_shp(d, os.path.join(out_folder, d + '.shp'),
                                'area')
            refcol = "lulc_{}".format(str(i))

        else:
            shp = grs_to_shp(d, os.path.join(out_folder, d + '.shp'), 'area')
            __shps[shp] = "lulc_{}".format(str(i))

        i += 1

    # Union Shapefiles
    # NOTE: optimized_union_anls (used below) is not imported in this snippet;
    # it is another helper from the gasp package
    union_shape = {}

    for shp in __shps:
        # Optimized Union
        sname = fprop(shp, 'fn')
        union_shape[shp] = optimized_union_anls(
            shp,
            refshp,
            os.path.join(out_folder, sname + '_un.shp'),
            refrst,
            os.path.join(out_folder, "wk_" + sname),
            multiProcess=True)

        # Produce confusion matrices
        mtxf = tbl_to_areamtx(union_shape[shp],
                              "a_" + __shps[shp],
                              'b_' + refcol,
                              os.path.join(out_folder, sname + '.xlsx'),
                              db=db,
                              with_metrics=True)

    return out_folder
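A hypothetical call comparing two LULC maps against a reference map (paths and column names are placeholders):

out = shp_diff_fm_ref(
    '/data/ref_lulc.shp', 'lulc_cls',   # reference map and its class column
    {
        '/data/map_a.shp' : 'cls_a',    # maps to compare -> class column
        '/data/map_b.tif' : None        # rasters get a class column on conversion
    },
    '/data/diff_results',               # output folder
    '/data/ref_area.tif',               # raster defining the GRASS location
    db='diff_db'
)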
Example #7
def check_shape_diff(SHAPES_TO_COMPARE, OUT_FOLDER, REPORT, DB,
                     GRASS_REGION_TEMPLATE):
    """
    Script to check differences between pairs of Feature Classes
    
    Suppose we have several Feature Classes (FC), each holding a given
    attribute; imagine also that, considering every possible pair of these
    FCs, we want to compare the differences in the distribution of that
    attribute's values for each pair.
    
    * Dependencies:
    - GRASS;
    - PostgreSQL;
    - PostGIS.
    """

    import datetime
    import os
    import pandas
    from gasp.sql.fm import q_to_obj
    from gasp.to import db_to_tbl
    from gasp.sql.to import df_to_db
    from gasp.gt.toshp.cff import shp_to_shp
    from gasp.gt.toshp.db import dbtbl_to_shp
    from gasp.gt.toshp.rst import rst_to_polyg
    from gasp.gql.to import shp_to_psql
    from gasp.gql.tomtx import tbl_to_area_mtx
    from gasp.gt.prop.ff import check_isRaster
    from gasp.pyt.oss import fprop
    from gasp.sql.db import create_db
    from gasp.sql.tbl import tbls_to_tbl
    from gasp.sql.to import q_to_ntbl
    from gasp.gql.cln import fix_geom

    # Check if folder exists, if not create it
    if not os.path.exists(OUT_FOLDER):
        from gasp.pyt.oss import mkdir
        mkdir(OUT_FOLDER, overwrite=None)
    else:
        raise ValueError('{} already exists!'.format(OUT_FOLDER))

    from gasp.gt.wenv.grs import run_grass

    gbase = run_grass(OUT_FOLDER,
                      grassBIN='grass78',
                      location='shpdif',
                      srs=GRASS_REGION_TEMPLATE)

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, OUT_FOLDER, 'shpdif', 'PERMANENT')

    from gasp.gt.toshp.cff import shp_to_grs, grs_to_shp
    from gasp.gt.torst import rst_to_grs
    from gasp.gt.tbl.fld import rn_cols

    # Convert to SHAPE if file is Raster
    i = 0
    _SHP_TO_COMPARE = {}
    for s in SHAPES_TO_COMPARE:
        isRaster = check_isRaster(s)

        if isRaster:
            # To GRASS
            rstName = fprop(s, 'fn')
            inRst = rst_to_grs(s, "rst_" + rstName, as_cmd=True)
            # To Vector
            d = rst_to_polyg(inRst,
                             rstName,
                             rstColumn="lulc_{}".format(i),
                             gisApi="grass")

            # Export Shapefile
            shp = grs_to_shp(d, os.path.join(OUT_FOLDER, d + '.shp'), "area")

            _SHP_TO_COMPARE[shp] = "lulc_{}".format(i)

        else:
            # To GRASS
            grsV = shp_to_grs(s, fprop(s, 'fn'), asCMD=True)

            # Change name of column with comparing value
            ncol = "lulc_{}".format(str(i))
            rn_cols(grsV, {SHAPES_TO_COMPARE[s]: "lulc_{}".format(str(i))},
                    api="grass")

            # Export
            shp = grs_to_shp(grsV, os.path.join(OUT_FOLDER, grsV + '_rn.shp'),
                             "area")

            _SHP_TO_COMPARE[shp] = "lulc_{}".format(str(i))

        i += 1

    SHAPES_TO_COMPARE = _SHP_TO_COMPARE
    __SHAPES_TO_COMPARE = SHAPES_TO_COMPARE

    # Create database
    create_db(DB, api='psql')
    """ Union SHAPEs """

    UNION_SHAPE = {}
    FIX_GEOM = {}

    SHPS = list(__SHAPES_TO_COMPARE.keys())
    for i in range(len(SHPS)):
        for e in range(i + 1, len(SHPS)):
            # Optimized Union
            print("Union between {} and {}".format(SHPS[i], SHPS[e]))
            time_a = datetime.datetime.now().replace(microsecond=0)
            __unShp = optimized_union_anls(
                SHPS[i],
                SHPS[e],
                os.path.join(OUT_FOLDER, "un_{}_{}.shp".format(i, e)),
                GRASS_REGION_TEMPLATE,
                os.path.join(OUT_FOLDER, "work_{}_{}".format(i, e)),
                multiProcess=True)
            time_b = datetime.datetime.now().replace(microsecond=0)
            print(time_b - time_a)

            # Rename cols
            unShp = rn_cols(
                __unShp, {
                    "a_" + __SHAPES_TO_COMPARE[SHPS[i]]:
                    __SHAPES_TO_COMPARE[SHPS[i]],
                    "b_" + __SHAPES_TO_COMPARE[SHPS[e]]:
                    __SHAPES_TO_COMPARE[SHPS[e]]
                })

            UNION_SHAPE[(SHPS[i], SHPS[e])] = unShp

    # Send data to postgresql
    SYNTH_TBL = {}

    for uShp in UNION_SHAPE:
        # Send data to PostgreSQL
        union_tbl = shp_to_psql(DB, UNION_SHAPE[uShp], api='shp2pgsql')

        # Produce table with % of area equal in both maps
        areaMapTbl = q_to_ntbl(
            DB,
            "{}_syn".format(union_tbl),
            ("SELECT CAST('{lulc_1}' AS text) AS lulc_1, "
             "CAST('{lulc_2}' AS text) AS lulc_2, "
             "round("
             "CAST(SUM(g_area) / 1000000 AS numeric), 4"
             ") AS agree_area, round("
             "CAST((SUM(g_area) / MIN(total_area)) * 100 AS numeric), 4"
             ") AS agree_percentage, "
             "round("
             "CAST(MIN(total_area) / 1000000 AS numeric), 4"
             ") AS total_area FROM ("
             "SELECT {map1_cls}, {map2_cls}, ST_Area(geom) AS g_area, "
             "CASE "
             "WHEN {map1_cls} = {map2_cls} "
             "THEN 1 ELSE 0 "
             "END AS isthesame, total_area FROM {tbl}, ("
             "SELECT SUM(ST_Area(geom)) AS total_area FROM {tbl}"
             ") AS foo2"
             ") AS foo WHERE isthesame = 1 "
             "GROUP BY isthesame").format(
                 lulc_1=fprop(uShp[0], 'fn'),
                 lulc_2=fprop(uShp[1], 'fn'),
                 map1_cls=__SHAPES_TO_COMPARE[uShp[0]],
                 map2_cls=__SHAPES_TO_COMPARE[uShp[1]],
                 tbl=union_tbl),
            api='psql')

        # Produce confusion matrix for the pair in comparison
        matrixTbl = tbl_to_area_mtx(DB, union_tbl,
                                    __SHAPES_TO_COMPARE[uShp[0]],
                                    __SHAPES_TO_COMPARE[uShp[1]],
                                    union_tbl + '_mtx')

        SYNTH_TBL[uShp] = {"TOTAL": areaMapTbl, "MATRIX": matrixTbl}

    # UNION ALL TOTAL TABLES
    total_table = tbls_to_tbl(DB, [SYNTH_TBL[k]["TOTAL"] for k in SYNTH_TBL],
                              'total_table')

    # Create table with % of agreement between each pair of maps
    mapsNames = q_to_obj(
        DB,
        ("SELECT lulc FROM ("
         "SELECT lulc_1 AS lulc FROM {tbl} GROUP BY lulc_1 "
         "UNION ALL "
         "SELECT lulc_2 AS lulc FROM {tbl} GROUP BY lulc_2"
         ") AS lu GROUP BY lulc ORDER BY lulc").format(tbl=total_table),
        db_api='psql').lulc.tolist()

    FLDS_TO_PIVOT = ["agree_percentage", "total_area"]

    Q = ("SELECT * FROM crosstab('"
         "SELECT CASE "
         "WHEN foo.lulc_1 IS NOT NULL THEN foo.lulc_1 ELSE jtbl.tmp1 "
         "END AS lulc_1, CASE "
         "WHEN foo.lulc_2 IS NOT NULL THEN foo.lulc_2 ELSE jtbl.tmp2 "
         "END AS lulc_2, CASE "
         "WHEN foo.{valCol} IS NOT NULL THEN foo.{valCol} ELSE 0 "
         "END AS agree_percentage FROM ("
         "SELECT lulc_1, lulc_2, {valCol} FROM {tbl} UNION ALL "
         "SELECT lulc_1, lulc_2, {valCol} FROM ("
         "SELECT lulc_1 AS lulc_2, lulc_2 AS lulc_1, {valCol} "
         "FROM {tbl}"
         ") AS tst"
         ") AS foo FULL JOIN ("
         "SELECT lulc_1 AS tmp1, lulc_2 AS tmp2 FROM ("
         "SELECT lulc_1 AS lulc_1 FROM {tbl} GROUP BY lulc_1 "
         "UNION ALL "
         "SELECT lulc_2 AS lulc_1 FROM {tbl} GROUP BY lulc_2"
         ") AS tst_1, ("
         "SELECT lulc_1 AS lulc_2 FROM {tbl} GROUP BY lulc_1 "
         "UNION ALL "
         "SELECT lulc_2 AS lulc_2 FROM {tbl} GROUP BY lulc_2"
         ") AS tst_2 WHERE lulc_1 = lulc_2 GROUP BY lulc_1, lulc_2"
         ") AS jtbl ON foo.lulc_1 = jtbl.tmp1 AND foo.lulc_2 = jtbl.tmp2 "
         "ORDER BY lulc_1, lulc_2"
         "') AS ct("
         "lulc_map text, {crossCols}"
         ")")

    TOTAL_AGREE_TABLE = None
    TOTAL_AREA_TABLE = None
    for f in FLDS_TO_PIVOT:
        if not TOTAL_AGREE_TABLE:
            TOTAL_AGREE_TABLE = q_to_ntbl(
                DB,
                "agreement_table",
                Q.format(tbl=total_table,
                         valCol=f,
                         crossCols=", ".join([
                             "{} numeric".format(map_) for map_ in mapsNames
                         ])),
                api='psql')

        else:
            TOTAL_AREA_TABLE = q_to_ntbl(DB,
                                         "area_table",
                                         Q.format(tbl=total_table,
                                                  valCol=f,
                                                  crossCols=", ".join([
                                                      "{} numeric".format(map_)
                                                      for map_ in mapsNames
                                                  ])),
                                         api='psql')

    # Union Mapping
    UNION_MAPPING = pandas.DataFrame(
        [[k[0], k[1], fprop(UNION_SHAPE[k], 'fn')] for k in UNION_SHAPE],
        columns=['shp_a', 'shp_b', 'union_shp'])

    UNION_MAPPING = df_to_db(DB, UNION_MAPPING, 'union_map', api='psql')

    # Export Results
    TABLES = [UNION_MAPPING, TOTAL_AGREE_TABLE, TOTAL_AREA_TABLE
              ] + [SYNTH_TBL[x]["MATRIX"] for x in SYNTH_TBL]

    SHEETS = ["union_map", "agreement_percentage", "area_with_data_km"] + [
        "{}_{}".format(fprop(x[0], 'fn')[:15],
                       fprop(x[1], 'fn')[:15]) for x in SYNTH_TBL
    ]

    db_to_tbl(DB, ["SELECT * FROM {}".format(x) for x in TABLES],
              REPORT,
              sheetsNames=SHEETS,
              dbAPI='psql')

    return REPORT
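A hypothetical call comparing three maps pairwise (paths, column names and the database name are placeholders):

report = check_shape_diff(
    {
        '/data/lulc_2012.shp' : 'cls12',   # feature class -> attribute to compare
        '/data/lulc_2018.shp' : 'cls18',
        '/data/lulc_model.tif': None       # rasters get an attribute on conversion
    },
    '/data/diff_workspace',                # output folder (must not exist yet)
    '/data/diff_report.xlsx',              # final report
    'shpdiff_db',                          # PostgreSQL database to be created
    '/data/ref_area.tif'                   # raster defining the GRASS location
)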
Example #8
def make_dem(grass_workspace,
             data,
             field,
             output,
             extent_template,
             method="IDW",
             cell_size=None,
             mask=None):
    """
    Create Digital Elevation Model
    
    Methods Available:
    * IDW;
    * BSPLINE;
    * SPLINE;
    * CONTOUR;
    """

    import os

    from gasp.pyt.oss import fprop
    from gasp.gt.wenv.grs import run_grass
    from gasp.gt.prop.prj import get_epsg

    LOC_NAME = fprop(data, 'fn', forceLower=True)[:5] + "_loc"

    # Get EPSG From Raster
    EPSG = get_epsg(extent_template)
    if not EPSG:
        raise ValueError(
            'Cannot get EPSG code of Extent Template File ({})'.format(
                extent_template))

    # Know if data geometry are points
    if method == 'BSPLINE' or method == 'SPLINE':
        from gasp.gt.prop.feat import get_gtype

        data_gtype = get_gtype(data, gisApi='ogr')

    # Create GRASS GIS Location
    grass_base = run_grass(grass_workspace, location=LOC_NAME, srs=EPSG)

    # Start GRASS GIS Session
    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, grass_workspace, LOC_NAME, 'PERMANENT')

    # Get Extent Raster
    # NOTE: ob_ref_rst is not imported in this snippet; it is another helper
    # from the gasp package
    ref_template = ob_ref_rst(extent_template,
                              os.path.join(grass_workspace, LOC_NAME),
                              cellsize=cell_size)

    # IMPORT GRASS GIS MODULES #
    from gasp.gt.torst import rst_to_grs, grs_to_rst
    from gasp.gt.toshp.cff import shp_to_grs
    from gasp.gt.wenv.grs import rst_to_region

    # Configure region
    rst_to_grs(ref_template, 'extent')
    rst_to_region('extent')

    # Convert elevation "data" to GRASS Vector
    elv = shp_to_grs(data, 'elevation')

    OUTPUT_NAME = fprop(output, 'fn', forceLower=True)

    if method == "BSPLINE":
        from gasp.gt.nop.itp import bspline

        # Convert to points if necessary
        if data_gtype != 'POINT' and data_gtype != 'MULTIPOINT':
            from gasp.gt.toshp.cgeo import feat_vertex_to_pnt

            elev_pnt = feat_vertex_to_pnt(elv, "elev_pnt", nodes=None)
        else:
            elev_pnt = elv

        outRst = bspline(elev_pnt,
                         field,
                         OUTPUT_NAME,
                         mway='bicubic',
                         lyrN=1,
                         asCMD=True)

    elif method == "SPLINE":
        from gasp.gt.nop.itp import surfrst

        # Convert to points if necessary
        if data_gtype != 'POINT' and data_gtype != 'MULTIPOINT':
            from gasp.gt.toshp.cgeo import feat_vertex_to_pnt
            elev_pnt = feat_vertex_to_pnt(elv, "elev_pnt", nodes=None)
        else:
            elev_pnt = elv

        outRst = surfrst(elev_pnt, field, OUTPUT_NAME, lyrN=1, ascmd=True)

    elif method == "CONTOUR":
        from gasp.gt.torst import shp_to_rst
        from gasp.gt.nop.itp import surfcontour

        # Apply mask if mask
        if mask:
            from gasp.gt.torst import grs_to_mask, rst_to_grs

            rst_mask = rst_to_grs(mask, 'rst_mask', as_cmd=True)
            grs_to_mask(rst_mask)

        # Elevation (GRASS Vector) to Raster
        elevRst = shp_to_rst(elv,
                             field,
                             None,
                             None,
                             'rst_elevation',
                             api="pygrass")

        # Run Interpolator
        outRst = surfcontour(elevRst, OUTPUT_NAME, ascmd=True)

    elif method == "IDW":
        from gasp.gt.nop.itp import ridw
        from gasp.gt.nop.alg import rstcalc
        from gasp.gt.torst import shp_to_rst

        # Elevation (GRASS Vector) to Raster
        elevRst = shp_to_rst(elv,
                             field,
                             None,
                             None,
                             'rst_elevation',
                             api='pygrass')
        # Multiply cells values by 100 000.0
        rstcalc('int(rst_elevation * 100000)', 'rst_elev_int', api='pygrass')
        # Run IDW to generate the new DEM
        ridw('rst_elev_int', 'dem_int', numberPoints=15)
        # DEM to Float
        rstcalc('dem_int / 100000.0', OUTPUT_NAME, api='pygrass')

    # Export DEM to a file outside GRASS Workspace
    grs_to_rst(OUTPUT_NAME, output)

    return output
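A hypothetical call interpolating a DEM from contour data (paths and field name are placeholders):

dem = make_dem(
    '/data/grass_ws',           # GRASS GIS workspace folder
    '/data/contours.shp',       # vector data with elevation values
    'elevation',                # field holding the elevation values
    '/data/dem_idw.tif',        # output DEM
    '/data/ref_extent.tif',     # raster used as extent/SRS template
    method='IDW',
    cell_size=10
)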
Example #9
    def run_viewshed_by_cpu(tid,
                            db,
                            obs,
                            dem,
                            srs,
                            vis_basename='vis',
                            maxdst=None,
                            obselevation=None):
        # Create Database
        new_db = create_db("{}_{}".format(db, str(tid)), api='psql')

        # Points to Database
        pnt_tbl = df_to_db(new_db,
                           obs,
                           'pnt_tbl',
                           api='psql',
                           epsg=srs,
                           geomType='Point',
                           colGeom='geometry')

        # Create GRASS GIS Session
        workspace = mkdir(
            os.path.join(os.path.dirname(dem), 'work_{}'.format(str(tid))))
        loc_name = 'vis_loc'
        gbase = run_grass(workspace, location=loc_name, srs=dem)

        # Start GRASS GIS Session
        import grass.script as grass
        import grass.script.setup as gsetup
        gsetup.init(gbase, workspace, loc_name, 'PERMANENT')

        from gasp.gt.torst import rst_to_grs, grs_to_rst
        from gasp.gt.nop.surf import grs_viewshed
        from gasp.gt.deldt import del_rst

        # Send DEM to GRASS GIS
        grs_dem = rst_to_grs(dem, 'grs_dem', as_cmd=True)

        # Produce Viewshed for each point in obs
        for idx, row in obs.iterrows():
            # Get Viewshed raster
            vrst = grs_viewshed(grs_dem, (row.geometry.x, row.geometry.y),
                                '{}_{}'.format(vis_basename, str(row[obs_id])),
                                max_dist=maxdst,
                                obs_elv=obselevation)

            # Export Raster to File
            frst = grs_to_rst(vrst, os.path.join(workspace, vrst + '.tif'))

            # Raster to Array
            img = gdal.Open(frst)
            num = img.ReadAsArray()

            # Two Dimension to One Dimension
            # Reshape Array
            numone = num.reshape(num.shape[0] * num.shape[1])

            # Get Indexes with visibility
            visnum = np.arange(numone.shape[0]).astype(np.uint32)
            visnum = visnum[numone == 1]

            # Get rows indexes (flat index // number of columns)
            visrow = visnum // num.shape[1]
            visrow = visrow.astype(np.uint32)

            # Get cols indexes
            viscol = visnum - (visrow * num.shape[1])

            # Visibility indexes to Pandas DataFrame
            idxnum = np.full(visrow.shape, row[obs_id])
            visdf = pd.DataFrame({
                'pntid': idxnum,
                'rowi': visrow,
                'coli': viscol
            })

            # Pandas DF to database
            # Create Visibility table
            df_to_db(new_db,
                     visdf,
                     vis_basename,
                     api='psql',
                     colGeom=None,
                     append=None if not idx else True)

            # Delete all variables
            numone = None
            visnum = None
            visrow = None
            viscol = None
            idxnum = None
            visdf = None
            del img

            # Delete GRASS GIS File
            del_rst(vrst)

            # Delete TIFF File
            del_file(frst)
            frst = None
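As in Example #1, this is an inner worker: os, gdal, np, pd, mkdir, run_grass, create_db, df_to_db, del_rst, del_file and obs_id are expected from the enclosing scope. The row/column reconstruction from the flat indexes is equivalent to numpy's unravel_index; a small self-contained check (the array shape and marked cell are placeholders):

import numpy as np

num = np.zeros((340, 250), dtype=np.uint8)   # hypothetical viewshed array (rows, cols)
num[120, 37] = 1                             # mark one visible cell

# flatten and keep the flat indexes of visible cells
numone = num.reshape(num.shape[0] * num.shape[1])
visnum = np.arange(numone.shape[0], dtype=np.uint32)[numone == 1]

visrow = (visnum // num.shape[1]).astype(np.uint32)   # row = flat index // n_cols
viscol = visnum - visrow * num.shape[1]               # col = remainder

assert (int(visrow[0]), int(viscol[0])) == np.unravel_index(int(visnum[0]), num.shape)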