Example #1
File: dns.py Project: jasp382/gasp
def kernel_density(pnt_feat, popField, radius, template, outRst):
    """
    Kernel density estimation. If any point is currently in the selection,
    only the selected points are taken into account.
    """

    import os
    # exec_cmd was not imported in the original snippet; it is assumed to be
    # exposed at the gasp package root
    from gasp import exec_cmd
    from gasp.gt.torst import saga_to_tif
    from gasp.gt.prop.rst import rst_ext, get_cellsize
    from gasp.pyt.oss import fprop

    left, right, bottom, top = rst_ext(template)
    cellsize = get_cellsize(template)

    SAGA_RASTER = os.path.join(os.path.dirname(outRst),
                               'saga_{}.sgrd'.format(fprop(outRst, 'fn')))

    cmd = ("saga_cmd grid_gridding 6 -POINTS {} -POPULATION {} "
           "-RADIUS {} -TARGET_DEFINITION 0 -TARGET_USER_SIZE {} "
           "-TARGET_USER_XMIN {} -TARGET_USER_XMAX {} "
           "-TARGET_USER_YMIN {} -TARGET_USER_YMAX {} "
           "-TARGET_OUT_GRID {}").format(pnt_feat, popField, str(radius),
                                         str(abs(cellsize)), str(left),
                                         str(right), str(bottom), str(top),
                                         SAGA_RASTER)

    outcmd = exec_cmd(cmd)

    # Convert to tiff
    saga_to_tif(SAGA_RASTER, outRst)

    return outRst
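A minimal usage sketch for kernel_density; the input paths, the field name, and a working SAGA GIS installation are assumptions, not part of the source.

if __name__ == '__main__':
    # Hypothetical inputs: a point shapefile, a population field and a
    # template raster that defines the output extent and cellsize
    out = kernel_density(
        '/tmp/points.shp', 'population', 500,
        '/tmp/template.tif', '/tmp/density.tif'
    )
    print('Kernel density surface written to {}'.format(out))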
Example #2
def osm2lulc(osmdata, nomenclature, refRaster, lulcRst,
             overwrite=None, dataStore=None, roadsAPI='POSTGIS'):
    """
    Convert OSM data into Land Use/Land Cover Information
    
    A matrix-based approach
    
    roadsAPI Options:
    * SQLITE
    * POSTGIS
    """
    
    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import os
    import numpy
    import datetime
    from threading import Thread
    from osgeo     import gdal
    # ************************************************************************ #
    # Dependencies #
    # ************************************************************************ #
    from gasp.gt.fmrst              import rst_to_array
    from gasp.gt.prop.ff            import check_isRaster
    from gasp.gt.prop.rst           import get_cellsize
    from gasp.gt.prop.prj           import get_rst_epsg
    from gasp.pyt.oss               import mkdir, copy_file
    from gasp.pyt.oss               import fprop
    if roadsAPI == 'POSTGIS':
        from gasp.sql.db            import create_db
        from gasp.gql.to.osm        import osm_to_psql
        from gasp.sds.osm2lulc.mod2 import pg_num_roads
        from gasp.sql.fm            import dump_db
        from gasp.sql.db            import drop_db
    else:
        from gasp.gt.toshp.osm      import osm_to_sqdb
        from gasp.sds.osm2lulc.mod2 import num_roads
    from gasp.sds.osm2lulc.utils    import osm_project, add_lulc_to_osmfeat
    from gasp.sds.osm2lulc.utils    import osmlulc_rsttbl
    # rstcls_map is used in the priority step below; it is assumed to be
    # importable from gasp.sds.osm2lulc.utils
    from gasp.sds.osm2lulc.utils    import get_ref_raster, rstcls_map
    from gasp.sds.osm2lulc.mod1     import num_selection
    from gasp.sds.osm2lulc.m3_4     import num_selbyarea
    from gasp.sds.osm2lulc.mod5     import num_base_buffer
    from gasp.sds.osm2lulc.mod6     import num_assign_builds
    from gasp.gt.torst              import obj_to_rst
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    # Check if input parameters exist
    if not os.path.exists(os.path.dirname(lulcRst)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcRst)))
    
    if not os.path.exists(osmdata):
        raise ValueError('File with OSM DATA ({}) does not exist!'.format(osmdata))
    
    if not os.path.exists(refRaster):
        raise ValueError('File with reference area ({}) does not exist!'.format(refRaster))
    
    # Check if Nomenclature is valid
    nomenclature = "URBAN_ATLAS" if nomenclature != "URBAN_ATLAS" and \
        nomenclature != "CORINE_LAND_COVER" and \
        nomenclature == "GLOBE_LAND_30" else nomenclature
    
    time_a = datetime.datetime.now().replace(microsecond=0)
    
    workspace = os.path.join(os.path.dirname(
        lulcRst), 'num_osmto') if not dataStore else dataStore
    
    # Check if workspace exists:
    if os.path.exists(workspace):
        if overwrite:
            mkdir(workspace, overwrite=True)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        mkdir(workspace, overwrite=None)
    
    # Get Ref Raster and EPSG
    refRaster, epsg = get_ref_raster(refRaster, workspace, cellsize=2)
    CELLSIZE = get_cellsize(refRaster, gisApi='gdal')
        
    from gasp.sds.osm2lulc import osmTableData, PRIORITIES
    
    time_b = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Convert OSM file to SQLITE DB or to POSTGIS DB #
    # ************************************************************************ #
    if roadsAPI == 'POSTGIS':
        osm_db = create_db(fprop(
            osmdata, 'fn', forceLower=True), overwrite=True)
        osm_db = osm_to_psql(osmdata, osm_db)
    
    else:
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(osm_db, osmTableData, nomenclature, api=roadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db, epsg, api=roadsAPI,
        isGlobeLand=None if nomenclature != "GLOBE_LAND_30" else True
    )
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    mergeOut  = {}
    timeCheck = {}
    RULES = [1, 2, 3, 4, 5, 7]
    
    def run_rule(ruleID):
        time_start = datetime.datetime.now().replace(microsecond=0)
        _osmdb = copy_file(
            osm_db, os.path.splitext(osm_db)[0] + '_r{}.sqlite'.format(ruleID)
        ) if roadsAPI == 'SQLITE' else None
        # ******************************************************************** #
        # 1 - Selection Rule #
        # ******************************************************************** #
        if ruleID == 1:
            res, tm = num_selection(
                _osmdb if _osmdb else osm_db, osmTableData['polygons'], workspace,
                CELLSIZE, epsg, refRaster, api=roadsAPI
            )
        # ******************************************************************** #
        # 2 - Get Information About Roads Location #
        # ******************************************************************** #
        elif ruleID == 2:
            res, tm = num_roads(
                _osmdb, nomenclature, osmTableData['lines'],
                osmTableData['polygons'], workspace, CELLSIZE, epsg,
                refRaster
            ) if _osmdb else pg_num_roads(
                osm_db, nomenclature,
                osmTableData['lines'], osmTableData['polygons'],
                workspace, CELLSIZE, epsg, refRaster
            )
        
        # ******************************************************************** #
        # 3 - Area Upper than #
        # ******************************************************************** #
        elif ruleID == 3:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_selbyarea(
                    osm_db if not _osmdb else _osmdb,
                    osmTableData['polygons'], workspace,
                    CELLSIZE, epsg, refRaster, UPPER=True, api=roadsAPI
                )
            else:
                return
        
        # ******************************************************************** #
        # 4 - Area Lower than #
        # ******************************************************************** #
        elif ruleID == 4:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_selbyarea(
                    osm_db if not _osmdb else _osmdb,
                    osmTableData['polygons'], workspace,
                    CELLSIZE, epsg, refRaster, UPPER=False, api=roadsAPI
                )
            else:
                return
        
        # ******************************************************************** #
        # 5 - Get data from lines table (railway | waterway) #
        # ******************************************************************** #
        elif ruleID == 5:
            res, tm = num_base_buffer(
                osm_db if not _osmdb else _osmdb,
                osmTableData['lines'], workspace,
                CELLSIZE, epsg, refRaster, api=roadsAPI
            )
        # ******************************************************************** #
        # 7 - Assign untagged Buildings to tags #
        # ******************************************************************** #
        elif ruleID == 7:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_assign_builds(
                    osm_db if not _osmdb else _osmdb,
                    osmTableData['points'], osmTableData['polygons'],
                    workspace, CELLSIZE, epsg, refRaster, apidb=roadsAPI
                )
            
            else:
                return
        
        time_end = datetime.datetime.now().replace(microsecond=0)
        mergeOut[ruleID] = res
        timeCheck[ruleID] = {'total': time_end - time_start, 'detailed': tm}
    
    thrds = []
    for r in RULES:
        thrds.append(Thread(
            name="to_{}".format(str(r)), target=run_rule,
            args=(r,)
        ))

    for t in thrds: t.start()
    for t in thrds: t.join()
    
    # Merge all results into one Raster
    compileResults = {}
    for rule in mergeOut:
        for cls in mergeOut[rule]:
            if cls not in compileResults:
                if type(mergeOut[rule][cls]) == list:
                    compileResults[cls] = mergeOut[rule][cls]
                else:
                    compileResults[cls] = [mergeOut[rule][cls]]
            
            else:
                if type(mergeOut[rule][cls]) == list:
                    compileResults[cls] += mergeOut[rule][cls]
                else:
                    compileResults[cls].append(mergeOut[rule][cls])
    
    time_m = datetime.datetime.now().replace(microsecond=0)
    # All Rasters to Array
    arrayRst = {}
    for cls in compileResults:
        for raster in compileResults[cls]:
            if not raster:
                continue
            
            array = rst_to_array(raster)
            
            if cls not in arrayRst:
                arrayRst[cls] = [array.astype(numpy.uint8)]
            
            else:
                arrayRst[cls].append(array.astype(numpy.uint8))
    time_n = datetime.datetime.now().replace(microsecond=0)
    
    # Sum Rasters of each class
    for cls in arrayRst:
        if len(arrayRst[cls]) == 1:
            sumArray = arrayRst[cls][0]
        
        else:
            sumArray = arrayRst[cls][0]
            
            for i in range(1, len(arrayRst[cls])):
                sumArray = sumArray + arrayRst[cls][i]
        
        arrayRst[cls] = sumArray
    
    time_o = datetime.datetime.now().replace(microsecond=0)
    
    # Apply priority rule
    __priorities = PRIORITIES[nomenclature + "_NUMPY"]
    
    for lulcCls in __priorities:
        __lulcCls = rstcls_map(lulcCls)

        if __lulcCls not in arrayRst:
            continue
        else:
            numpy.place(arrayRst[__lulcCls], arrayRst[__lulcCls] > 0,
                lulcCls
            )
    
    for i in range(len(__priorities)):
        lulc_i = rstcls_map(__priorities[i])

        if lulc_i not in arrayRst:
            continue
        
        else:
            for e in range(i+1, len(__priorities)):
                lulc_e = rstcls_map(__priorities[e])

                if lulc_e not in arrayRst:
                    continue
                
                else:
                    numpy.place(arrayRst[lulc_e],
                        arrayRst[lulc_i] == __priorities[i], 0
                    )
    
    time_p = datetime.datetime.now().replace(microsecond=0)
    
    # Merge all rasters
    startCls = None
    for i in range(len(__priorities)):
        lulc_i = rstcls_map(__priorities[i])
        
        if lulc_i in arrayRst:
            resultSum = arrayRst[lulc_i]
            startCls = i
            break
    
    if startCls is None:
        return 'NoResults'
    
    for i in range(startCls + 1, len(__priorities)):
        lulc_i = rstcls_map(__priorities[i])
        
        if lulc_i not in arrayRst:
            continue
        
        resultSum = resultSum + arrayRst[lulc_i]
    
    # Save Result
    outIsRst = check_isRaster(lulcRst)
    if not outIsRst:
        from gasp.pyt.oss import fprop
        
        lulcRst = os.path.join(
            os.path.dirname(lulcRst), fprop(lulcRst, 'fn') + '.tif'
        )
    
    numpy.place(resultSum, resultSum==0, 1)
    obj_to_rst(resultSum, lulcRst, refRaster, noData=1)
    
    osmlulc_rsttbl(nomenclature + "_NUMPY", os.path.join(
        os.path.dirname(lulcRst), os.path.basename(lulcRst) + '.vat.dbf'
    ))
    
    time_q = datetime.datetime.now().replace(microsecond=0)

    # Dump Database if PostGIS was used
    # Drop Database if PostGIS was used
    if roadsAPI == 'POSTGIS':
        dump_db(osm_db, os.path.join(workspace, osm_db + '.sql'), api='psql')
        drop_db(osm_db)
    
    return lulcRst, {
        0  : ('set_settings', time_b - time_a),
        1  : ('osm_to_sqdb', time_c - time_b),
        2  : ('cls_in_sqdb', time_d - time_c),
        3  : ('proj_data', time_e - time_d),
        4  : ('rule_1', timeCheck[1]['total'], timeCheck[1]['detailed']),
        5  : ('rule_2', timeCheck[2]['total'], timeCheck[2]['detailed']),
        6  : None if 3 not in timeCheck else (
            'rule_3', timeCheck[3]['total'], timeCheck[3]['detailed']),
        7  : None if 4 not in timeCheck else (
            'rule_4', timeCheck[4]['total'], timeCheck[4]['detailed']),
        8  : ('rule_5', timeCheck[5]['total'], timeCheck[5]['detailed']),
        9  : None if 7 not in timeCheck else (
            'rule_7', timeCheck[7]['total'], timeCheck[7]['detailed']),
        10 : ('rst_to_array', time_n - time_m),
        11 : ('sum_cls', time_o - time_n),
        12 : ('priority_rule', time_p - time_o),
        13 : ('merge_rst', time_q - time_p)
    }
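A hedged usage sketch for osm2lulc; the file locations are placeholders and a reachable PostGIS instance is assumed when roadsAPI='POSTGIS'.

if __name__ == '__main__':
    # Hypothetical call: convert an OSM extract into an Urban Atlas LULC raster
    result = osm2lulc(
        '/data/city.osm.xml', 'URBAN_ATLAS',
        '/data/ref_raster.tif', '/data/city_lulc.tif',
        overwrite=True, roadsAPI='POSTGIS'
    )
    
    if result == 'NoResults':
        print('No LULC classes were produced')
    else:
        lulc, times = result
        # Print the per-step timing report returned alongside the raster path
        for step in sorted(times):
            print(step, times[step])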
Example #3
def update_globe_land_cover(original_globe_raster, osm_urban_atlas_raster,
                            osm_globe_raster, epsg, updated_globe_raster,
                            detailed_globe_raster):
    """
    Update the original Globe Land 30 with the result of the conversion of
    OSM data to the Globe Land Cover nomenclature;
    
    Also updates the previously updated Globe Land 30 with the result of
    the conversion of OSM data to the Urban Atlas nomenclature
    """

    import os
    import numpy as np
    from gasp.gt.fmrst import rst_to_array
    from gasp.gt.prop.rst import get_cellsize, get_nodata
    from gasp.gt.torst import obj_to_rst

    # ############################# #
    # Convert images to numpy array #
    # ############################# #
    np_globe_original = rst_to_array(original_globe_raster)
    np_globe_osm = rst_to_array(osm_globe_raster)
    np_ua_osm = rst_to_array(osm_urban_atlas_raster)

    # ################################## #
    # Check the dimension of both images #
    # ################################## #
    if np_globe_original.shape != np_globe_osm.shape:
        return (
            'The Globe Land 30 raster (original) does not have the same '
            'number of columns/lines as the Globe Land 30 raster derived '
            'from OSM data')

    elif np_globe_original.shape != np_ua_osm.shape:
        return (
            'The Globe Land 30 raster (original) does not have the same '
            'number of columns/lines as the Urban Atlas raster derived '
            'from OSM data')

    elif np_globe_osm.shape != np_ua_osm.shape:
        return (
            'The Globe Land 30 raster derived from OSM data does not have '
            'the same number of columns/lines as the Urban Atlas raster '
            'derived from OSM data')

    # ############## #
    # Check Cellsize #
    # ############## #
    cell_of_rsts = get_cellsize(
        [original_globe_raster, osm_globe_raster, osm_urban_atlas_raster],
        xy=True,
        gisApi='gdal')

    cell_globe_original = cell_of_rsts[original_globe_raster]
    cell_globe_osm = cell_of_rsts[osm_globe_raster]
    cell_ua_osm = cell_of_rsts[osm_urban_atlas_raster]

    if cell_globe_original != cell_globe_osm:
        return (
            'The cellsize of the Globe Land 30 raster (original) does not '
            'match that of the Globe Land 30 raster derived from OSM data')

    elif cell_globe_original != cell_ua_osm:
        return (
            'The cellsize of the Globe Land 30 raster (original) does not '
            'match that of the Urban Atlas raster derived from OSM data')

    elif cell_ua_osm != cell_globe_osm:
        return (
            'The cellsize of the Globe Land 30 raster derived from OSM data '
            'does not match that of the Urban Atlas raster derived from '
            'OSM data')

    # ############################# #
    # Get the Value of Nodata Cells #
    # ############################# #
    nodata_glob_original = get_nodata(original_globe_raster, gisApi='gdal')
    nodata_glob_osm = get_nodata(osm_globe_raster, gisApi='gdal')
    nodata_ua_osm = get_nodata(osm_urban_atlas_raster, gisApi='gdal')

    # ######################################## #
    # Create a new map - Globe Land 30 Updated #
    # ######################################## #
    """
    Create a new array with zeros...
    
    1) The zeros will be replaced by the values in the Globe Land derived from
    OSM.
    
    2) The zeros will be replaced by the values in the Original Globe Land at
    the cells with NULL data in the Globe Land derived from OSM.
    
    The meta array will identify values origins in the updated raster:
    1 - Orinal Raster
    2 - OSM Derived Raster
    """

    update_array = np.zeros(
        (np_globe_original.shape[0], np_globe_original.shape[1]))

    update_meta_array = np.zeros(
        (np_globe_original.shape[0], np_globe_original.shape[1]))

    # 1)
    np.copyto(update_array, np_globe_osm, 'no',
              np_globe_osm != nodata_glob_osm)
    # 1) meta
    np.place(update_meta_array, update_array != 0, 2)
    # 2) meta
    np.place(update_meta_array, update_array == 0, 1)
    # 2)
    np.copyto(update_array, np_globe_original, 'no', update_array == 0)
    # 2) meta
    np.place(update_meta_array, update_array == nodata_glob_original,
             int(nodata_glob_original))
    # noData to int
    np.place(update_array, update_array == nodata_glob_original,
             int(nodata_glob_original))

    updated_meta = os.path.join(
        os.path.dirname(updated_globe_raster), '{n}_meta{e}'.format(
            n=os.path.splitext(os.path.basename(updated_globe_raster))[0],
            e=os.path.splitext(os.path.basename(updated_globe_raster))[1]))
    # Create Updated Globe Cover 30
    obj_to_rst(update_array,
               updated_globe_raster,
               original_globe_raster,
               noData=int(nodata_glob_original))
    # Create Updated Globe Cover 30 meta
    obj_to_rst(update_meta_array,
               updated_meta,
               original_globe_raster,
               noData=int(nodata_glob_original))

    # ################################################# #
    # Create a new map - Globe Land 30 Detailed with UA #
    # ################################################# #
    np_update = rst_to_array(updated_globe_raster)

    detailed_array = np.zeros((np_update.shape[0], np_update.shape[1]))

    detailed_meta_array = np.zeros((np_update.shape[0], np_update.shape[1]))
    """
    Replace 80 Globe Land for 11, 12, 13, 14 of Urban Atlas
    
    The meta array will identify values origins in the detailed raster:
    1 - Updated Raster
    2 - UA Derived Raster from OSM
    """
    # Globe - Maintain some classes
    np.place(detailed_array, np_update == 30, 8)
    np.place(detailed_meta_array, np_update == 30, 1)

    np.place(detailed_array, np_update == 40, 9)
    np.place(detailed_meta_array, np_update == 40, 1)

    np.place(detailed_array, np_update == 50, 10)
    np.place(detailed_meta_array, np_update == 50, 1)

    np.place(detailed_array, np_update == 10, 5)
    np.place(detailed_meta_array, np_update == 10, 1)

    # Water bodies
    np.place(detailed_array, (np_ua_osm == 50) | (np_update == 60), 7)
    np.place(detailed_meta_array, (np_ua_osm == 50) | (np_update == 60), 1)

    # Urban - Where Urban Atlas IS NOT NULL
    np.place(detailed_array, np_ua_osm == 11, 1)
    np.place(detailed_meta_array, np_ua_osm == 11, 2)

    np.place(detailed_array, np_ua_osm == 12, 2)
    np.place(detailed_meta_array, np_ua_osm == 12, 2)

    np.place(detailed_array, np_ua_osm == 13, 3)
    np.place(detailed_meta_array, np_ua_osm == 13, 2)

    np.place(detailed_array, np_ua_osm == 14, 4)
    np.place(detailed_meta_array, np_ua_osm == 14, 2)

    # Urban Atlas - Class 30 to 6
    np.place(detailed_array, np_ua_osm == 30, 6)
    np.place(detailed_meta_array, np_ua_osm == 30, 2)

    # Create Detailed Globe Cover 30
    obj_to_rst(detailed_array,
               detailed_globe_raster,
               original_globe_raster,
               noData=0)

    # Create Detailed Globe Cover 30 meta
    detailed_meta = os.path.join(
        os.path.dirname(detailed_globe_raster), '{n}_meta{e}'.format(
            n=os.path.splitext(os.path.basename(detailed_globe_raster))[0],
            e=os.path.splitext(os.path.basename(detailed_globe_raster))[1]))
    obj_to_rst(detailed_meta_array,
               detailed_meta,
               original_globe_raster,
               noData=0)
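A usage sketch for update_globe_land_cover under stated assumptions: every path below is a placeholder and the three input rasters must already share shape and cellsize.

if __name__ == '__main__':
    update_globe_land_cover(
        '/data/globe_land30.tif',          # original Globe Land 30
        '/data/osm_to_urban_atlas.tif',    # OSM converted to Urban Atlas classes
        '/data/osm_to_globe_land.tif',     # OSM converted to Globe Land classes
        3763,                              # EPSG code (assumed value)
        '/data/globe_land30_updated.tif',  # output: updated raster
        '/data/globe_land30_detailed.tif'  # output: detailed raster
    )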
Example #4
File: rst.py Project: jasp382/gasp
def adjust_ext_to_snap(outExt, snapRst):
    """
    Adjust the extent of an output raster so that it snaps to another raster
    """
    
    from gasp.gt.prop.ff  import check_isShp, check_isRaster
    from gasp.gt.prop.rst import rst_ext, get_cellsize
    from gasp.g.to        import new_pnt, create_polygon
    
    # Check if outExt is a raster or not
    isRst = check_isRaster(outExt)
    
    if isRst:
        shpAExt = rst_ext(outExt)
    
    else:
        isShp = check_isShp(outExt)
        
        if isShp:
            from gasp.gt.prop.feat import get_ext
            
            shpAExt = get_ext(outExt)
        
        else:
            raise ValueError((
                "outExt value should be a path to a SHP or to a Raster file"
            ))
    
    # Check if snapRst is a raster
    isRst = check_isRaster(snapRst)
    
    if not isRst:
        raise ValueError((
            "snapRst should be a path to a raster file"
        ))
    
    # Get snapRst Extent
    snapRstExt = rst_ext(snapRst)
    
    # Get cellsize
    csize = get_cellsize(snapRst)
    
    # Find a corner of outExt that falls inside the snapRst extent
    # This corner will be used as a pseudo origin
    
    snapRstPnt = [
        new_pnt(snapRstExt[0], snapRstExt[3]),
        new_pnt(snapRstExt[1], snapRstExt[3]),
        new_pnt(snapRstExt[1], snapRstExt[2]),
        new_pnt(snapRstExt[0], snapRstExt[2]),
        new_pnt(snapRstExt[0], snapRstExt[3]),
    ]
    
    poly_snap_rst = create_polygon(snapRstPnt)
    
    outExtPnt = {
        'top_left'     : new_pnt(shpAExt[0], shpAExt[3]),
        'top_right'    : new_pnt(shpAExt[1], shpAExt[3]),
        'bottom_right' : new_pnt(shpAExt[1], shpAExt[2]),
        'bottom_left'  : new_pnt(shpAExt[0], shpAExt[2])
    }
    
    out_rst_pseudo = {}
    for pnt in outExtPnt:
        out_rst_pseudo[pnt] = outExtPnt[pnt].Intersects(poly_snap_rst)
    
    pseudoOrigin = outExtPnt['top_left'] if out_rst_pseudo['top_left'] else \
        outExtPnt['bottom_left'] if out_rst_pseudo['bottom_left'] else \
        outExtPnt['top_right'] if out_rst_pseudo['top_right'] else \
        outExtPnt['bottom_right'] if out_rst_pseudo['bottom_right'] else None
        
    if not pseudoOrigin:
        raise ValueError((
            'Extents do not have overlapping areas'
        ))
    
    pseudoOriginName = 'top_left' if out_rst_pseudo['top_left'] else \
        'bottom_left' if out_rst_pseudo['bottom_left'] else \
        'top_right' if out_rst_pseudo['top_right'] else \
        'bottom_right' if out_rst_pseudo['bottom_right'] else None
    
    # Get out Raster Shape
    n_col = int((shpAExt[1] - shpAExt[0]) / csize)
    n_row = int((shpAExt[3] - shpAExt[2]) / csize)
    
    # Get Output Raster real origin/top left
    yName, xName = pseudoOriginName.split('_')
    
    if xName == 'left':
        # Obtain left of output Raster
        left_out_rst = snapRstExt[0] + (
            csize * int((shpAExt[0] - snapRstExt[0]) / csize))
    
    else:
        # obtain right of output Raster
        right_out_rst = snapRstExt[1] - (
            csize * int((snapRstExt[1] - shpAExt[1]) / csize))
        
        # Use right to obtain left coordinate
        left_out_rst = right_out_rst - (n_col * csize)
    
    if yName == 'top':
        # Obtain top of output Raster
        top_out_rst = snapRstExt[3] - (
            csize * int((snapRstExt[3] - shpAExt[3]) / csize))
        
    else:
        # obtain bottom of output raster
        bot_out_rst = snapRstExt[2] + (
            csize * int((shpAExt[2] - snapRstExt[2]) / csize))
        
        # use bottom to find the top of the output raster
        top_out_rst = bot_out_rst + (n_row * csize)
        
    return left_out_rst, top_out_rst, n_row, n_col, csize
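A sketch of how the values returned by adjust_ext_to_snap could seed the geotransform of a new, aligned raster; the paths and the GeoTiff driver choice are assumptions, not part of the source.

if __name__ == '__main__':
    from osgeo import gdal
    
    left, top, n_row, n_col, csize = adjust_ext_to_snap(
        '/data/study_area.shp',  # extent source (assumed path)
        '/data/snap_raster.tif'  # raster to snap to (assumed path)
    )
    
    # North-up geotransform: (origin x, cellsize, 0, origin y, 0, -cellsize)
    drv = gdal.GetDriverByName('GTiff')
    out = drv.Create('/data/aligned.tif', n_col, n_row, 1, gdal.GDT_Float32)
    out.SetGeoTransform((left, csize, 0, top, 0, -csize))
    out = None  # flush to disk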
Example #5
def gdal_slope(dem, srs, slope, unit='DEGREES'):
    """
    Create Slope Raster
    
    TODO: Test and confirm that it runs correctly
    """

    import numpy
    import math
    from osgeo import gdal
    from scipy.ndimage import convolve
    from gasp.gt.fmrst import rst_to_array
    from gasp.gt.torst import obj_to_rst
    from gasp.gt.prop.rst import get_cellsize, get_nodata

    # ################ #
    # Global Variables #
    # ################ #
    cellsize = get_cellsize(dem, gisApi='gdal')
    # Get Nodata Value
    NoData = get_nodata(dem)

    # #################### #
    # Produce Slope Raster #
    # #################### #
    # Get Elevation array
    arr_dem = rst_to_array(dem)
    # We need an array with the number of neighbouring cells that have data
    with_data = numpy.zeros((arr_dem.shape[0], arr_dem.shape[1]))
    numpy.place(with_data, arr_dem != NoData, 1.0)
    mask = numpy.array([[1, 1, 1], [1, 0, 1], [1, 1, 1]])
    arr_neigh = convolve(with_data, mask, mode='constant')
    numpy.place(arr_dem, arr_dem == NoData, 0.0)
    # The rate of change in the x direction for the center cell e is:
    kernel_dz_dx_left = numpy.array([[0, 0, 1], [0, 0, 2], [0, 0, 1]])
    kernel_dz_dx_right = numpy.array([[1, 0, 0], [2, 0, 0], [1, 0, 0]])
    dz_dx = (convolve(arr_dem, kernel_dz_dx_left, mode='constant') - convolve(
        arr_dem, kernel_dz_dx_right, mode='constant')) / (arr_neigh * cellsize)
    # The rate of change in the y direction for cell e is:
    kernel_dz_dy_left = numpy.array([[0, 0, 0], [0, 0, 0], [1, 2, 1]])
    kernel_dz_dy_right = numpy.array([[1, 2, 1], [0, 0, 0], [0, 0, 0]])
    dz_dy = (convolve(arr_dem, kernel_dz_dy_left, mode='constant') - convolve(
        arr_dem, kernel_dz_dy_right, mode='constant')) / (arr_neigh * cellsize)
    # Using the rates of change in the x and y directions, the slope for the center cell e is:
    rise_run = ((dz_dx)**2 + (dz_dy)**2)**0.5
    if unit == 'DEGREES':
        arr_slope = numpy.arctan(rise_run) * 57.29578
    elif unit == 'PERCENT_RISE':
        arr_slope = numpy.tan(numpy.arctan(rise_run)) * 100.0
    # Estimate the slope for cells with fewer than 8 neighbours
    aux_dem = rst_to_array(dem)
    index_vizinhos = numpy.where(arr_neigh < 8)
    for idx in range(len(index_vizinhos[0])):
        # Get Value of the cell
        lnh = index_vizinhos[0][idx]
        col = index_vizinhos[1][idx]
        e = aux_dem[lnh][col]
        a = aux_dem[lnh - 1][col - 1]
        if a == NoData:
            a = e
        if lnh == 0 or col == 0:
            a = e
        b = aux_dem[lnh - 1][col]
        if b == NoData:
            b = e
        if lnh == 0:
            b = e
        try:
            c = aux_dem[lnh - 1][col + 1]
            if c == NoData:
                c = e
            if lnh == 0:
                c = e
        except:
            c = e
        d = aux_dem[lnh][col - 1]
        if d == NoData:
            d = e
        if col == 0:
            d = e
        try:
            f = aux_dem[lnh][col + 1]
            if f == NoData:
                f = e
        except:
            f = e
        try:
            g = aux_dem[lnh + 1][col - 1]
            if g == NoData:
                g = e
            if col == 0:
                g = e
        except:
            g = e
        try:
            h = aux_dem[lnh + 1][col]
            if h == NoData:
                h = e
        except:
            h = e
        try:
            i = aux_dem[lnh + 1][col + 1]
            if i == NoData:
                i = e
        except:
            i = e
        dz_dx = ((c + 2 * f + i) - (a + 2 * d + g)) / (8 * cellsize)
        dz_dy = ((g + 2 * h + i) - (a + 2 * b + c)) / (8 * cellsize)
        rise_sun = ((dz_dx)**2 + (dz_dy)**2)**0.5
        if unit == 'DEGREES':
            arr_slope[lnh][col] = math.atan(rise_sun) * 57.29578
        elif unit == 'PERCENT_RISE':
            arr_slope[lnh][col] = math.tan(math.atan(rise_sun)) * 100.0
    # Reset cells that were NoData in the original DEM
    numpy.place(arr_slope, aux_dem == NoData, numpy.nan)
    obj_to_rst(arr_slope, slope, dem)
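A minimal usage sketch for gdal_slope; the DEM path and the EPSG code are placeholders (note that the srs argument is not used inside the function body above).

if __name__ == '__main__':
    gdal_slope(
        '/data/dem.tif',        # input elevation raster (assumed path)
        3763,                   # srs EPSG code (currently unused by the function)
        '/data/slope_deg.tif',  # output slope raster
        unit='DEGREES'
    )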