Ejemplo n.º 1
0
def refine(input, region, method="bilinear"):
    """Resample a raster back to the base resolution of *region*.

    The raster is first padded by two cells so the interpolation has
    data beyond the region edge, then resampled with the requested
    method; the padded intermediate is removed afterwards.
    """
    # Pad the input raster to avoid edge artifacts during resampling.
    padded = rand_id("padded{}".format(L + 1))
    cell_padding(input=input, output=padded, radius=2)

    # Name the result and register it for later cleanup.
    resampled = rand_id("refined{}".format(L + 1))
    TMP_RAST[L].append(resampled)

    # Restore the base computational region before resampling.
    region.write()

    if method == "bilinear":
        r.resamp_interp(input=padded, output=resampled, method="bilinear")

    if method == "average":
        r.resamp_stats(input=padded,
                       output=resampled,
                       method="average",
                       flags="w")

    # Drop the padded intermediate raster.
    g.remove(type="raster", name=padded, flags="f", quiet=True)

    return resampled
Ejemplo n.º 2
0
def remove_map(map_name):
    """Force-remove the raster and/or vector map(s) named *map_name*."""
    grass.verbose("Removing %s" % map_name)
    removal_args = dict(
        flags="fb",
        type=("raster", "vector"),
        name=map_name,
        quiet=True,
    )
    g.remove(**removal_args)
Ejemplo n.º 3
0
def cleanup():
    """Clean-up procedure for module"""
    gs.message("Deleting intermediate files...")
    # TMP_RAST maps keys to lists of temporary raster names; only the
    # names are needed here.
    for raster_names in TMP_RAST.values():
        for name in raster_names:
            # Remove only rasters that actually exist in the mapset.
            if gs.find_file(name)["fullname"]:
                g.remove(type="raster", name=name, flags="f", quiet=True)

    # Restore the computational region saved at startup.
    current_region.write()
Ejemplo n.º 4
0
def tear_down():
    """Delete randomly created data (all rasters and vectors in the
    current mapset)."""
    rast_proc = grass.pipe_command('g.mlist', type='rast', mapset='.')
    for line in rast_proc.stdout:
        g.remove(rast=line.strip())

    vect_proc = grass.pipe_command('g.mlist', type='vect', mapset='.')
    for line in vect_proc.stdout:
        g.remove(vect=line.strip())
Ejemplo n.º 5
0
def grass_write_vect_sql(gpdf,
                         x="x_crd",
                         y="y_crd",
                         output=None,
                         overwrite=False):
    """
    Write a geopandas.GeodataFrame of Point geometries into a GRASS GIS
    vector map

    Currently only point geometries are supported

    Parameters
    ----------
    gpdf : geopandas.GeoDataFrame
        Containing point geometries

    x, y : str
        Name of coordinate fields to use in GRASS table

    output : str
        Name of output GRASS GIS vector map

    overwrite : bool
        If True, drop a pre-existing "<output>_table" table and
        <output> vector map before writing
    """

    if overwrite is True:
        # Best-effort removal of artifacts from a previous run; failures
        # (e.g. the table/map does not exist yet) are deliberately ignored.
        # Catch Exception rather than a bare except so KeyboardInterrupt
        # and SystemExit still propagate.
        try:
            db.droptable(table=output + "_table", flags="f")
            g.remove(name=output, type="vector", flags="f")
        except Exception:
            pass

    sqlpath = gs.read_command("db.databases",
                              driver="sqlite").strip(os.linesep)
    con = sqlite3.connect(sqlpath)

    try:
        # For point geometries the lower-left corner of the bounds equals
        # the point coordinate itself.
        gpdf[x] = gpdf.geometry.bounds.iloc[:, 0]
        gpdf[y] = gpdf.geometry.bounds.iloc[:, 1]

        (gpdf.drop(labels=["geometry"], axis=1).to_sql(output + "_table",
                                                       con,
                                                       index=True,
                                                       index_label="cat",
                                                       if_exists="replace"))
    finally:
        # Release the sqlite connection even if the upload raises.
        con.close()

    gvect.in_db(table=output + "_table",
                x=x,
                y=y,
                output=output,
                flags="t",
                key="cat")
Ejemplo n.º 6
0
def remove_temporary_maps(save_temporary_maps=False):
    """Clean up temporary maps.

    Removes all rasters named ``tmp.<pid>*`` created by this process,
    unless *save_temporary_maps* is True, in which case a warning is
    emitted telling the user how to remove them manually.
    """
    # options, flags = grass.parser()
    # if not flags['s']:  # 's' for save temporary maps
    if not save_temporary_maps:
        g.message("Removing temporary maps")
        g.remove(
            flags="f",
            type="raster",
            pattern="tmp.{pid}*".format(pid=os.getpid()),
            quiet=True,
        )
    else:
        msg = "I will not remove temporary maps in order to support your debugging!"
        # BUGFIX: a separating space was missing, producing
        # "...debugging!Take care..." in the warning output.
        msg += " Take care to remove them, i.e. via `g.remove raster pattern=tmp.*`"
        grass.warning(_(msg))
Ejemplo n.º 7
0
def remove_temporary_maps():
    """Clean up temporary maps"""
    # Force-remove every raster created by this process (tmp.<pid>*).
    g.message("Removing temporary maps")
    temporary_pattern = "tmp.{pid}*".format(pid=os.getpid())
    g.remove(
        flags="f",
        type="raster",
        pattern=temporary_pattern,
        quiet=True,
    )
Ejemplo n.º 8
0
def RobustRegression(collection, band, fet, dod, order, iterates):
    """Iteratively smooth *band* of every image with r.series.lwr.

    Runs `iterates` passes of local weighted regression; each pass
    appends '_lwr' to the map names. Intermediate passes are removed,
    and the final smoothed map name is appended to each image record.

    Parameters are forwarded to r.series.lwr: `fet` (fit error
    tolerance), `dod` (degree of over-determination), `order`
    (polynomial order); `iterates` is the number of passes.
    """
    suff = '_lwr'
    output = selectFromCollection(collection, band)
    # NOTE: locals renamed from the originals `iter`/`input`, which
    # shadowed the builtins of the same names.
    for iteration in range(iterates):
        current = output
        r.series_lwr(input=current,
                     suffix='_lwr',
                     order=order,
                     fet=fet,
                     dod=dod,
                     flags='lh')
        # Each pass produces maps named <previous name> + '_lwr'.
        output = [im + suff for im in current]
        if iteration != 0:
            # Drop the previous pass's intermediates (the pass-0 input is
            # the original data and must be kept).
            g.remove(type='raster', name=current, flags='fb')
        if iteration == iterates - 1:
            # Final pass: record the smoothed band in each image record.
            for im, im_new in zip(collection, output):
                im.append(im_new)
Ejemplo n.º 9
0
def initGrassSetup(userDemDir,
                   userid,
                   lat,
                   lon,
                   filename=None):
    """Import a user's DEM GeoTIFF into GRASS and set the region to it.

    Parameters
    ----------
    userDemDir : str
        Directory containing the DEM file (must end with a separator).
    userid :
        User identifier; also the mapset the raster is imported into.
    lat, lon :
        Kept for interface compatibility (not used here).
    filename : str, optional
        DEM file name; defaults to options['demname'] + '.tif'.
    """
    # BUGFIX: the default was previously written as
    # `filename=options['demname'] + '.tif'` in the signature, which froze
    # the value at module-import time; compute it at call time instead.
    if filename is None:
        filename = options['demname'] + '.tif'

    # redundant connections to grass
    r, g, gscript = connect2grass(userid)
    # r.in.gdal input=/home/justin/Documents/ucmi/geodata/zip/tempEPSG3857/tile.tif output=tile
    gscript.run_command(
        'r.in.gdal',
        input=userDemDir + filename,
        output=filename[:-4],
        overwrite=True,
    )

    # Set (and print) the computational region to the imported raster.
    g.region(raster=filename[:-4] + '@' + str(userid))
    g.region(flags='p')

    # remove old viewsheds
    g.remove(flags='fb', type='raster', pattern='viewshed*')
Ejemplo n.º 10
0
def main(index, AOI, grass_params):
    """Select tiles overlapping the AOI and print their 'location' values.

    grass_params is a (gisdb, location, mapset) triple used to open the
    GRASS session. All intermediate vectors are removed afterwards.
    """
    PERMANENT = Session()
    PERMANENT.open(gisdb=grass_params[0],
                   location=grass_params[1],
                   mapset=grass_params[2])

    # Unique temporary names for the imported layers and their overlap.
    aoi = uniq_name('aoi')
    tiles = uniq_name('tiles')
    intersection = uniq_name('common')

    try:
        v.in_ogr(input=index, output=tiles)
        v.in_ogr(input=AOI, output=aoi)
        v.select(binput=aoi,
                 ainput=tiles,
                 output=intersection,
                 operator='overlap')
        v.db_select(map=intersection, columns='location', flags='c')
    finally:
        # Always remove the temporary vectors, even on failure.
        for temp_map in (tiles, aoi, intersection):
            g.remove(type='vector', name=temp_map, flags='f')
Ejemplo n.º 11
0
def cleanup():
    """Force-remove every temporary raster registered in ``tmp_rast``."""
    for raster_name in tmp_rast:
        gg.remove(name=raster_name, type="raster", flags="fb", quiet=True)
Ejemplo n.º 12
0
def main():
    """
    Builds river reaches for input to the USGS hydrologic model, GSFLOW.
    These reaches link the PRMS stream segments to the MODFLOW grid cells.

    Workflow: overlay the stream segments on the MODFLOW grid to cut them
    into reaches, rename/prune the overlay's attribute columns, upload
    hydraulic parameters, order the reaches along each segment by matching
    reach start coordinates to the previous reach's end coordinates, and
    finally compute per-reach slope and streambed-top elevation from the
    DEM.

    NOTE(review): this function is Python 2 only -- it uses the `print`
    statement (not function) and indexes the result of `dict.values()`
    directly; it will not run unmodified under Python 3.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()
    segments = options['segment_input']
    grid = options['grid_input']
    reaches = options['output']
    elevation = options['elevation']
    Smin = options['s_min']
    h_stream = options['h_stream']
    x1 = options['upstream_easting_column_seg']
    y1 = options['upstream_northing_column_seg']
    x2 = options['downstream_easting_column_seg']
    y2 = options['downstream_northing_column_seg']
    tostream = options['tostream_cat_column_seg']
    # Hydraulic paramters
    STRTHICK = options['strthick']
    STRHC1 = options['strhc1']
    THTS = options['thts']
    THTI = options['thti']
    EPS = options['eps']
    UHC = options['uhc']
    # Build reach maps by overlaying segments on grid
    if len(gscript.find_file(segments, element='vector')['name']) > 0:
        # Extract only the line features, then intersect with the grid:
        # each segment/cell intersection becomes one reach.
        v.extract(input=segments,
                  output='GSFLOW_TEMP__',
                  type='line',
                  quiet=True,
                  overwrite=True)
        v.overlay(ainput='GSFLOW_TEMP__',
                  atype='line',
                  binput=grid,
                  output=reaches,
                  operator='and',
                  overwrite=gscript.overwrite(),
                  quiet=True)
        g.remove(type='vector', name='GSFLOW_TEMP__', quiet=True, flags='f')
    else:
        gscript.fatal('No vector file "' + segments + '" found.')

    # Start editing database table
    reachesTopo = VectorTopo(reaches)
    reachesTopo.open('rw')

    # Rename a,b columns
    # (v.overlay prefixes segment columns with 'a_' and grid columns
    # with 'b_'; restore readable names.)
    reachesTopo.table.columns.rename('a_' + x1, 'x1')
    reachesTopo.table.columns.rename('a_' + x2, 'x2')
    reachesTopo.table.columns.rename('a_' + y1, 'y1')
    reachesTopo.table.columns.rename('a_' + y2, 'y2')
    reachesTopo.table.columns.rename('a_NSEG', 'NSEG')
    reachesTopo.table.columns.rename('a_ISEG', 'ISEG')
    reachesTopo.table.columns.rename('a_stream_type', 'stream_type')
    reachesTopo.table.columns.rename('a_type_code', 'type_code')
    reachesTopo.table.columns.rename('a_cat', 'rnum_cat')
    reachesTopo.table.columns.rename('a_' + tostream, 'tostream')
    reachesTopo.table.columns.rename('a_id', 'segment_id')
    reachesTopo.table.columns.rename('a_OUTSEG', 'OUTSEG')
    reachesTopo.table.columns.rename('b_row', 'row')
    reachesTopo.table.columns.rename('b_col', 'col')
    reachesTopo.table.columns.rename('b_id', 'cell_id')

    # Drop unnecessary columns
    cols = reachesTopo.table.columns.names()
    for col in cols:
        if (col[:2] == 'a_') or (col[:2] == 'b_'):
            reachesTopo.table.columns.drop(col)

    # Add new columns to 'reaches'
    # (KRCH/IRCH/JRCH: MODFLOW layer/row/column; IREACH: reach order in
    # segment; RCHLEN: reach length; STRTOP: streambed top elevation;
    # remaining columns are SFR hydraulic parameters and reach
    # start/end coordinates.)
    reachesTopo.table.columns.add('KRCH', 'integer')
    reachesTopo.table.columns.add('IRCH', 'integer')
    reachesTopo.table.columns.add('JRCH', 'integer')
    reachesTopo.table.columns.add('IREACH', 'integer')
    reachesTopo.table.columns.add('RCHLEN', 'double precision')
    reachesTopo.table.columns.add('STRTOP', 'double precision')
    reachesTopo.table.columns.add('SLOPE', 'double precision')
    reachesTopo.table.columns.add('STRTHICK', 'double precision')
    reachesTopo.table.columns.add('STRHC1', 'double precision')
    reachesTopo.table.columns.add('THTS', 'double precision')
    reachesTopo.table.columns.add('THTI', 'double precision')
    reachesTopo.table.columns.add('EPS', 'double precision')
    reachesTopo.table.columns.add('UHC', 'double precision')
    reachesTopo.table.columns.add('xr1', 'double precision')
    reachesTopo.table.columns.add('xr2', 'double precision')
    reachesTopo.table.columns.add('yr1', 'double precision')
    reachesTopo.table.columns.add('yr2', 'double precision')

    # Commit columns before editing (necessary?)
    reachesTopo.table.conn.commit()
    reachesTopo.close()

    # Update some columns that can be done now
    reachesTopo.open('rw')
    colNames = np.array(gscript.vector_db_select(reaches, layer=1)['columns'])
    colValues = np.array(
        gscript.vector_db_select(reaches, layer=1)['values'].values())
    cats = colValues[:, colNames == 'cat'].astype(int).squeeze()
    nseg = np.arange(1, len(cats) + 1)
    nseg_cats = []
    for i in range(len(cats)):
        nseg_cats.append((nseg[i], cats[i]))
    cur = reachesTopo.table.conn.cursor()
    # Hydrogeologic properties
    cur.execute("update " + reaches + " set STRTHICK=" + str(STRTHICK))
    cur.execute("update " + reaches + " set STRHC1=" + str(STRHC1))
    cur.execute("update " + reaches + " set THTS=" + str(THTS))
    cur.execute("update " + reaches + " set THTI=" + str(THTI))
    cur.execute("update " + reaches + " set EPS=" + str(EPS))
    cur.execute("update " + reaches + " set UHC=" + str(UHC))
    # Grid properties
    cur.execute("update " + reaches + " set KRCH=1")  # Top layer: unchangable
    # NOTE(review): nseg_cats holds (sequence number, cat) pairs but is
    # matched here against the `row`/`col` columns, not `cat` -- looks
    # suspicious; confirm against the intended MODFLOW row/col mapping.
    cur.executemany("update " + reaches + " set IRCH=? where row=?", nseg_cats)
    cur.executemany("update " + reaches + " set JRCH=? where col=?", nseg_cats)
    reachesTopo.table.conn.commit()
    reachesTopo.close()
    v.to_db(map=reaches, columns='RCHLEN', option='length', quiet=True)

    # Still to go after these:
    # STRTOP (added with slope)
    # IREACH (whole next section dedicated to this)
    # SLOPE (need z_start and z_end)

    # Now, the light stuff is over: time to build the reach order
    # Upload the start/end coordinates of each reach line.
    v.to_db(map=reaches, option='start', columns='xr1,yr1')
    v.to_db(map=reaches, option='end', columns='xr2,yr2')

    # Now just sort by category, find which stream has the same xr1 and yr1 as
    # x1 and y1 (or a_x1, a_y1) and then find where its endpoint matches another
    # starting point and move down the line.
    # v.db.select reaches col=cat,a_id,xr1,xr2 where="a_x1 = xr1"

    # First, get the starting coordinates of each stream segment
    # and a set of river ID's (ordered from 1...N)
    colNames = np.array(gscript.vector_db_select(segments, layer=1)['columns'])
    colValues = np.array(
        gscript.vector_db_select(segments, layer=1)['values'].values())
    number_of_segments = colValues.shape[0]
    segment_x1s = colValues[:, colNames == 'x1'].astype(float).squeeze()
    segment_y1s = colValues[:, colNames == 'y1'].astype(float).squeeze()
    segment_ids = colValues[:, colNames == 'id'].astype(float).squeeze()

    # Then move back to the reaches map to produce the ordering
    colNames = np.array(gscript.vector_db_select(reaches, layer=1)['columns'])
    colValues = np.array(
        gscript.vector_db_select(reaches, layer=1)['values'].values())
    reach_cats = colValues[:, colNames == 'cat'].astype(int).squeeze()
    reach_x1s = colValues[:, colNames == 'xr1'].astype(float).squeeze()
    reach_y1s = colValues[:, colNames == 'yr1'].astype(float).squeeze()
    reach_x2s = colValues[:, colNames == 'xr2'].astype(float).squeeze()
    reach_y2s = colValues[:, colNames == 'yr2'].astype(float).squeeze()
    segment_ids__reach = colValues[:, colNames == 'segment_id'].astype(
        float).squeeze()

    # Walk each segment's reaches downstream: start at the reach whose
    # start point equals the segment's start point, then repeatedly find
    # the reach whose start equals the previous reach's end.
    # NOTE(review): relies on exact float equality of coordinates written
    # by v.to.db -- works only because both sides come from the same
    # geometry.
    for segment_id in segment_ids:
        reach_order_cats = []
        downstream_directed = []
        ssel = segment_ids == segment_id
        rsel = segment_ids__reach == segment_id  # selector
        # Find first segment: x1y1 first here, but not necessarily later
        downstream_directed.append(1)
        _x_match = reach_x1s[rsel] == segment_x1s[ssel]
        _y_match = reach_y1s[rsel] == segment_y1s[ssel]
        _i_match = _x_match * _y_match
        x1y1 = True  # false if x2y2
        # Find cat
        _cat = int(reach_cats[rsel][_x_match * _y_match])
        reach_order_cats.append(_cat)
        # Get end of reach = start of next one
        reach_x_end = float(reach_x2s[reach_cats == _cat])
        reach_y_end = float(reach_y2s[reach_cats == _cat])
        while _i_match.any():
            _x_match = reach_x1s[rsel] == reach_x_end
            _y_match = reach_y1s[rsel] == reach_y_end
            _i_match = _x_match * _y_match
            if _i_match.any():
                _cat = int(reach_cats[rsel][_x_match * _y_match])
                reach_x_end = float(reach_x2s[reach_cats == _cat])
                reach_y_end = float(reach_y2s[reach_cats == _cat])
                reach_order_cats.append(_cat)
        # Python 2 print statement: shows ordered vs. total reach counts
        # for this segment (they should be equal).
        print len(reach_order_cats), len(reach_cats[rsel])

        # Reach order to database table
        reach_number__reach_order_cats = []
        for i in range(len(reach_order_cats)):
            reach_number__reach_order_cats.append((i + 1, reach_order_cats[i]))
        reachesTopo = VectorTopo(reaches)
        reachesTopo.open('rw')
        cur = reachesTopo.table.conn.cursor()
        cur.executemany("update " + reaches + " set IREACH=? where cat=?",
                        reach_number__reach_order_cats)
        reachesTopo.table.conn.commit()
        reachesTopo.close()

    # TOP AND BOTTOM ARE OUT OF ORDER: SOME SEGS ARE BACKWARDS. UGH!!!!
    # NEED TO GET THEM IN ORDER TO GET THE Z VALUES AT START AND END

    # 2018.10.01: Updating this to use the computational region for the DEM
    g.region(raster=elevation)

    # Compute slope and starting elevations from the elevations at the start and
    # end of the reaches and the length of each reach]

    gscript.message('Obtaining elevation values from raster: may take time.')
    v.db_addcolumn(map=reaches,
                   columns='zr1 double precision, zr2 double precision')
    zr1 = []
    zr2 = []
    # Query the DEM at each reach's start and end point (one r.what call
    # per point -- this is the slow part).
    for i in range(len(reach_cats)):
        _x = reach_x1s[i]
        _y = reach_y1s[i]
        #print _x, _y
        _z = float(
            gscript.parse_command('r.what',
                                  map=elevation,
                                  coordinates=str(_x) + ',' +
                                  str(_y)).keys()[0].split('|')[-1])
        zr1.append(_z)
        _x = reach_x2s[i]
        _y = reach_y2s[i]
        _z = float(
            gscript.parse_command('r.what',
                                  map=elevation,
                                  coordinates=str(_x) + ',' +
                                  str(_y)).keys()[0].split('|')[-1])
        zr2.append(_z)

    zr1_cats = []
    zr2_cats = []
    for i in range(len(reach_cats)):
        zr1_cats.append((zr1[i], reach_cats[i]))
        zr2_cats.append((zr2[i], reach_cats[i]))

    # Upload the sampled elevations to the attribute table.
    reachesTopo = VectorTopo(reaches)
    reachesTopo.open('rw')
    cur = reachesTopo.table.conn.cursor()
    cur.executemany("update " + reaches + " set zr1=? where cat=?", zr1_cats)
    cur.executemany("update " + reaches + " set zr2=? where cat=?", zr2_cats)
    reachesTopo.table.conn.commit()
    reachesTopo.close()

    # Use these to create slope -- backwards possible on DEM!
    # Clamp to the minimum slope Smin where the computed value is too
    # small (or negative, i.e. apparently uphill).
    v.db_update(map=reaches, column='SLOPE', value='(zr1 - zr2)/RCHLEN')
    v.db_update(map=reaches,
                column='SLOPE',
                value=Smin,
                where='SLOPE <= ' + str(Smin))

    # srtm_local_filled_grid = srtm_local_filled @ 200m (i.e. current grid)
    #  resolution
    # r.to.vect in=srtm_local_filled_grid out=srtm_local_filled_grid col=z type=area --o#
    # NOT SURE IF IT IS BEST TO USE MEAN ELEVATION OR TOP ELEVATION!!!!!!!!!!!!!!!!!!!!!!!
    v.db_addcolumn(map=reaches, columns='z_topo_mean double precision')
    v.what_rast(map=reaches, raster=elevation,
                column='z_topo_mean')  #, query_column='z')
    # STRTOP = mean topographic elevation minus the stream incision depth.
    v.db_update(map=reaches,
                column='STRTOP',
                value='z_topo_mean -' + str(h_stream),
                quiet=True)
Ejemplo n.º 13
0
def main():
    """
    Import any raster or vector data set and add its attribute
    to a GSFLOW data object

    Exactly one of the vector_area / vector_points / raster options is
    expected to be set; its attribute (from_column) is transferred into
    `column` of the target map. Raster and vector-area inputs use an
    area-weighted average; point inputs use nearest-feature transfer.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # Parsing: map the user-facing type keyword onto an SQL column type.
    if options["attrtype"] == "int":
        attrtype = "integer"
    elif options["attrtype"] == "float":
        attrtype = "double precision"
    elif options["attrtype"] == "string":
        attrtype = "varchar"
    else:
        attrtype = ""

    ########################################
    # PROCESS AND UPLOAD TO DATABASE TABLE #
    ########################################

    # BUGFIX: string comparisons below previously used `is`/`is not`,
    # which test object identity and only worked by accident of CPython
    # string interning (SyntaxWarning on Python >= 3.8); use ==/!=.
    if options["vector_area"] != "":
        gscript.use_temp_region()
        g.region(vector=options["map"], res=options["dxy"])
        # Rasterize the area attribute so it can be averaged per feature.
        v.to_rast(
            input=options["vector_area"],
            output="tmp___tmp",
            use="attr",
            attribute_column=options["from_column"],
            quiet=True,
            overwrite=True,
        )
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options["map"],
                            columns=options["column"],
                            quiet=True)
        except Exception:
            # Column may not exist yet; that is fine.
            pass
        if attrtype == "double precision":
            try:
                gscript.message("Checking for existing column to overwrite")
                v.db_dropcolumn(map=options["map"],
                                columns="tmp_average",
                                quiet=True)
            except Exception:
                pass
            v.rast_stats(
                map=options["map"],
                raster="tmp___tmp",
                column_prefix="tmp",
                method="average",
                flags="c",
                quiet=True,
            )
            g.remove(type="raster", name="tmp___tmp", flags="f", quiet=True)
            v.db_renamecolumn(
                map=options["map"],
                column=["tmp_average", options["column"]],
                quiet=True,
            )

        else:
            # Non-numeric attribute: copy it via nearest-feature transfer.
            try:
                v.db_addcolumn(
                    map=options["map"],
                    columns=options["column"] + " " + attrtype,
                    quiet=True,
                )
            except Exception:
                pass
            gscript.run_command(
                "v.distance",
                from_=options["map"],
                to=options["vector_area"],
                upload="to_attr",
                to_column=options["from_column"],
                column=options["column"],
                quiet=True,
            )
    elif options["vector_points"] != "":
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options["map"],
                            columns=options["column"],
                            quiet=True)
            v.db_addcolumn(
                map=options["map"],
                columns=options["column"] + " " + attrtype,
                quiet=True,
            )
        except Exception:
            pass
        gscript.run_command(
            "v.distance",
            from_=options["map"],
            to=options["vector_points"],
            upload="to_attr",
            to_column=options["from_column"],
            column=options["column"],
            quiet=True,
        )

    elif options["raster"] != "":
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options["map"],
                            columns=options["column"],
                            quiet=True)
        except Exception:
            pass
        v.rast_stats(
            map=options["map"],
            raster=options["raster"],
            column_prefix="tmp",
            method="average",
            flags="c",
            quiet=True,
        )
        v.db_renamecolumn(map=options["map"],
                          column=["tmp_average", options["column"]],
                          quiet=True)

    gscript.message("Done.")
Ejemplo n.º 14
0
def TMaskAlgorithm(images,
                   BACKUP_ALG_THRESHOLD=15,
                   RADIUS_BUFF=3,
                   T_MEDIAN_THRESHOLD=0.04,
                   BLUE_CHANNEL_PURE_SNOW_THRESHOLD=0.4,
                   NIR_CHANNEL_PURE_SNOW_THRESHOLD=0.12,
                   BLUE_CHANNEL_THRESHOLD=0.04,
                   NIR_CHANNEL_CLOUD_SNOW_THRESHOLD=0.04,
                   NIR_CHANNEL_SHADOW_CLEAR_THRESHOLD=-0.04,
                   SWIR1_CHANNEL_SHADOW_CLEAR_THRESHOLD=-0.04):
    """Run the TMask cloud/shadow masking chain over an image collection.

    Each image record gains a final '<basename>.Mask' raster combining the
    BackUp mask (used where too few clear observations exist) with the
    TMask classification; intermediate rasters are removed as the chain
    progresses. Returns the (mutated) image list.
    """
    # sorting by date:
    images.sort(key=lambda im: getDate(im[0]), reverse=True)

    # the size of the collection:
    ImageCounts = len(images)
    text1 = 'Total number of images: %s' % (ImageCounts)
    text2 = 'Warning: You have less than %s images!' % (BACKUP_ALG_THRESHOLD)
    if ImageCounts >= BACKUP_ALG_THRESHOLD:
        logging.info(text1)
    else:
        logging.info(text2)

    # FMask, composite and non-snow masks:
    FMask_collection = [FMask(im, RADIUS_BUFF) for im in images]
    for im in images:
        delete(im, 'BQA')

    # reducing FMask collection:
    r.series(input=FMask_collection,
             output='ConditionMap.const',
             method='sum',
             overwrite=True)
    ConditionMap = 'ConditionMap.const'
    # BUGFIX: this removal previously used map(lambda ...). Under
    # Python 3, map() returns a lazy iterator that is never consumed, so
    # the g.remove calls would never execute; use an explicit loop.
    for im in FMask_collection:
        g.remove(type='raster', name=im, flags='fb')

    # detect which part of data should be used for BackUp algorithm:
    expression = 'ClearSmall.const=%(im)s>(%(all)s-%(thresh)s)' % {
        'im': ConditionMap,
        'all': ImageCounts,
        'thresh': BACKUP_ALG_THRESHOLD
    }
    r.mapcalc(expression=expression, overwrite=True)
    ClearSmall = 'ClearSmall.const'
    g.remove(type='raster', name=ConditionMap, flags='fb')

    # forming non-snow pixels:
    for im in images:
        BackUp_mask(im, ClearSmall)
        delete(im, 'nonSnow')

    # calculate mediana for potential clear pixels in BackUp approach:
    r.series(input=selectFromCollection(images, 'B3_masked'),
             output='Mediana.const',
             method='median',
             overwrite=True)
    Mediana = 'Mediana.const'
    for im in images:
        delete(im, 'B3_masked')

    # BackUp algorithm:
    for im in images:
        BackUpAlgorithm(im, ClearSmall, Mediana, T_MEDIAN_THRESHOLD)
        delete(im, 'Composite')
    g.remove(type='raster', name=Mediana, flags='fb')

    # create mask for TMask algorithm:
    for im in images:
        TMaskp_mask(im)
        #delete(im, 'B3_toar')
        #delete(im, 'B5_toar')
        #delete(im, 'B6_toar')
    #g.remove(type='raster', name=ClearSmall, flags='fb')

    # regression for blue, NIR, SWIR channel:
    RobustRegression(images, 'B3_masked', fet=0.5, dod=2, order=1, iterates=2)
    RobustRegression(images, 'B5_masked', fet=0.5, dod=2, order=1, iterates=2)
    RobustRegression(images, 'B6_masked', fet=0.5, dod=2, order=1, iterates=2)

    # getting residuals:
    for im in images:
        getResidual(im, selectFromImage(im, 'B3_masked_lwr_lwr'), 'B3_masked')
        getResidual(im, selectFromImage(im, 'B5_masked_lwr_lwr'), 'B5_masked')
        getResidual(im, selectFromImage(im, 'B6_masked_lwr_lwr'), 'B6_masked')
        delete(im, 'B5_masked')
        delete(im, 'B6_masked')
        #delete(im, 'B5_masked_lwr_lwr')

    # classification:
    const1 = NIR_CHANNEL_PURE_SNOW_THRESHOLD
    const2 = BLUE_CHANNEL_PURE_SNOW_THRESHOLD
    const3 = BLUE_CHANNEL_THRESHOLD
    const4 = NIR_CHANNEL_CLOUD_SNOW_THRESHOLD
    const5 = NIR_CHANNEL_SHADOW_CLEAR_THRESHOLD
    const6 = SWIR1_CHANNEL_SHADOW_CLEAR_THRESHOLD
    for im in images:
        classify(im, const1, const2, const3, const4, const5, const6,
                 'B3_masked_lwr_lwr', 'B6_masked_lwr_lwr')
        delete(im, 'B3_masked_lwr_lwr')
        delete(im, 'B6_masked_lwr_lwr')
        delete(im, 'B3_masked')
        delete(im, 'B3_masked_residual')
        delete(im, 'B5_masked_residual')
        delete(im, 'B6_masked_residual')

    # combine BackUp mask and TMask into the final per-image mask:
    for im in images:
        basename = im[0].split('.')[0]
        out = basename + '.Mask'
        expression = '%(out)s=(%(mask1)s) + (%(mask2)s*not(%(mask1)s))' \
                     %{'out':out, 'mask1': selectFromImage(im,'BackUpMask'), 'mask2': selectFromImage(im,'TMask')}
        r.mapcalc(expression=expression, overwrite=True)
        delete(im, 'BackUpMask')
        delete(im, 'TMask')
        im.append(out)

    return images
Ejemplo n.º 15
0
def delete(image, channel):
    """Remove the raster for *channel* of *image* from GRASS and drop its
    name from the image record."""
    raster = selectFromImage(image, channel)
    g.remove(type='raster', name=raster, flags='fb')
    image.remove(raster)
Ejemplo n.º 16
0
def main():
    """Compute a multi-scale standardized Topographic Position Index.

    The TPI is computed at a logarithmically-spaced series of radii
    between minradius and maxradius, standardized at each scale, and the
    scales are integrated by keeping, per cell, the value with the
    largest absolute magnitude.
    """
    # options and flags
    options, flags = gs.parser()
    input_raster = options["input"]
    minradius = int(options["minradius"])
    maxradius = int(options["maxradius"])
    steps = int(options["steps"])
    output_raster = options["output"]

    region = Region()
    res = np.mean([region.nsres, region.ewres])

    # some checks
    if "@" in output_raster:
        output_raster = output_raster.split("@")[0]

    if maxradius <= minradius:
        gs.fatal("maxradius must be greater than minradius")

    if steps < 2:
        gs.fatal("steps must be greater than 1")

    # calculate radi for generalization
    # BUGFIX: dtype was np.int, an alias deprecated in NumPy 1.20 and
    # removed in 1.24; the builtin int is equivalent here.
    radi = np.logspace(np.log(minradius),
                       np.log(maxradius),
                       steps,
                       base=np.exp(1),
                       dtype=int)
    radi = np.unique(radi)
    sizes = radi * 2 + 1

    # multiscale calculation
    ztpi_maps = list()

    for step, (radius, size) in enumerate(zip(radi[::-1], sizes[::-1])):
        gs.message(
            "Calculating the TPI at radius {radius}".format(radius=radius))

        # generalize the dem
        step_res = res * size
        step_res_pretty = str(step_res).replace(".", "_")
        generalized_dem = gs.tempname(4)

        if size > 15:
            # Large windows: cheaper to coarsen the region, aggregate,
            # and resample back than to run a huge moving window.
            step_dem = gs.tempname(4)
            gg.region(res=str(step_res))
            gr.resamp_stats(
                input=input_raster,
                output=step_dem,
                method="average",
                flags="w",
            )
            gr.resamp_rst(
                input=step_dem,
                ew_res=res,
                ns_res=res,
                elevation=generalized_dem,
                quiet=True,
            )
            region.write()
            gg.remove(type="raster", name=step_dem, flags="f", quiet=True)
        else:
            gr.neighbors(input=input_raster, output=generalized_dem, size=size)

        # calculate the tpi
        tpi = gs.tempname(4)
        gr.mapcalc(expression="{x} = {a} - {b}".format(
            x=tpi, a=input_raster, b=generalized_dem))
        gg.remove(type="raster", name=generalized_dem, flags="f", quiet=True)

        # standardize the tpi
        raster_stats = gr.univar(map=tpi, flags="g",
                                 stdout_=PIPE).outputs.stdout
        raster_stats = parse_key_val(raster_stats)
        tpi_mean = float(raster_stats["mean"])
        tpi_std = float(raster_stats["stddev"])
        ztpi = gs.tempname(4)
        ztpi_maps.append(ztpi)
        RAST_REMOVE.append(ztpi)

        gr.mapcalc(expression="{x} = ({a} - {mean})/{std}".format(
            x=ztpi, a=tpi, mean=tpi_mean, std=tpi_std))
        gg.remove(type="raster", name=tpi, flags="f", quiet=True)

        # integrate
        # BUGFIX: the condition was `step > 1`, which skipped integrating
        # the map from step 1 and left tpi_updated2 undefined (NameError
        # at the rename below) whenever only two radius steps survived
        # np.unique.
        if step > 0:
            tpi_updated2 = gs.tempname(4)
            gr.mapcalc("{x} = if(abs({a}) > abs({b}), {a}, {b})".format(
                a=ztpi_maps[step], b=tpi_updated1, x=tpi_updated2))
            RAST_REMOVE.append(tpi_updated2)
            tpi_updated1 = tpi_updated2
        else:
            tpi_updated1 = ztpi_maps[0]

    # Keep the final integrated map out of the cleanup list, then rename
    # it to the requested output.
    RAST_REMOVE.pop()
    gg.rename(raster=(tpi_updated2, output_raster), quiet=True)

    # set color theme
    with RasterRow(output_raster) as src:
        color_rules = """{minv} blue
            -1 0:34:198
            0 255:255:255
            1 255:0:0
            {maxv} 110:15:0
            """
        color_rules = color_rules.format(minv=src.info.min, maxv=src.info.max)
        gr.colors(map=output_raster, rules="-", stdin_=color_rules, quiet=True)
Ejemplo n.º 17
0
def main():
    """
    Import any raster or vector data set and add its attribute
    to a GSFLOW data object.

    Exactly one of the options 'vector_area', 'vector_points', or
    'raster' is expected to be non-empty; its values are transferred
    into the attribute table of the target vector map (option 'map'),
    under the column name given by option 'column'.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # Map the user-facing type names onto SQL column types.  An empty
    # string means "no explicit type requested".
    if options['attrtype'] == 'int':
        attrtype = 'integer'
    elif options['attrtype'] == 'float':
        attrtype = 'double precision'
    elif options['attrtype'] == 'string':
        attrtype = 'varchar'
    else:
        attrtype = ''

    ########################################
    # PROCESS AND UPLOAD TO DATABASE TABLE #
    ########################################

    # Bug fix: the original used "is not ''" / "is 'double precision'",
    # which test object *identity*, not equality.  For strings returned
    # by the option parser that test is unreliable (and a SyntaxWarning
    # on Python >= 3.8); use ==/!= instead.
    if options['vector_area'] != '':
        # Rasterize the area vector at the requested resolution, then
        # aggregate the temp raster back onto the target map.
        gscript.use_temp_region()
        g.region(vector=options['map'], res=options['dxy'])
        v.to_rast(input=options['vector_area'],
                  output='tmp___tmp',
                  use='attr',
                  attribute_column=options['from_column'],
                  quiet=True,
                  overwrite=True)
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options['map'],
                            columns=options['column'],
                            quiet=True)
        except Exception:
            # Best effort: the column may simply not exist yet.
            pass
        if attrtype == 'double precision':
            try:
                gscript.message("Checking for existing column to overwrite")
                v.db_dropcolumn(map=options['map'],
                                columns='tmp_average',
                                quiet=True)
            except Exception:
                pass
            # Zonal average of the rasterized attribute per feature; the
            # generated 'tmp_average' column is renamed to the target name.
            v.rast_stats(map=options['map'],
                         raster='tmp___tmp',
                         column_prefix='tmp',
                         method='average',
                         flags='c',
                         quiet=True)
            g.remove(type='raster', name='tmp___tmp', flags='f', quiet=True)
            v.db_renamecolumn(map=options['map'],
                              column=['tmp_average', options['column']],
                              quiet=True)

        else:
            try:
                v.db_addcolumn(map=options['map'],
                               columns=options['column'] + ' ' + attrtype,
                               quiet=True)
            except Exception:
                pass
            # Transfer the attribute from the nearest area feature.
            gscript.run_command('v.distance',
                                from_=options['map'],
                                to=options['vector_area'],
                                upload='to_attr',
                                to_column=options['from_column'],
                                column=options['column'],
                                quiet=True)
    elif options['vector_points'] != '':
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options['map'],
                            columns=options['column'],
                            quiet=True)
            v.db_addcolumn(map=options['map'],
                           columns=options['column'] + ' ' + attrtype,
                           quiet=True)
        except Exception:
            pass
        # Transfer the attribute from the nearest point feature.
        gscript.run_command('v.distance',
                            from_=options['map'],
                            to=options['vector_points'],
                            upload='to_attr',
                            to_column=options['from_column'],
                            column=options['column'],
                            quiet=True)

    elif options['raster'] != '':
        try:
            gscript.message("Checking for existing column to overwrite")
            v.db_dropcolumn(map=options['map'],
                            columns=options['column'],
                            quiet=True)
        except Exception:
            pass
        # Zonal average of the input raster over each feature, then
        # rename the generated 'tmp_average' column to the target name.
        v.rast_stats(map=options['map'],
                     raster=options['raster'],
                     column_prefix='tmp',
                     method='average',
                     flags='c',
                     quiet=True)
        v.db_renamecolumn(map=options['map'],
                          column=['tmp_average', options['column']],
                          quiet=True)

    gscript.message("Done.")
Ejemplo n.º 18
0
def main():
    """
    Builds river reaches for input to the USGS hydrologic model, GSFLOW.
    These reaches link the PRMS stream segments to the MODFLOW grid cells.

    NOTE(review): this function relies on Python-2-only behaviour in
    several places (``dict.values()`` fed straight to ``np.array`` and
    ``dict.keys()[0]``); it will not run unmodified on Python 3.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()
    segments = options["segment_input"]
    grid = options["grid_input"]
    reaches = options["output"]
    elevation = options["elevation"]
    Smin = options["s_min"]
    h_stream = options["h_stream"]
    # Column names (in the segments map) holding segment endpoint coords.
    x1 = options["upstream_easting_column_seg"]
    y1 = options["upstream_northing_column_seg"]
    x2 = options["downstream_easting_column_seg"]
    y2 = options["downstream_northing_column_seg"]
    tostream = options["tostream_cat_column_seg"]
    # Hydraulic paramters
    STRTHICK = options["strthick"]
    STRHC1 = options["strhc1"]
    THTS = options["thts"]
    THTI = options["thti"]
    EPS = options["eps"]
    UHC = options["uhc"]
    # Build reach maps by overlaying segments on grid
    if len(gscript.find_file(segments, element="vector")["name"]) > 0:
        v.extract(
            input=segments,
            output="GSFLOW_TEMP__",
            type="line",
            quiet=True,
            overwrite=True,
        )
        # Intersect the stream lines with the MODFLOW grid: each piece of
        # a segment falling inside one grid cell becomes one reach.
        v.overlay(
            ainput="GSFLOW_TEMP__",
            atype="line",
            binput=grid,
            output=reaches,
            operator="and",
            overwrite=gscript.overwrite(),
            quiet=True,
        )
        g.remove(type="vector", name="GSFLOW_TEMP__", quiet=True, flags="f")
    else:
        gscript.fatal('No vector file "' + segments + '" found.')

    # Start editing database table
    reachesTopo = VectorTopo(reaches)
    reachesTopo.open("rw")

    # Rename a,b columns
    # (v.overlay prefixes columns from the two inputs with a_/b_).
    reachesTopo.table.columns.rename("a_" + x1, "x1")
    reachesTopo.table.columns.rename("a_" + x2, "x2")
    reachesTopo.table.columns.rename("a_" + y1, "y1")
    reachesTopo.table.columns.rename("a_" + y2, "y2")
    reachesTopo.table.columns.rename("a_NSEG", "NSEG")
    reachesTopo.table.columns.rename("a_ISEG", "ISEG")
    reachesTopo.table.columns.rename("a_stream_type", "stream_type")
    reachesTopo.table.columns.rename("a_type_code", "type_code")
    reachesTopo.table.columns.rename("a_cat", "rnum_cat")
    reachesTopo.table.columns.rename("a_" + tostream, "tostream")
    reachesTopo.table.columns.rename("a_id", "segment_id")
    reachesTopo.table.columns.rename("a_OUTSEG", "OUTSEG")
    reachesTopo.table.columns.rename("b_row", "row")
    reachesTopo.table.columns.rename("b_col", "col")
    reachesTopo.table.columns.rename("b_id", "cell_id")

    # Drop unnecessary columns
    cols = reachesTopo.table.columns.names()
    for col in cols:
        if (col[:2] == "a_") or (col[:2] == "b_"):
            reachesTopo.table.columns.drop(col)

    # Add new columns to 'reaches'
    # (MODFLOW SFR package fields plus reach endpoint coordinates).
    reachesTopo.table.columns.add("KRCH", "integer")
    reachesTopo.table.columns.add("IRCH", "integer")
    reachesTopo.table.columns.add("JRCH", "integer")
    reachesTopo.table.columns.add("IREACH", "integer")
    reachesTopo.table.columns.add("RCHLEN", "double precision")
    reachesTopo.table.columns.add("STRTOP", "double precision")
    reachesTopo.table.columns.add("SLOPE", "double precision")
    reachesTopo.table.columns.add("STRTHICK", "double precision")
    reachesTopo.table.columns.add("STRHC1", "double precision")
    reachesTopo.table.columns.add("THTS", "double precision")
    reachesTopo.table.columns.add("THTI", "double precision")
    reachesTopo.table.columns.add("EPS", "double precision")
    reachesTopo.table.columns.add("UHC", "double precision")
    reachesTopo.table.columns.add("xr1", "double precision")
    reachesTopo.table.columns.add("xr2", "double precision")
    reachesTopo.table.columns.add("yr1", "double precision")
    reachesTopo.table.columns.add("yr2", "double precision")

    # Commit columns before editing (necessary?)
    reachesTopo.table.conn.commit()
    reachesTopo.close()

    # Update some columns that can be done now
    reachesTopo.open("rw")
    colNames = np.array(gscript.vector_db_select(reaches, layer=1)["columns"])
    # NOTE(review): passing dict .values() directly to np.array only
    # yields a 2-D array on Python 2 (where .values() is a list); on
    # Python 3 this needs list(...).  The same pattern recurs below.
    colValues = np.array(gscript.vector_db_select(reaches, layer=1)["values"].values())
    cats = colValues[:, colNames == "cat"].astype(int).squeeze()
    nseg = np.arange(1, len(cats) + 1)
    nseg_cats = []
    for i in range(len(cats)):
        nseg_cats.append((nseg[i], cats[i]))
    cur = reachesTopo.table.conn.cursor()
    # Hydrogeologic properties
    cur.execute("update " + reaches + " set STRTHICK=" + str(STRTHICK))
    cur.execute("update " + reaches + " set STRHC1=" + str(STRHC1))
    cur.execute("update " + reaches + " set THTS=" + str(THTS))
    cur.execute("update " + reaches + " set THTI=" + str(THTI))
    cur.execute("update " + reaches + " set EPS=" + str(EPS))
    cur.execute("update " + reaches + " set UHC=" + str(UHC))
    # Grid properties
    cur.execute("update " + reaches + " set KRCH=1")  # Top layer: unchangable
    # NOTE(review): nseg_cats pairs are (sequence number, cat) but the
    # WHERE clauses below match on row/col -- presumably row/col values
    # coincide with cats on this grid; confirm against the grid builder.
    cur.executemany("update " + reaches + " set IRCH=? where row=?", nseg_cats)
    cur.executemany("update " + reaches + " set JRCH=? where col=?", nseg_cats)
    reachesTopo.table.conn.commit()
    reachesTopo.close()
    v.to_db(map=reaches, columns="RCHLEN", option="length", quiet=True)

    # Still to go after these:
    # STRTOP (added with slope)
    # IREACH (whole next section dedicated to this)
    # SLOPE (need z_start and z_end)

    # Now, the light stuff is over: time to build the reach order
    v.to_db(map=reaches, option="start", columns="xr1,yr1")
    v.to_db(map=reaches, option="end", columns="xr2,yr2")

    # Now just sort by category, find which stream has the same xr1 and yr1 as
    # x1 and y1 (or a_x1, a_y1) and then find where its endpoint matches another
    # starting point and move down the line.
    # v.db.select reaches col=cat,a_id,xr1,xr2 where="a_x1 = xr1"

    # First, get the starting coordinates of each stream segment
    # and a set of river ID's (ordered from 1...N)
    colNames = np.array(gscript.vector_db_select(segments, layer=1)["columns"])
    colValues = np.array(gscript.vector_db_select(segments, layer=1)["values"].values())
    number_of_segments = colValues.shape[0]
    segment_x1s = colValues[:, colNames == "x1"].astype(float).squeeze()
    segment_y1s = colValues[:, colNames == "y1"].astype(float).squeeze()
    segment_ids = colValues[:, colNames == "id"].astype(float).squeeze()

    # Then move back to the reaches map to produce the ordering
    colNames = np.array(gscript.vector_db_select(reaches, layer=1)["columns"])
    colValues = np.array(gscript.vector_db_select(reaches, layer=1)["values"].values())
    reach_cats = colValues[:, colNames == "cat"].astype(int).squeeze()
    reach_x1s = colValues[:, colNames == "xr1"].astype(float).squeeze()
    reach_y1s = colValues[:, colNames == "yr1"].astype(float).squeeze()
    reach_x2s = colValues[:, colNames == "xr2"].astype(float).squeeze()
    reach_y2s = colValues[:, colNames == "yr2"].astype(float).squeeze()
    segment_ids__reach = colValues[:, colNames == "segment_id"].astype(float).squeeze()

    # Walk each segment's reaches downstream: the reach whose start
    # matches the segment start comes first; then repeatedly find the
    # reach whose start equals the previous reach's end.
    for segment_id in segment_ids:
        reach_order_cats = []
        downstream_directed = []
        ssel = segment_ids == segment_id
        rsel = segment_ids__reach == segment_id  # selector
        # Find first segment: x1y1 first here, but not necessarily later
        downstream_directed.append(1)
        _x_match = reach_x1s[rsel] == segment_x1s[ssel]
        _y_match = reach_y1s[rsel] == segment_y1s[ssel]
        # boolean masks combined with * act as an element-wise logical AND
        _i_match = _x_match * _y_match
        x1y1 = True  # false if x2y2
        # Find cat
        _cat = int(reach_cats[rsel][_x_match * _y_match])
        reach_order_cats.append(_cat)
        # Get end of reach = start of next one
        reach_x_end = float(reach_x2s[reach_cats == _cat])
        reach_y_end = float(reach_y2s[reach_cats == _cat])
        while _i_match.any():
            _x_match = reach_x1s[rsel] == reach_x_end
            _y_match = reach_y1s[rsel] == reach_y_end
            _i_match = _x_match * _y_match
            if _i_match.any():
                _cat = int(reach_cats[rsel][_x_match * _y_match])
                reach_x_end = float(reach_x2s[reach_cats == _cat])
                reach_y_end = float(reach_y2s[reach_cats == _cat])
                reach_order_cats.append(_cat)
        # Diagnostic: number of ordered reaches vs. reaches in segment.
        _message = str(len(reach_order_cats)) + " " + str(len(reach_cats[rsel]))
        gscript.message(_message)

        # Reach order to database table
        reach_number__reach_order_cats = []
        for i in range(len(reach_order_cats)):
            reach_number__reach_order_cats.append((i + 1, reach_order_cats[i]))
        reachesTopo = VectorTopo(reaches)
        reachesTopo.open("rw")
        cur = reachesTopo.table.conn.cursor()
        cur.executemany(
            "update " + reaches + " set IREACH=? where cat=?",
            reach_number__reach_order_cats,
        )
        reachesTopo.table.conn.commit()
        reachesTopo.close()

    # TOP AND BOTTOM ARE OUT OF ORDER: SOME SEGS ARE BACKWARDS. UGH!!!!
    # NEED TO GET THEM IN ORDER TO GET THE Z VALUES AT START AND END

    # 2018.10.01: Updating this to use the computational region for the DEM
    g.region(raster=elevation)

    # Compute slope and starting elevations from the elevations at the start and
    # end of the reaches and the length of each reach]

    gscript.message("Obtaining elevation values from raster: may take time.")
    v.db_addcolumn(map=reaches, columns="zr1 double precision, zr2 double precision")
    zr1 = []
    zr2 = []
    # One r.what call per reach endpoint: slow, but simple.
    # NOTE(review): .keys()[0] is Python-2-only (py3 returns a view).
    for i in range(len(reach_cats)):
        _x = reach_x1s[i]
        _y = reach_y1s[i]
        # print _x, _y
        _z = float(
            gscript.parse_command(
                "r.what", map=elevation, coordinates=str(_x) + "," + str(_y)
            )
            .keys()[0]
            .split("|")[-1]
        )
        zr1.append(_z)
        _x = reach_x2s[i]
        _y = reach_y2s[i]
        _z = float(
            gscript.parse_command(
                "r.what", map=elevation, coordinates=str(_x) + "," + str(_y)
            )
            .keys()[0]
            .split("|")[-1]
        )
        zr2.append(_z)

    zr1_cats = []
    zr2_cats = []
    for i in range(len(reach_cats)):
        zr1_cats.append((zr1[i], reach_cats[i]))
        zr2_cats.append((zr2[i], reach_cats[i]))

    reachesTopo = VectorTopo(reaches)
    reachesTopo.open("rw")
    cur = reachesTopo.table.conn.cursor()
    cur.executemany("update " + reaches + " set zr1=? where cat=?", zr1_cats)
    cur.executemany("update " + reaches + " set zr2=? where cat=?", zr2_cats)
    reachesTopo.table.conn.commit()
    reachesTopo.close()

    # Use these to create slope -- backwards possible on DEM!
    # Slopes below Smin are clamped to Smin.
    v.db_update(map=reaches, column="SLOPE", value="(zr1 - zr2)/RCHLEN")
    v.db_update(map=reaches, column="SLOPE", value=Smin, where="SLOPE <= " + str(Smin))

    # srtm_local_filled_grid = srtm_local_filled @ 200m (i.e. current grid)
    #  resolution
    # r.to.vect in=srtm_local_filled_grid out=srtm_local_filled_grid col=z type=area --o#
    # NOT SURE IF IT IS BEST TO USE MEAN ELEVATION OR TOP ELEVATION!!!!!!!!!!!!!!!!!!!!!!!
    v.db_addcolumn(map=reaches, columns="z_topo_mean double precision")
    v.what_rast(
        map=reaches, raster=elevation, column="z_topo_mean"
    )  # , query_column='z')
    # Stream top = sampled topographic elevation minus stream depth.
    v.db_update(
        map=reaches, column="STRTOP", value="z_topo_mean -" + str(h_stream), quiet=True
    )
from grass.pygrass.modules.shortcuts import raster as r
from grass.pygrass.modules.shortcuts import vector as v
from grass.pygrass.modules import Module
#os.environ['GRASS_VERBOSE'] = '-1' #runs modules silently

#-------------------------------- Variables -----------------------------------
Join = os.path.join  # shorthand for building file-system paths
O = ['overwrite']  # common flag list passed to modules: allow overwriting outputs
Layers = ('Buildings', 'Roads', 'TankPoints')  # Available Layers
Colors = ['red', 'green', 'orange', 'purple', 'yellow', 'blue', 'black']
dia = '1'  # metres
#-------------------------------- Code ----------------------------------------

if __name__ == '__main__':

    g.remove(type='vector', pattern='*', flags=['f'])  # Cleaning the Workspace

    # Importing the ShapeFiles to GRASS
    for lr in Layers:
        v.in_ogr(input=Join('Test1', lr), output=lr, key='id', flags=O)
    #v.in_ascii(input=Join('Test1', 'TankPoints.csv'),
    #output='TankPoints',
    #cat=1, x=4, y=5, skip=1, separator='comma',
    #columns='id int, name varchar(20), type varchar(10), lat double precision, lon double precision, pipe varchar(10), litre int',
    #flags=O)
    """
    Method-1:
    This method will connect all the TankPoints with the Nearest vector lines
    (Road in this case). Using v.net.salesman the network is closed and Creating
    a buffer for this output will be our Layout of TankPoints along the Road.
Ejemplo n.º 20
0
 def tearDownClass(cls):
     """Remove the generated raster map(s), if they exist.

     NOTE(review): the original docstring said "vector map", but the
     call below removes rasters (type='rast').  Presumably this is
     decorated with @classmethod on the enclosing TestCase -- the
     decorator is not visible in this excerpt; confirm.
     """
     # Local import keeps the GRASS dependency scoped to teardown.
     from grass.pygrass.modules.shortcuts import general as g
     # Force-remove every raster matching the class's temp-name pattern.
     g.remove(type='rast', pattern=cls.tmp, flags='f')
Ejemplo n.º 21
0
 def tearDownClass(cls):
     """Remove the generated raster map(s), if they exist.

     NOTE(review): the original docstring said "vector map", but the
     call below removes rasters (type='rast').  Presumably this is
     decorated with @classmethod on the enclosing TestCase -- the
     decorator is not visible in this excerpt; confirm.
     """
     # Local import keeps the GRASS dependency scoped to teardown.
     from grass.pygrass.modules.shortcuts import general as g
     # Force-remove every raster matching the class's temp-name pattern.
     g.remove(type='rast', pattern=cls.tmp, flags='f')
Ejemplo n.º 22
0
def main():
    """Compute the MRVBF (and optionally MRRTF) terrain indices.

    Multi-resolution valley-bottom flatness after Gallant & Dowling
    (2003): slope/flatness/percentile are computed at the base
    resolution, then the DEM is repeatedly generalized (3x coarser per
    step) and the indices combined across steps.
    """
    r_elevation = options['elevation']
    mrvbf = options['mrvbf'].split('@')[0]
    mrrtf = options['mrrtf'].split('@')[0]
    t_slope = float(options['t_slope'])
    t_pctl_v = float(options['t_pctl_v'])
    t_pctl_r = float(options['t_pctl_r'])
    # NOTE(review): t_vf reads options['t_rf'] -- this looks like a
    # copy/paste slip (a 't_vf' option presumably exists); confirm
    # against the module's option definitions before changing.
    t_vf = float(options['t_rf'])
    t_rf = float(options['t_rf'])
    p_slope = float(options['p_slope'])
    p_pctl = float(options['p_pctl'])
    moving_window_square = flags['s']
    levels = int(options['levels'])

    # TMP_RAST collects intermediate rasters per generalization step so
    # they can be cleaned up; current_region restores the user's region.
    global TMP_RAST
    global current_region
    TMP_RAST = {k: [] for k in range(levels)}
    current_region = Region()

    ###########################################################################
    # Some checks
    if levels < 3:
        grass.fatal('Number of generalization steps (levels) cannot be < 3')
    if t_slope <=0 or t_pctl_v <=0 or t_pctl_r <=0 or t_vf <=0 or t_rf <=0 or \
        p_slope <=0 or p_pctl <=0:
        grass.fatal('Parameter values cannot be <= 0')
    if levels > 10:
        grass.warning('A large number (>10) processing steps are selected, recommended is between 3 to 8')

    ###########################################################################
    # Intermediate outputs
    Xres_step, Yres_step, DEM = [], [], []
    slope, F, PCTL, PVF, PVF_RF = [0]*levels, [0]*levels, [0]*levels, [0]*levels, [0]*levels
    VF, VF_RF, MRVBF, MRRTF = [0]*levels, [0]*levels, [0]*levels, [0]*levels

    ###########################################################################
    # Step 1 (L=0)
    # Base scale resolution
    L = 0
    Xres_step.append(current_region.ewres)
    Yres_step.append(current_region.nsres)
    DEM.append(r_elevation)
    radi = 3

    g.message(os.linesep)
    g.message("Step {L}".format(L=L+1))
    g.message("------")

    # Calculation of slope (S1) and calculation of flatness (F1) (Equation 2)
    grass.message("Calculation of slope and transformation to flatness F{L}...".format(L=L+1))
    slope[L] = get_slope(L, DEM[L])
    F[L] = get_flatness(L, slope[L], t_slope, p_slope)

    # Calculation of elevation percentile PCTL for step 1
    grass.message("Calculation of elevation percentile PCTL{L}...".format(L=L+1))
    PCTL[L] = get_percentile(L, DEM[L], radi, moving_window_square)

    # Transform elevation percentile to local lowness for step 1 (Equation 3)
    grass.message("Calculation of preliminary valley flatness index PVF{L}...".format(L=L+1))
    PVF[L] = get_prelim_flatness(L, F[L], PCTL[L], t_pctl_v, p_pctl)
    if mrrtf != '':
        grass.message("Calculation of preliminary ridge top flatness index PRF{L}...".format(L=L+1))
        PVF_RF[L] = get_prelim_flatness_rf(L, F[L], PCTL[L], t_pctl_r, p_pctl)

    # Calculation of the valley flatness step 1 VF1 (Equation 4)
    grass.message("Calculation of valley flatness VF{L}...".format(L=L+1))
    VF[L] = get_valley_flatness(L, PVF[L], t_vf, p_slope)
    if mrrtf != '':
        grass.message("Calculation of ridge top flatness RF{L}...".format(L=L+1))
        VF_RF[L] = get_valley_flatness(L, PVF_RF[L], t_rf, p_slope)

    ##################################################################################
    # Step 2 (L=1)
    # Base scale resolution
    L = 1
    Xres_step.append(current_region.ewres)
    Yres_step.append(current_region.nsres)
    DEM.append(r_elevation)
    t_slope /= 2.0
    radi = 6

    grass.message(os.linesep)
    grass.message("Step {L}".format(L=L+1))
    grass.message("------")

    # Calculation of flatness for step 2 (Equation 5)
    # The second step commences the same way with the original DEM at its base resolution,
    # using a slope threshold ts,2 half of ts,1:
    grass.message("Calculation of flatness F{L}...".format(L=L+1))
    F[L] = get_flatness(L, slope[L-1], t_slope, p_slope)

    # Calculation of elevation percentile PCTL for step 2 (radius of 6 cells)
    grass.message("Calculation of elevation percentile PCTL{L}...".format(L=L+1))
    PCTL[L] = get_percentile(L, r_elevation, radi, moving_window_square)

    # PVF for step 2 (Equation 6)
    grass.message("Calculation of preliminary valley flatness index PVF{L}...".format(L=L+1))
    PVF[L] = get_prelim_flatness(L, F[L], PCTL[L], t_pctl_v, p_pctl)
    if mrrtf != '':
        grass.message("Calculation of preliminary ridge top flatness index PRF{L}...".format(L=L+1))
        PVF_RF[L] = get_prelim_flatness_rf(L, F[L], PCTL[L], t_pctl_r, p_pctl)

    # Calculation of the valley flatness VF for step 2 (Equation 7)
    grass.message("Calculation of valley flatness VF{L}...".format(L=L+1))
    VF[L] = get_valley_flatness(L, PVF[L], t_vf, p_slope)
    if mrrtf != '':
        grass.message("Calculation of ridge top flatness RF{L}...".format(L=L+1))
        VF_RF[L] = get_valley_flatness(L, PVF_RF[L], t_rf, p_slope)

    # Calculation of MRVBF for step 2
    grass.message("Calculation of MRVBF{L}...".format(L=L+1))
    MRVBF[L] = get_mrvbf(L, VF_Lminus1=VF[L-1], VF_L=VF[L], t=t_pctl_v)
    if mrrtf != '':
        grass.message("Calculation of MRRTF{L}...".format(L=L+1))
        MRRTF[L] = get_mrvbf(L, VF_Lminus1=VF_RF[L-1], VF_L=VF_RF[L], t=t_pctl_r)

    # Update flatness for step 2 with combined flatness from F1 and F2 (Equation 10)
    grass.message("Calculation  of combined flatness index CF{L}...".format(L=L+1))
    F[L] = get_combined_flatness(L, F[L-1], F[L])

    ##################################################################################
    # Remaining steps
    # DEM_1_1 refers to scale (smoothing) and resolution (cell size)
    # so that DEM_L1_L-1 refers to smoothing of current step,
    # but resolution of previous step

    for L in range(2, levels):

        # Halve the slope threshold and coarsen resolution 3x per step.
        t_slope /= 2.0
        Xres_step.append(Xres_step[L-1] * 3)
        Yres_step.append(Yres_step[L-1] * 3)
        radi = 6

        # delete temporary maps from L-2
        for tmap in TMP_RAST[L-2]:
            g.remove(type='raster', name=tmap, flags='f', quiet=True)

        grass.message(os.linesep)
        grass.message("Step {L}".format(L=L+1))
        grass.message("------")

        # Coarsen resolution to resolution of prevous step (step L-1) and smooth DEM
        if L >= 3:
            grass.run_command('g.region', ewres = Xres_step[L-1], nsres = Yres_step[L-1])
            grass.message('Coarsening resolution to ew_res={e} and ns_res={n}...'.format(
                e=Xres_step[L-1], n=Yres_step[L-1]))

        grass.message("DEM smoothing 11 x 11 windows with Gaussian smoothing kernel (sigma) 3...")
        DEM.append(get_smoothed_dem(L, DEM[L-1]))

        # Calculate slope
        grass.message("Calculation of slope...")
        slope[L] = get_slope(L, DEM[L])

        # Refine slope to base resolution
        if L >= 3:
            grass.message('Resampling slope back to base resolution...')
            slope[L] = refine(L, slope[L], current_region, method='bilinear')

        # Coarsen resolution to current step L and calculate PCTL
        grass.run_command('g.region', ewres=Xres_step[L], nsres=Yres_step[L])
        DEM[L] = refine(L, DEM[L], Region(), method = 'average')
        grass.message("Calculation of elevation percentile PCTL{L}...".format(L=L+1))
        PCTL[L] = get_percentile(L, DEM[L], radi, moving_window_square)

        # Refine PCTL to base resolution
        grass.message("Resampling PCTL{L} to base resolution...".format(L=L+1))
        PCTL[L] = refine(L, PCTL[L], current_region, method='bilinear')

        # Calculate flatness F at the base resolution
        grass.message("Calculate F{L} at base resolution...".format(L=L+1))
        F[L] = get_flatness(L, slope[L], t_slope, p_slope)

        # Update flatness with combined flatness CF from the previous step
        grass.message("Calculate combined flatness CF{L} at base resolution...".format(L=L+1))
        F[L] = get_combined_flatness(L, F1=F[L-1], F2=F[L])

        # Calculate preliminary valley flatness index PVF at the base resolution
        grass.message("Calculate preliminary valley flatness index PVF{L} at base resolution...".format(L=L+1))
        PVF[L] = get_prelim_flatness(L, F[L], PCTL[L], t_pctl_v, p_pctl)
        if mrrtf != '':
            grass.message("Calculate preliminary ridge top flatness index PRF{L} at base resolution...".format(L=L+1))
            PVF_RF[L] = get_prelim_flatness_rf(L, F[L], PCTL[L], t_pctl_r, p_pctl)

        # Calculate valley flatness index VF
        grass.message("Calculate valley flatness index VF{L} at base resolution...".format(L=L+1))
        VF[L] = get_valley_flatness(L, PVF[L], t_vf, p_slope)
        if mrrtf != '':
            grass.message("Calculate ridge top flatness index RF{L} at base resolution...".format(L=L+1))
            VF_RF[L] = get_valley_flatness(L, PVF_RF[L], t_rf, p_slope)

        # Calculation of MRVBF
        # Note: from step 3 onward the previous MRVBF/MRRTF (not VF) is
        # fed in as VF_Lminus1, accumulating the index across steps.
        grass.message("Calculation of MRVBF{L}...".format(L=L+1))
        MRVBF[L] = get_mrvbf(L, VF_Lminus1=MRVBF[L-1], VF_L=VF[L], t=t_pctl_v)
        if mrrtf != '':
            grass.message("Calculation of MRRTF{L}...".format(L=L+1))
            MRRTF[L] = get_mrvbf(L, VF_Lminus1=MRRTF[L-1], VF_L=VF_RF[L], t=t_pctl_r)

    # Output final MRVBF
    grass.mapcalc("$x = $y", x = mrvbf, y=MRVBF[L])

    if mrrtf != '':
        grass.mapcalc("$x = $y", x = mrrtf, y=MRRTF[L])
#!/usr/bin/env python
# Driver script: cloud/shadow/snow detection (TMask) over a folder of
# Landsat band rasters, using GRASS GIS for raster management.

import ImageProcessing
import grass.script as gscript
from grass.pygrass.modules.shortcuts import general as g

# Input folder with the scene subsets, and the Landsat bands to load.
folder = 'C:\\Documents\\My_works\\Forest_and_rubber_plantation_monitoring\\Cloud shadow snow detection\\Cutting_qqq'
channels = ['B3', 'B5', 'B6', 'BQA']

# Start from a clean mapset: force-remove every existing raster
# (flag 'b' treats the listed names as basenames).
rasters = gscript.parse_command('g.list' ,type='raster').keys()
g.remove(type='raster', name=rasters, flags='fb')

# Import the image collection, then convert the optical bands to
# top-of-atmosphere reflectance.
rasters = ImageProcessing.loadCollection(folder=folder, channels=channels)

ImageProcessing.TOAR(rasters, ['B3','B5','B6'])

# Run the TMask algorithm over the loaded time series.
T = ImageProcessing.TMaskAlgorithm(images=rasters)

print(T)
print('OK')

  'Neotropical_Hansen_treecoverlossperyear_wgs84_2017@PERMANENT < ' + str(years[i]) + ', 0, Neotropic_Hansen_percenttreecoverd_2000_wgs84@PERMANENT)'
  r.mapcalc(expr, overwrite = True)
  
  # thresholds for binary values of natural vegetation
  thresholds = [70, 80, 90]
    
  # loop to cut for each one and account for deforestation
  for tr in thresholds:
    
    # Hansen bin
    r.mapcalc(comm_code[i]+'_treecover_GFW_2000_deforestation_threshold'+str(tr)+'_binary = if('+comm_code[i]+'_treecover_GFW_2000_deforestation > '+str(tr)+', 1, 0)', 
      overwrite = True)
         
  # remove mask and vector_cat to avoid problems
  r.mask(flags = 'r')
  g.remove(type = 'vector', name = 'vector_cat', flags = 'f')


#---------------------------------------
# exporting all output

# output folder
pa = r'D:\bernardo\00_academico\01_artigos\ms_Lucas_world_landscape_metrics\maps'
os.chdir(pa)

# list maps
list_maps = grass.list_grouped(type = 'raster', pattern = 'com_*')[mapset_name]

# export
for i in list_maps:
  
Ejemplo n.º 25
0
def euclidean_distance_fields(prefix, region, overwrite=False):
    """
    Generate euclidean distance fields from map corner and centre coordinates

    One raster named 'distance_to_<prefix>_<location>' is produced per
    reference location (four corners plus the centre).  The temporary
    point vector and raster maps used along the way are removed.

    Parameters
    ----------
    prefix : str
        Name to use as prefix to save distance maps

    region : grass.pygrass.gis.region.Region
        Region whose extent and resolution define the reference coordinates

    overwrite : bool
        Whether to overwrite existing maps
    """

    # Reference coordinates sit at the *cell centres* of the corner cells
    # (hence the half-resolution offsets), plus the geometric centre.
    point_topleft = Point(region.west + region.ewres / 2,
                          region.north - region.nsres / 2)
    point_topright = Point(region.east - region.ewres / 2,
                           region.north - region.nsres / 2)
    point_lowerleft = Point(region.west + region.ewres / 2,
                            region.south + region.nsres / 2)
    point_lowerright = Point(region.east - region.ewres / 2,
                             region.south + region.nsres / 2)
    point_centre = Point(
        region.west + (region.east - region.west) / 2,
        region.south + (region.north - region.south) / 2,
    )

    points = {
        "topleft": point_topleft,
        "topright": point_topright,
        "lowerleft": point_lowerleft,
        "lowerright": point_lowerright,
        "centre": point_centre,
    }

    for name, p in points.items():

        point_name = "_".join([prefix, name])

        # Write a one-point vector map with a minimal attribute table.
        vect = VectorTopo(name=point_name)
        vect.open(
            mode="w",
            tab_name=point_name,
            tab_cols=[("cat", "INTEGER PRIMARY KEY"), ("name", "TEXT")],
        )
        vect.write(p, ("point", ))
        vect.table.conn.commit()
        vect.close()

        # Rasterize the point, then grow a euclidean distance surface
        # outward from it.
        gvect.to_rast(
            input=point_name,
            type="point",
            use="val",
            output=point_name,
            overwrite=overwrite,
        )
        grast.grow_distance(point_name,
                            distance="distance_to_" + point_name,
                            overwrite=overwrite)

        # Bug fix: the original called g.remove twice with type="raster",
        # removing the temporary raster twice and leaking the temporary
        # point *vector* map.  Remove both the raster and the vector.
        g.remove(name=point_name, type="raster", flags="f")
        g.remove(name=point_name, type="vector", flags="f")
# Keep only the source water maps (drop previously derived '*dist*' maps).
water_maps = [i for i in water_maps if 'dist' not in i]

for i in water_maps:

    # NOTE: Python 2 print statement -- this script is py2-only as written.
    print i

    # Match the region to the current raster, then keep only the cells of
    # interest: value 1 in 'Drainage' rasters, value 2 otherwise
    # (presumably the water class codes differ between sources -- confirm).
    g.region(raster=i, flags='p')
    if 'Drainage' in i:
        r.mapcalc(i + '_1null = if(' + i + ' == 1, 1, null())', overwrite=True)
    else:
        r.mapcalc(i + '_1null = if(' + i + ' == 2, 1, null())', overwrite=True)

    # Distance surface to the nearest retained (non-null) cell, then
    # drop the temporary mask raster.
    r.grow_distance(input=i + '_1null',
                    distance=i + '_water_dist_m',
                    overwrite=True)
    g.remove(type='raster', name=i + '_1null', flags='f')

#------
# export

output_folder = r'E:\_neojaguardatabase\Buffer70_zones\variables_30m'

# Outpt folder
os.chdir(output_folder)

for i in regions:

    os.chdir(output_folder)

    ind = i.split('buffer')[1]
    buffer_dir = 'ind' + ind
Ejemplo n.º 27
0
def main():
    """Compute the multi-resolution valley bottom flatness (MRVBF) index
    and, optionally, the ridge top flatness (MRRTF) index from an
    elevation raster.

    All inputs are read from the module-level ``options``/``flags``
    dictionaries (GRASS parser); result rasters are written under the
    names given by the ``mrvbf`` and ``mrrtf`` options. Temporary
    rasters are registered per generalization level in the global
    ``TMP_RAST`` for removal by ``cleanup()``.
    """
    r_elevation = options["elevation"]
    mrvbf = options["mrvbf"].split("@")[0]
    mrrtf = options["mrrtf"].split("@")[0]
    t_slope = float(options["t_slope"])
    t_pctl_v = float(options["t_pctl_v"])
    t_pctl_r = float(options["t_pctl_r"])
    # BUG FIX: t_vf previously read the "t_rf" option (copy-paste from the
    # line below), silently ignoring the user-supplied "t_vf" threshold.
    t_vf = float(options["t_vf"])
    t_rf = float(options["t_rf"])
    p_slope = float(options["p_slope"])
    p_pctl = float(options["p_pctl"])
    moving_window_square = flags["s"]
    min_cells = int(options["min_cells"])

    global current_region, TMP_RAST, L
    TMP_RAST = {}
    current_region = Region()

    # some checks
    if (t_slope <= 0 or t_pctl_v <= 0 or t_pctl_r <= 0 or t_vf <= 0
            or t_rf <= 0 or p_slope <= 0 or p_pctl <= 0):
        gs.fatal("Parameter values cannot be <= 0")

    if min_cells < 2:
        gs.fatal(
            "Minimum number of cells in generalized DEM cannot be less than 2")

    if min_cells > current_region.cells:
        gs.fatal(
            "Minimum number of cells in the generalized DEM cannot exceed the ungeneralized number of cells"
        )

    # calculate the number of levels by repeatedly coarsening the region
    # 3x in each dimension until fewer than min_cells remain
    levels = 2
    remaining_cells = current_region.cells
    while remaining_cells >= min_cells:
        levels += 1
        g.region(nsres=Region().nsres * 3, ewres=Region().ewres * 3)
        remaining_cells = Region().cells
    current_region.write()

    if levels < 3:
        gs.fatal(
            "MRVBF algorithm requires a greater level of generalization. Reduce number of min_cells or use a larger computational region."
        )

    gs.message("Parameter Settings")
    gs.message("------------------")
    gs.message("min_cells = %d will result in %d generalization steps" %
               (min_cells, levels))

    # intermediate outputs, one entry per generalization step
    Xres_step = list()
    Yres_step = list()

    DEM = list()
    SLOPE = list()
    F = list()
    PCTL = list()
    PVF = list()
    PVF_RF = list()
    VF = list()
    VF_RF = list()
    MRVBF = list()
    MRRTF = list()

    # step 1 at base resolution -------------------------------------------------------
    L = 0
    TMP_RAST[L] = list()
    Xres_step.append(current_region.ewres)
    Yres_step.append(current_region.nsres)
    DEM.append(r_elevation)
    radius = 3

    step_message(L, Xres_step[L], Yres_step[L], current_region.cells, t_slope)

    # calculation of slope (S1) and calculation of flatness (F1) (equation 2)
    SLOPE.append(calc_slope(DEM[L]))
    F.append(flatness(SLOPE[L], t_slope, p_slope))

    # calculation of elevation percentile PCTL for step 1
    PCTL.append(elevation_percentile(DEM[L], radius, moving_window_square))

    # transform elevation percentile to local lowness for step 1 (equation 3)
    PVF.append(prelim_flatness_valleys(F[L], PCTL[L], t_pctl_v, p_pctl))

    if mrrtf != "":
        PVF_RF.append(prelim_flatness_ridges(F[L], PCTL[L], t_pctl_r, p_pctl))

    # calculation of the valley flatness step 1 VF1 (equation 4)
    VF.append(valley_flatness(PVF[L], t_vf, p_slope))
    MRVBF.append(None)

    if mrrtf != "":
        VF_RF.append(valley_flatness(PVF_RF[L], t_rf, p_slope))
        MRRTF.append(None)

    # step 2 at base scale resolution -------------------------------------------------
    L = 1
    TMP_RAST[L] = list()
    Xres_step.append(current_region.ewres)
    Yres_step.append(current_region.nsres)
    DEM.append(r_elevation)
    t_slope /= 2.0
    radius = 6

    step_message(L, Xres_step[L], Yres_step[L], current_region.cells, t_slope)

    # calculation of flatness for step 2 (equation 5); slope is reused
    # from step 1 since the resolution is unchanged
    SLOPE.append(SLOPE[L - 1])
    F.append(flatness(SLOPE[L], t_slope, p_slope))

    # calculation of elevation percentile PCTL for step 2 (radius of 6 cells)
    PCTL.append(elevation_percentile(r_elevation, radius,
                                     moving_window_square))

    # PVF for step 2 (equation 6)
    PVF.append(prelim_flatness_valleys(F[L], PCTL[L], t_pctl_v, p_pctl))
    if mrrtf != "":
        PVF_RF.append(prelim_flatness_ridges(F[L], PCTL[L], t_pctl_r, p_pctl))

    # calculation of the valley flatness VF for step 2 (equation 7)
    VF.append(valley_flatness(PVF[L], t_vf, p_slope))
    if mrrtf != "":
        VF_RF.append(valley_flatness(PVF_RF[L], t_rf, p_slope))

    # calculation of MRVBF for step 2
    MRVBF.append(calc_mrvbf(VF1=VF[L - 1], VF2=VF[L], t=t_pctl_v))
    if mrrtf != "":
        MRRTF.append(calc_mrvbf(VF1=VF_RF[L - 1], VF2=VF_RF[L], t=t_pctl_r))

    # update flatness for step 2 with combined flatness from F1 and F2 (equation 10)
    F[L] = combined_flatness(F[L - 1], F[L])

    # remaining steps -----------------------------------------------------------------
    # for steps >= 2, each step uses the smoothing radius of the current step
    # but at the dem resolution of the previous step
    remaining_cells = current_region.cells

    while remaining_cells >= min_cells:
        L += 1
        TMP_RAST[L] = list()
        t_slope /= 2.0
        Xres_step.append(Xres_step[L - 1] * 3)
        Yres_step.append(Yres_step[L - 1] * 3)
        radius = 6

        # delete temporary maps from L-2 (no longer needed by any step)
        for tmap in TMP_RAST[L - 2]:
            if len(gs.find_file(tmap)["fullname"]) > 0:
                g.remove(type="raster", name=tmap, flags="f", quiet=True)

        # coarsen resolution to resolution of previous step (step L-1) and smooth DEM
        if L > 2:
            g.region(ewres=Xres_step[L - 1], nsres=Yres_step[L - 1])

        step_message(L, Xres_step[L], Yres_step[L], remaining_cells, t_slope)
        DEM.append(smooth_dem(DEM[L - 1]))

        # calculate slope at coarser resolution
        SLOPE.append(calc_slope(DEM[L]))

        # refine slope back to base resolution
        if L > 2:
            SLOPE[L] = refine(SLOPE[L], current_region, method="bilinear")

        # coarsen resolution to current step L and calculate PCTL
        g.region(ewres=Xres_step[L], nsres=Yres_step[L])
        remaining_cells = Region().cells
        DEM[L] = refine(DEM[L], Region(), method="average")
        PCTL.append(elevation_percentile(DEM[L], radius, moving_window_square))

        # refine PCTL to base resolution
        PCTL[L] = refine(PCTL[L], current_region, method="bilinear")

        # calculate flatness F at the base resolution
        F.append(flatness(SLOPE[L], t_slope, p_slope))

        # update flatness with combined flatness CF from the previous step
        F[L] = combined_flatness(F1=F[L - 1], F2=F[L])

        # calculate preliminary valley flatness index PVF at the base resolution
        PVF.append(prelim_flatness_valleys(F[L], PCTL[L], t_pctl_v, p_pctl))
        if mrrtf != "":
            PVF_RF.append(
                prelim_flatness_ridges(F[L], PCTL[L], t_pctl_r, p_pctl))

        # calculate valley flatness index VF
        VF.append(valley_flatness(PVF[L], t_vf, p_slope))
        if mrrtf != "":
            VF_RF.append(valley_flatness(PVF_RF[L], t_rf, p_slope))

        # calculation of MRVBF (combines the previous step's MRVBF with VF)
        MRVBF.append(calc_mrvbf(VF1=MRVBF[L - 1], VF2=VF[L], t=t_pctl_v))
        if mrrtf != "":
            MRRTF.append(calc_mrvbf(VF1=MRRTF[L - 1], VF2=VF_RF[L],
                                    t=t_pctl_r))

    # output final MRVBF --------------------------------------------------------------
    current_region.write()
    gs.mapcalc("$x = $y", x=mrvbf, y=MRVBF[L])

    if mrrtf != "":
        gs.mapcalc("$x = $y", x=mrrtf, y=MRRTF[L])
Ejemplo n.º 28
0
def cleanup():
    """Force-remove every temporary raster registered in RAST_REMOVE."""
    for tmp_name in RAST_REMOVE:
        gg.remove(type="raster", name=tmp_name, flags="f", quiet=True)
Ejemplo n.º 29
0
def main():
    """Compute the multi-resolution valley bottom flatness (MRVBF) index
    and, optionally, the ridge top flatness (MRRTF) index from an
    elevation raster.

    All inputs come from the module-level ``options``/``flags``
    dictionaries (GRASS parser); result rasters are written under the
    names given by the ``mrvbf`` and ``mrrtf`` options. Temporary
    rasters are registered per generalization step in the global
    ``TMP_RAST``.
    """
    r_elevation = options['elevation']
    mrvbf = options['mrvbf'].split('@')[0]
    mrrtf = options['mrrtf'].split('@')[0]
    t_slope = float(options['t_slope'])
    t_pctl_v = float(options['t_pctl_v'])
    t_pctl_r = float(options['t_pctl_r'])
    # BUG FIX: t_vf previously read the 't_rf' option (copy-paste from the
    # line below), silently ignoring the user-supplied 't_vf' threshold.
    t_vf = float(options['t_vf'])
    t_rf = float(options['t_rf'])
    p_slope = float(options['p_slope'])
    p_pctl = float(options['p_pctl'])
    moving_window_square = flags['s']
    min_cells = int(options['min_cells'])

    global current_region
    global TMP_RAST
    TMP_RAST = {}
    current_region = Region()

    # Some checks
    if t_slope <= 0 or t_pctl_v <= 0 or t_pctl_r <= 0 or t_vf <= 0 or t_rf <= 0 or \
        p_slope <= 0 or p_pctl <= 0:
        grass.fatal('Parameter values cannot be <= 0')

    if min_cells < 1:
        grass.fatal('Minimum number of cells in generalized DEM cannot be less than 1')

    if min_cells > current_region.cells:
        grass.fatal('Minimum number of cells in the generalized DEM cannot exceed the ungeneralized number of cells')

    ###########################################################################
    # Calculate number of levels: each generalization step coarsens the
    # region 3x in each dimension, hence the log base 3

    levels = math.ceil(-math.log(float(min_cells)/current_region.cells) / math.log(3) - 2)
    levels = int(levels)

    if levels < 3:
        grass.fatal('MRVBF algorithm requires a greater level of generalization. Reduce number of min_cells')

    grass.message('Parameter Settings')
    grass.message('------------------')
    grass.message('min_cells = %d will result in %d generalization steps' % (min_cells, levels))

    TMP_RAST = {k: [] for k in range(levels)}

    ###########################################################################
    # Intermediate outputs, one slot per generalization step
    Xres_step, Yres_step, DEM = [], [], []
    slope, F, PCTL, PVF, PVF_RF = [0]*levels, [0]*levels, [0]*levels, [0]*levels, [0]*levels
    VF, VF_RF, MRVBF, MRRTF = [0]*levels, [0]*levels, [0]*levels, [0]*levels

    ###########################################################################
    # Step 1 (L=0)
    # Base scale resolution
    L = 0
    Xres_step.append(current_region.ewres)
    Yres_step.append(current_region.nsres)
    DEM.append(r_elevation)
    radi = 3

    g.message(os.linesep)
    g.message("Step {L}".format(L=L+1))
    g.message("------")

    # Calculation of slope (S1) and calculation of flatness (F1) (Equation 2)
    grass.message("Calculation of slope and transformation to flatness F{L}...".format(L=L+1))
    slope[L] = get_slope(L, DEM[L])
    F[L] = get_flatness(L, slope[L], t_slope, p_slope)

    # Calculation of elevation percentile PCTL for step 1
    grass.message("Calculation of elevation percentile PCTL{L}...".format(L=L+1))
    PCTL[L] = get_percentile(L, DEM[L], radi, moving_window_square)

    # Transform elevation percentile to local lowness for step 1 (Equation 3)
    grass.message("Calculation of preliminary valley flatness index PVF{L}...".format(L=L+1))
    PVF[L] = get_prelim_flatness(L, F[L], PCTL[L], t_pctl_v, p_pctl)
    if mrrtf != '':
        grass.message("Calculation of preliminary ridge top flatness index PRF{L}...".format(L=L+1))
        PVF_RF[L] = get_prelim_flatness_rf(L, F[L], PCTL[L], t_pctl_r, p_pctl)

    # Calculation of the valley flatness step 1 VF1 (Equation 4)
    grass.message("Calculation of valley flatness VF{L}...".format(L=L+1))
    VF[L] = get_valley_flatness(L, PVF[L], t_vf, p_slope)
    if mrrtf != '':
        grass.message("Calculation of ridge top flatness RF{L}...".format(L=L+1))
        VF_RF[L] = get_valley_flatness(L, PVF_RF[L], t_rf, p_slope)

    ##################################################################################
    # Step 2 (L=1)
    # Base scale resolution
    L = 1
    Xres_step.append(current_region.ewres)
    Yres_step.append(current_region.nsres)
    DEM.append(r_elevation)
    t_slope /= 2.0
    radi = 6

    grass.message(os.linesep)
    grass.message("Step {L}".format(L=L+1))
    grass.message("------")

    # Calculation of flatness for step 2 (Equation 5)
    # The second step commences the same way with the original DEM at its base resolution,
    # using a slope threshold ts,2 half of ts,1:
    grass.message("Calculation of flatness F{L}...".format(L=L+1))
    F[L] = get_flatness(L, slope[L-1], t_slope, p_slope)

    # Calculation of elevation percentile PCTL for step 2 (radius of 6 cells)
    grass.message("Calculation of elevation percentile PCTL{L}...".format(L=L+1))
    PCTL[L] = get_percentile(L, r_elevation, radi, moving_window_square)

    # PVF for step 2 (Equation 6)
    grass.message("Calculation of preliminary valley flatness index PVF{L}...".format(L=L+1))
    PVF[L] = get_prelim_flatness(L, F[L], PCTL[L], t_pctl_v, p_pctl)
    if mrrtf != '':
        grass.message("Calculation of preliminary ridge top flatness index PRF{L}...".format(L=L+1))
        PVF_RF[L] = get_prelim_flatness_rf(L, F[L], PCTL[L], t_pctl_r, p_pctl)

    # Calculation of the valley flatness VF for step 2 (Equation 7)
    grass.message("Calculation of valley flatness VF{L}...".format(L=L+1))
    VF[L] = get_valley_flatness(L, PVF[L], t_vf, p_slope)
    if mrrtf != '':
        grass.message("Calculation of ridge top flatness RF{L}...".format(L=L+1))
        VF_RF[L] = get_valley_flatness(L, PVF_RF[L], t_rf, p_slope)

    # Calculation of MRVBF for step 2
    grass.message("Calculation of MRVBF{L}...".format(L=L+1))
    MRVBF[L] = get_mrvbf(L, VF_Lminus1=VF[L-1], VF_L=VF[L], t=t_pctl_v)
    if mrrtf != '':
        grass.message("Calculation of MRRTF{L}...".format(L=L+1))
        MRRTF[L] = get_mrvbf(L, VF_Lminus1=VF_RF[L-1], VF_L=VF_RF[L], t=t_pctl_r)

    # Update flatness for step 2 with combined flatness from F1 and F2 (Equation 10)
    grass.message("Calculation  of combined flatness index CF{L}...".format(L=L+1))
    F[L] = get_combined_flatness(L, F[L-1], F[L])

    ##################################################################################
    # Remaining steps
    # DEM_1_1 refers to scale (smoothing) and resolution (cell size)
    # so that DEM_L1_L-1 refers to smoothing of current step,
    # but resolution of previous step

    for L in range(2, levels):

        t_slope /= 2.0
        Xres_step.append(Xres_step[L-1] * 3)
        Yres_step.append(Yres_step[L-1] * 3)
        radi = 6

        # delete temporary maps from L-2 (no longer needed by any step)
        for tmap in TMP_RAST[L-2]:
            g.remove(type='raster', name=tmap, flags='f', quiet=True)

        grass.message(os.linesep)
        grass.message("Step {L}".format(L=L+1))
        grass.message("------")

        # Coarsen resolution to resolution of prevous step (step L-1) and smooth DEM
        if L >= 3:
            grass.run_command('g.region', ewres = Xres_step[L-1], nsres = Yres_step[L-1])
            grass.message('Coarsening resolution to ew_res={e} and ns_res={n}...'.format(
                e=Xres_step[L-1], n=Yres_step[L-1]))

        grass.message("DEM smoothing 11 x 11 windows with Gaussian smoothing kernel (sigma) 3...")
        DEM.append(get_smoothed_dem(L, DEM[L-1]))

        # Calculate slope
        grass.message("Calculation of slope...")
        slope[L] = get_slope(L, DEM[L])

        # Refine slope to base resolution
        if L >= 3:
            grass.message('Resampling slope back to base resolution...')
            slope[L] = refine(L, slope[L], current_region, method='bilinear')

        # Coarsen resolution to current step L and calculate PCTL
        grass.run_command('g.region', ewres=Xres_step[L], nsres=Yres_step[L])
        DEM[L] = refine(L, DEM[L], Region(), method = 'average')
        grass.message("Calculation of elevation percentile PCTL{L}...".format(L=L+1))
        PCTL[L] = get_percentile(L, DEM[L], radi, moving_window_square)

        # Refine PCTL to base resolution
        grass.message("Resampling PCTL{L} to base resolution...".format(L=L+1))
        PCTL[L] = refine(L, PCTL[L], current_region, method='bilinear')

        # Calculate flatness F at the base resolution
        grass.message("Calculate F{L} at base resolution...".format(L=L+1))
        F[L] = get_flatness(L, slope[L], t_slope, p_slope)

        # Update flatness with combined flatness CF from the previous step
        grass.message("Calculate combined flatness CF{L} at base resolution...".format(L=L+1))
        F[L] = get_combined_flatness(L, F1=F[L-1], F2=F[L])

        # Calculate preliminary valley flatness index PVF at the base resolution
        grass.message("Calculate preliminary valley flatness index PVF{L} at base resolution...".format(L=L+1))
        PVF[L] = get_prelim_flatness(L, F[L], PCTL[L], t_pctl_v, p_pctl)
        if mrrtf != '':
            grass.message("Calculate preliminary ridge top flatness index PRF{L} at base resolution...".format(L=L+1))
            PVF_RF[L] = get_prelim_flatness_rf(L, F[L], PCTL[L], t_pctl_r, p_pctl)

        # Calculate valley flatness index VF
        grass.message("Calculate valley flatness index VF{L} at base resolution...".format(L=L+1))
        VF[L] = get_valley_flatness(L, PVF[L], t_vf, p_slope)
        if mrrtf != '':
            grass.message("Calculate ridge top flatness index RF{L} at base resolution...".format(L=L+1))
            VF_RF[L] = get_valley_flatness(L, PVF_RF[L], t_rf, p_slope)

        # Calculation of MRVBF (combines the previous step's MRVBF with VF)
        grass.message("Calculation of MRVBF{L}...".format(L=L+1))
        MRVBF[L] = get_mrvbf(L, VF_Lminus1=MRVBF[L-1], VF_L=VF[L], t=t_pctl_v)
        if mrrtf != '':
            grass.message("Calculation of MRRTF{L}...".format(L=L+1))
            MRRTF[L] = get_mrvbf(L, VF_Lminus1=MRRTF[L-1], VF_L=VF_RF[L], t=t_pctl_r)

    # Output final MRVBF
    grass.mapcalc("$x = $y", x = mrvbf, y=MRVBF[L])

    if mrrtf != '':
        grass.mapcalc("$x = $y", x = mrrtf, y=MRRTF[L])
Ejemplo n.º 30
0
                    New_Qa[arc] = Qa[arc] * Dir[arc] + Dels[id] - Dels[int(
                        not id)]
                elif arc == 'DB':
                    New_Qa[arc] = Qa[arc] * Dir[arc] + Dels[id] - Dels[int(
                        not id)]
                else:
                    New_Qa[arc] = Qa[arc] * Dir[arc] + Dels[id]
        Generate(New_Qa)
    else:
        print("\n\n")
        print("Number of Iterations: %s\n" % Iterations)
        pprint(Qa)


# Cleaning the Workspace
# NOTE(review): flags is passed as a list here but as a plain string
# elsewhere in this file (e.g. flags='f'); confirm the list form is
# accepted by this g.remove wrapper.
g.remove(type='vector', pattern='*', flags=['f'])

# Importing the ShapeFiles to GRASS
# NOTE(review): `O` is presumably a flags variable defined earlier in
# the script (not visible in this excerpt) — confirm.
for lr in Layers:
    v.in_ogr(input=Join('Test1', lr), output=lr, key='id', flags=O)

# Adding columns `color` and `size` to layers `Buildings` and `TankPoints`
# such that it can be used for artistic map layout.
v.db_addcolumn(map='Buildings', columns='color varchar(10)')
v.db_addcolumn(map='TankPoints', columns='size double precision')
# Assign one colour per building id (ids assumed 1-based to match Colors).
for id, color in enumerate(Colors, start=1):
    whr = 'id like %s' % id
    v.db_update(map='Buildings', column='color', where=whr, value=color)
# Default symbol size 10; the tank with id 1 is drawn larger.
v.db_update(map='TankPoints', column='size', value=10)
v.db_update(map='TankPoints', column='size', value=20, where='id like 1')