Beispiel #1
0
def run_mapcalc3d(expr):
    """Evaluate a single r3.mapcalc expression (worker for parallel runs).

    Exits the process with status 1 when the module call fails.
    """
    try:
        gscript.run_command(
            "r3.mapcalc",
            expression=expr,
            overwrite=gscript.overwrite(),
            quiet=True,
        )
    except CalledModuleError:
        exit(1)
def create_db(driver, database):
    """Create the target attribute database if it does not already exist.

    :param driver: database driver name ('dbf', 'sqlite', or other)
    :param database: database path/name (may contain GRASS variables,
        resolved via substitute_db)
    :return: True when a new dbf directory was created, False otherwise

    Bug fix: the original mixed tabs and spaces on the dbf branch
    (a TabError under Python 3); indentation normalized to spaces.
    """
    subst_database = substitute_db(database)
    if driver == 'dbf':
        path = subst_database
        # check if destination directory exists
        if not os.path.isdir(path):
            # create dbf database
            os.makedirs(path)
            return True
        return False

    if driver == 'sqlite':
        path = os.path.dirname(subst_database)
        # check if destination directory exists
        if not os.path.isdir(path):
            os.makedirs(path)

    # nothing to do when the database is already registered with the driver
    if subst_database in grass.read_command('db.databases', quiet=True,
                                            driver=driver).splitlines():
        return False

    grass.info(_("Target database doesn't exist, "
                 "creating a new database using <%s> driver...") % driver)
    try:
        grass.run_command('db.createdb', driver=driver,
                          database=subst_database)
    except CalledModuleError:
        grass.fatal(_("Unable to create database <%s> by driver <%s>") %
                    (subst_database, driver))

    return False
Beispiel #3
0
def convert_map(output, variable):
    """Convert imported raster map unit and format.

    Rewrites *output* in place as float(output * a + b); the scale/offset
    pair is chosen from the module flags and the variable name.  When no
    flag applies, the map is left untouched.

    Bug fix: the -c message previously misspelled "Celsius" as "Celcius".
    """

    # prepare for unit conversion
    if flags['c'] and variable in ['tmin', 'tmax', 'tmean']:
        grass.message("Converting {} to degree Celsius...".format(output))
        a = 0.1
        b = 0
    elif flags['k'] and variable in ['tmin', 'tmax', 'tmean']:
        grass.message("Converting {} to Kelvin...".format(output))
        a = 0.1
        b = 273.15
    elif flags['y'] and variable == 'prec':
        grass.message("Converting {} to meter per year...".format(output))
        a = 0.012
        b = 0
    elif flags['f']:
        grass.message("Converting {} to floating-point...".format(output))
        a = 1
        b = 0
    else:
        a = None
        b = None

    # convert unit and format (a=1, b=0 still triggers the float() cast)
    if a or b:
        grass.use_temp_region()
        grass.run_command('g.region', rast=output)
        grass.mapcalc('$output=float($output*$a+$b)', a=a, b=b, output=output,
                      overwrite=True)
        grass.del_temp_region()
def import_data(directory, file_filter):
    """Import every .tif in *directory* whose name matches *file_filter*.

    cfmask maps additionally receive a fixed color table.
    """
    # collect files to be imported
    files = [
        f for f in os.listdir(directory)
        if f.endswith(".tif") and any(pattern in f for pattern in file_filter)
    ]

    # import selected files into GRASS
    count = len(files)
    for i, f in enumerate(files, start=1):
        grass.message("Importing <{0}> ({1}/{2})...".format(f, i, count))
        grass.percent(i, count, 2)
        map_name = os.path.splitext(f)[0]
        # r.external (~1 s) would only link the data; r.in.gdal (~4 s) imports it
        imodule = 'r.in.gdal'
        grass.run_command(imodule, input=os.path.join(directory, f),
                          output=map_name, quiet=True, overwrite=True)
        # set color table for cfmask map
        # categories: 0 clear, 1 water, 2 shadow, 3 snow, 4 cloud
        if 'cfmask' in map_name:
            colors = """0 black
1 blue
2 grey
3 white
4 149 186 224"""
            Module('r.colors', map=map_name, rules='-', quiet=True,
                   stdin_=colors)
Beispiel #5
0
def cleanup():
    """Remove the temporary raster and hull vector, if they were created."""
    for map_name, map_type in ((tmp, 'raster'), (tmp_hull, 'vector')):
        if map_name:
            grass.run_command('g.remove', flags='f', type=map_type,
                              name=map_name, quiet=True)
Beispiel #6
0
    def execute(self):
        """Run the full processing pipeline in its required order.

        With the statistics-only flag (self.s) set, prints statistics and
        exits immediately.  Stream carving and DEM processing are optional
        steps; the snapping/catchment/subbasin chain always runs.
        Temporary maps (pattern '*__*') are removed unless self.k (keep)
        is set.
        """
        # execute
        if self.s:
            self.print_statistics()
            sys.exit()

        # carve streams
        if 'streamcarve' in self.options:
            gm('Carving streams...')
            self.carve_streams()

        # process DEM if need be
        if not self.d:
            self.process_DEM()

        # always process
        self.snap_stations()
        self.make_catchments()
        self.make_subbasins()
        self.postprocess_catchments()
        self.postprocess_subbasins()
        self.write_stations_snapped()
        self.print_statistics()

        # clean
        if not self.k:
            grass.run_command('g.remove', type='raster,vector', pattern='*__*',
                              flags='fb', quiet=True)
        return
Beispiel #7
0
def import_file(filename, archive, output, region):
    """Extract one binary file from *archive* and import it with r.in.bin.

    :param filename: member name inside the zip archive
    :param archive: path to the zip archive
    :param output: name of the raster map to create
    :param region: extra keyword arguments (region bounds) for r.in.bin

    Bug fix: the original opened ZipFile(archive) BEFORE checking
    os.path.isfile(archive), so a missing archive raised inside ZipFile
    and the "Could not find file" warning branch was unreachable.
    The existence check now runs first.
    """
    if not os.path.isfile(archive):
        grass.warning("Could not find file {}. Skipping"
                      .format(archive))
        return

    # open the archive
    with ZipFile(archive, 'r') as a:

        # create temporary file and directory
        tempdir = grass.tempdir()
        tempfile = os.path.join(tempdir, filename)

        # try to inflate and import the layer
        try:
            grass.message("Inflating {} ...".format(filename))
            a.extract(filename, tempdir)
            grass.message("Importing {} as {} ..."
                          .format(filename, output))
            grass.run_command('r.in.bin',  flags='s', input=tempfile,
                              output=output, bytes=2, anull=-9999,
                              **region)

        # if file is not present in the archive
        except KeyError:
            grass.warning("Could not find {} in {}. Skipping"
                          .format(filename, archive))

        # make sure temporary files are cleaned
        finally:
            grass.try_remove(tempfile)
            grass.try_rmdir(tempdir)
Beispiel #8
0
    def __del__(self):
        """Best-effort cleanup: restore the MASK, drop temporary rasters,
        and clear the GRASS_REGION override.

        NOTE(review): the `!= 0` checks assume run_command returns an exit
        status (legacy GRASS scripting API); in newer GRASS versions
        run_command raises on failure instead -- confirm against the
        targeted GRASS release.
        """
        # removes temporary mask, used for import transparent or warped temp_map
        if self.cleanup_mask:
            # clear temporary mask, which was set by module
            if grass.run_command("r.mask", quiet=True, flags="r") != 0:
                grass.fatal(_("%s failed") % "r.mask")

            # restore original mask, if exists
            if grass.find_file(self.opt_output + self.original_mask_suffix, element="cell", mapset=".")["name"]:
                if (
                    grass.run_command("g.copy", quiet=True, rast=self.opt_output + self.original_mask_suffix + ",MASK")
                    != 0
                ):
                    grass.fatal(_("%s failed") % "g.copy")

        # remove temporary created rasters
        if self.cleanup_layers:
            # collect only the helper rasters that actually exist
            maps = []
            for suffix in (".red", ".green", ".blue", ".alpha", self.original_mask_suffix):
                rast = self.opt_output + suffix
                if grass.find_file(rast, element="cell", mapset=".")["file"]:
                    maps.append(rast)

            if maps:
                grass.run_command("g.remove", quiet=True, flags="fb", type="rast", pattern=",".join(maps))

        # delete environmental variable which overrides region
        if "GRASS_REGION" in os.environ.keys():
            os.environ.pop("GRASS_REGION")
Beispiel #9
0
    def sum_one(request, response):
        """Import the input raster into GRASS, add 1 to every cell, and
        export the result as ./output.tif on the response.

        NOTE(review): the `!= 0` checks assume the legacy GRASS API where
        run_command returns an exit status.
        """
        # renamed from `input` to avoid shadowing the builtin
        in_raster = request.inputs["input"]
        # What do we need to assert a Complex input?
        # assert type(input) is text_type

        sys.path.append("/usr/lib/grass64/etc/python/")
        import grass.script as grass

        # Import the raster and set the region
        if grass.run_command("r.in.gdal", flags="o", out="input", input=in_raster) != 0:
            raise NoApplicableCode("Could not import cost map. Please check the WCS service.")

        if grass.run_command("g.region", flags="ap", rast="input") != 0:
            raise NoApplicableCode("Could not set GRASS region.")

        # Add 1
        # bug fix: the original re-used the g.region error message here
        if grass.mapcalc("$output = $input + $value", output="output", input="input", value=1.0) != 0:
            raise NoApplicableCode("Could not compute the output map with r.mapcalc.")

        # Export the result
        out = "./output.tif"
        if grass.run_command("r.out.gdal", input="output", type="Float32", output=out) != 0:
            raise NoApplicableCode("Could not export result from GRASS.")

        response.outputs["output"] = out
        return response
def AddCol(vect, t):
    """Add a double-precision column named *t* to the table of *vect*,
    unless a column of that name already exists."""
    describe = grass.read_command("db.describe", table=vect, flags="c",
                                  quiet=True)
    existing = [line.split(":")[1].lstrip()
                for line in describe.split("\n")[2:-1]]
    if "%s" % t not in existing:
        grass.run_command("v.db.addcolumn", map=vect,
                          columns="%s double" % t, quiet=True)
Beispiel #11
0
 def process(self):
     """Run the configured GRASS command on the from/to raster pair."""
     rast_pair = "%s,%s" % (self.from_rast, self.to_rast)
     grass.run_command(
         self.GRASS_COMMAND,
         overwrite=self.overwrite,
         rast=rast_pair,
     )
Beispiel #12
0
def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs):
    """Export each vector map as GML with v.out.ogr and add the .xml/.xsd
    pair to the tar archive, recording name and time range in list_file."""
    for record in rows:
        name = record["name"]
        start = record["start_time"]
        end = record["end_time"]
        layer = record["layer"]
        layer = layer or 1
        end = end or start
        # Write the filename, the start_time and the end_time
        list_file.write("%s%s%s%s%s\n" % (name, fs, start, fs, end))
        # Export the vector map with v.out.ogr
        try:
            gscript.run_command("v.out.ogr", input=name,
                                output=(name + ".xml"),
                                layer=layer, format="GML")
        except CalledModuleError:
            # abort cleanly: drop the work dir and close the archive
            shutil.rmtree(new_cwd)
            tar.close()
            gscript.fatal(_("Unable to export vector map <%s> as "
                            "GML with v.out.ogr" % name))

        tar.add(name + ".xml")
        tar.add(name + ".xsd")
Beispiel #13
0
def _export_vector_maps(rows, tar, list_file, new_cwd, fs):
    """Pack each distinct vector map with v.pack and add the .pack file to
    the tar archive, recording name, layer and time range in list_file."""
    for record in rows:
        name = record["name"]
        start = record["start_time"]
        end = record["end_time"]
        layer = record["layer"]

        # Export unique maps only
        if name in exported_maps:
            continue

        layer = layer or 1
        end = end or start
        # Write the filename, the start_time and the end_time
        list_file.write("%s:%s%s%s%s%s\n" % (name, layer, fs, start, fs, end))
        # Export the vector map with v.pack
        try:
            gscript.run_command("v.pack", input=name, flags="c")
        except CalledModuleError:
            # abort cleanly: drop the work dir and close the archive
            shutil.rmtree(new_cwd)
            tar.close()
            gscript.fatal(_("Unable to export vector map <%s> with v.pack" %
                          name))

        tar.add(name + ".pack")

        exported_maps[name] = name
Beispiel #14
0
def fsegm(pa):
 """Parallel worker: create a per-process GRASS mapset, then clean it up.

 NOTE(review): Python 2 code (print statements) with hard-coded paths.
 *pa* is only printed here; the actual per-unit work appears to live
 elsewhere or was removed -- confirm before reuse.
 """
 ### mp.current_process().cnt += 1
 # worker index derived from the multiprocessing process identity
 current = multiprocessing.current_process()
 mn = current._identity[0]
 print 'running:', mn

 # hard-coded GRASS installation and database locations
 GISBASE = os.environ['GISBASE'] = "/home/majavie/hierba_hanks/grass-7.1.svn"
 GRASSDBASE = "/home/majavie/hanksgrass7"
 MYLOC = "global_MW"
 mapset = 'm'
 sys.path.append(os.path.join(os.environ['GISBASE'], "etc", "python"))
 import grass.script as grass
 import grass.script.setup as gsetup
 gsetup.init(GISBASE, GRASSDBASE, MYLOC, mapset)

 # per-worker mapset name, e.g. 'm1', 'm2', ...
 mapset2 = 'm'+str(mn)#ehabitat'
 # remove any leftover mapset directory from a previous run
 os.system ('rm -rf /home/majavie/hanksgrass7/global_MW/'+mapset2)
 grass.run_command('g.mapset',mapset=mapset2,location='global_MW',gisdbase='/home/majavie/hanksgrass7',flags='c')

 gsetup.init(GISBASE, GRASSDBASE, MYLOC, mapset2)
 print mapset2, grass.gisenv()
 print pa
 grass.run_command('g.mapsets',mapset='ehabitat,rasterized_parks',operation='add')
 grass. message ("Deleting tmp layers")
 # remove the worker mapset again and force garbage collection
 os.system ('rm -rf /home/majavie/hanksgrass7/global_MW/'+mapset2)
 gc.collect()
Beispiel #15
0
def test_vector_dataset():
    """Smoke-test the vector_dataset temporal wrapper: create a random 3D
    test vector, register it in the temporal DB with an absolute time
    range, and print its temporal relation to itself.

    NOTE(review): Python 2 code (print statements).
    """
    # Create a test map
    grass.run_command("v.random", output="test", n=20, column="height", zmin=0, \
                      zmax=100, flags="z", overwrite = True)

    name = "test"
    mapset =  grass.gisenv()["MAPSET"]

    print "Create a vector object"

    # We need to specify the name and the mapset as identifier
    vds = vector_dataset(name + "@" + mapset)

    # Load data from the raster map in the mapset
    vds.load()

    print "Is in db: ", vds.is_in_db()

    if vds.is_in_db():
        # Remove the entry if it is in the db
        vds.delete()

    # Set the absolute valid time
    vds.set_absolute_time(start_time= datetime(year=2000, month=1, day=1), \
                            end_time= datetime(year=2010, month=1, day=1))
    # Insert the map data into the SQL database
    vds.insert()
    # Print self info
    vds.print_self()
    # The temporal relation must be equal
    print vds.temporal_relation(vds)
Beispiel #16
0
 def calcWatershed(self,inRast,inThresh,inDA,subId='',overwrt=False):
     '''
         Run r.watershed GRASS function.
         INPUT: inRast (input raster of elevation)
                inThres (threshold value (#cells) for basin)
                inDA (drainage area based on inThres)
         OUTPUT: wshedDict (dictionary of elev, drain, basin, stream, flow accumulation rasters and threshold value)
     '''
     # when a sub-watershed id is given, swap it into the base name
     base = inRast
     if len(subId) > 0:
         base = inRast.replace('cnty20', subId)

     rDrain = '%s.drain%s' % (base, inDA)
     rBasin = '%s.basin%s' % (base, inDA)
     rStream = '%s.strms%s' % (base, inDA)
     rAccum = '%s.accum%s' % (base, inDA)
     grass.run_command(self.__rWatershed, 'm', overwrite=overwrt,
                       elevation=inRast, drain=rDrain, basin=rBasin,
                       stream=rStream, accumulation=rAccum,
                       thres=inThresh, memory=1000)
     return {
         'elev': inRast,
         'drain': rDrain,
         'basin': rBasin,
         'stream': rStream,
         'flAccum': rAccum,
         'thres': inThresh,
     }
Beispiel #17
0
 def calcWShedBasin(self,inRast,inThresh,inDA,overwrt=False):
     '''
         Run r.watershed GRASS function.
         INPUT: inRast (input raster of elevation)
                inThres (threshold value (#cells) for basin)
                inDA (drainage area based on inThres)
         OUTPUT: wshedDict (dictionary of elev, basin, rasters and threshold value)
     '''
     # 20 m rasters carry the current mask's trailing id in their names,
     # e.g. upneuse.basin10.6 -> 6
     if self.getRasterRes(inRast) == '20':
         suffix = '.' + (self.getMask().split('.'))[2]
     else:
         suffix = ''

     rBasin = '%s.basin%s%s' % (inRast, inDA, suffix)
     rStream = '%s.strms%s%s' % (inRast, inDA, suffix)
     rDrain = '%s.drain%s%s' % (inRast, inDA, suffix)
     rAccum = '%s.flwaccum%s%s' % (inRast, inDA, suffix)
     grass.run_command(self.__rWatershed, 'm', overwrite=overwrt,
                       elev=inRast, basin=rBasin, drain=rDrain,
                       accumulation=rAccum, stream=rStream,
                       thres=inThresh, memory=1000)
     return {
         'elev': inRast,
         'basin': rBasin,
         'stream': rStream,
         'drain': rDrain,
         'flAccum': rAccum,
         'thres': inThresh,
     }
def createtxtED(mapa):
    """Write per-class area/percentage stats of *mapa* to <name>PCT_EDGE.txt
    and, when both edge and core classes are present, a Matheron index file.

    NOTE(review): Python 2 code (backtick repr).  Assumes r.stats returns
    exactly classes 0/1/2 in order (Matrix, EDGE, CORE) -- cont_class
    indexes `classe` sequentially; more classes would raise IndexError.
    """
    pct_edge=0
    grass.run_command('g.region',rast=mapa)
    # per-category areas (flag 'a' = area in square meters)
    x=grass.read_command('r.stats',flags='a',input=mapa)
    y=x.split('\n')
    os.chdir(outputfolder)
    nome=mapa.replace("extracByMask_rast_imgbin_eroED_50m_EDGE_FINAL",'')

    txtreclass=open(nome+'PCT_EDGE.txt','w')
    txtreclass.write('class'',''COD'',''A_M2'',''PCT\n')
    classe=['Matrix','EDGE','CORE']
    cont_class=0
    #print y
    # drop trailing empty line and the no-data ('*') line
    del y[-1]
    del y[-1]
   # print y
    if y!=0:
        # first pass: total area for percentage computation
        acumula=0
        for i in y:
            split=i.split(' ')
            split=float(split[1])
            acumula=acumula+split
            #print acumula
        # second pass: write one line per class
        for i in y:
            if i !='':

              ##print i
                f=i.split(' ')
                if '*' in f :
                    break

                else:
                ##print f
                    ids=f[0]
                    ids=int(ids)
                    ##print ids
                    m2=f[1]
                    m2=float(m2)
                    pct=m2/acumula*100
                    pct=round(pct,2)
                    txtreclass.write(classe[cont_class]+','+`ids`+','+`m2`+','+`pct`+'\n')
                    cont_class=cont_class+1
                # Matheron index

                if ids==1:
                    pct_edge=m2/acumula*100
                    pct_edge=round(pct_edge,2)
                if ids==2:
                    pctflt=m2/acumula*100
                    pctflt=round(pctflt,2)
                    txt_Matheron=open(nome+'_Matheron.txt','w')
                    if pct_edge>0:
                        txt_Matheron.write('Matheron\n')
                        # pct of edge divided by pct of forest
                        Matheron=pct_edge/pctflt
                        txt_Matheron.write(`Matheron`)
                    txt_Matheron.close()


        txtreclass.close()
Beispiel #19
0
 def createRelief(self,inRast,overWrt=False):
     '''Create a shaded relief raster from *inRast*.
     INPUT: rast
     OUTPUT: shaded rastNm'''
     shaded_name = '%s.shade' % inRast
     grass.run_command(self.__rShade, overwrite=overWrt,
                       map=inRast, shadedmap=shaded_name)
     return shaded_name
Beispiel #20
0
def main(options, flags): 
    """Tile the current region into cols x rows blocks, creating one mapset
    node_<col>_<row> per tile with its region set to the tile extent."""
    if flags['d']:
        # start from the default region of PERMANENT
        grass.run_command('g.mapset', mapset='PERMANENT', quiet=True)
        grass.run_command('g.region', flags='d')

    reg = grass.region()

    cols = int(options['cols'])  # Count of columns in the mapsets
    rows = int(options['rows'])  # Count of rows in the mapsets

    # tile size in map units
    dx = cols * reg['ewres']
    dy = rows * reg['nsres']

    west = reg['w']
    south = reg['s']

    i = j = 0
    try:
        while west < reg['e']:
            while south < reg['n']:
                grass.run_command('g.mapset', mapset="node_%s_%s" % (j, i),
                                  flags='c', quiet=True)
                # clamp the tile to the full region's boundary
                grass.run_command('g.region',
                                  s=south, n=min(reg['n'], south + dy),
                                  w=west, e=min(reg['e'], west + dx),
                                  flags='p')
                south += dy
                j += 1
            west += dx
            i += 1
            j = 0
            south = reg['s']
    finally:
        # always return to PERMANENT, even on failure
        grass.run_command('g.mapset', mapset='PERMANENT')
def txt(mapa,txtname,folder):
    """Write a CSV of per-category areas (m2 and ha) of *mapa* into
    *folder* (resolved under a hard-coded base directory).

    NOTE(review): Python 2 code (backtick repr); the `+1` added to
    area_HA below looks like a bug or an undocumented offset -- confirm.
    """
    grass.run_command('g.region',rast=mapa)
    os.chdir(r'C:\_data\talitha\Mapas_classificados_final\saidas_grass\saidas_2015_03_d11')
    os.chdir(folder)
    # per-category areas in square meters (flag 'a')
    x=grass.read_command('r.stats',flags='a',input=mapa)
    y=x.split('\n')

    # keep all lines except no-data ('*') rows; drop trailing empty line
    listapoio=[]
    for i in y:
        if ('*' in i):
            continue
        else:
            listapoio.append(i)
    del listapoio[-1]

    fd = open(txtname,'w')
    myCsvRow="cod"",""areaM2" ",""Area_ha\n"
    fd.write(myCsvRow)
    for i in listapoio:
        temp1=i.split(' ')
        cod=int(temp1[0])

        aream2=float(temp1[1])
        # NOTE(review): +1 offset on hectares -- intent unclear, verify
        area_HA=round(aream2/10000,2)+1
        fd.write(`cod`+','+`aream2`+','+`area_HA`+'\n')
    fd.close()
Beispiel #22
0
def main():
    """Run db.univar on the attribute column of a vector map's table,
    forwarding the extended-statistics (-e) and shell-style (-g) flags.

    Bug fix: the original mixed tabs and spaces (a TabError under
    Python 3); indentation normalized.  Unused locals (extend,
    shellstyle) removed -- the flags dict is consulted directly below.
    """
    global tmp
    tmp = grass.tempfile()

    vector = options['map']
    layer = options['layer']
    column = options['column']
    where = options['where']
    perc = options['percentile']

    fi = grass.vector_db(vector, stderr=nuldev)[int(layer)]
    table = fi['table']
    database = fi['database']
    driver = fi['driver']

    # translate this module's flags into db.univar's flag string
    passflags = None
    if flags['e']:
        passflags = 'e'
    if flags['g']:
        if not passflags:
            passflags = 'g'
        else:
            passflags = passflags + 'g'

    grass.run_command('db.univar', table=table, column=column,
                      database=database, driver=driver,
                      perc=perc, where=where, flags=passflags)
Beispiel #23
0
def create_localities(loc):
    """
    Read and parse the csv file of localities
    The csv file must have exactly 5 folumns:
    "Locality name" (a string), index (integer), code (3-4 letters), X coord, Y coord (floats)
    Create a python list, where each entry is a list of:
    ('Locality Name',Locality index,'Locality code, X coord, Y coord)
    Also Create a GRASS vector
    """
    try:
        csv_file = open(loc,"rb")
    except IOError:
        grass.fatal("Cannot open localities file: "+loc)

    grass.message(" === Reading list of localities ===")
    # one [name, index, code, x, y] entry per csv row
    loc_list = [[row[0], row[1], row[2], row[3], row[4]]
                for row in csv.reader(csv_file, delimiter=',')]
    csv_file.close()

    # Make a GRASS vector
    loc_vector = os.path.splitext(loc)[0]
    grass.run_command('v.in.ascii', input=loc, output=loc_vector, fs=",",
                      x=4, y=5,
                      columns="loc_name varchar(32), id integer, code varchar(6), longitude double, latitude double",
                      quiet=True, overwrite=True)

    return loc_list, loc_vector
def main():
    """Validate input/output maps, set a temporary region (optionally from
    a subset raster), run the noise calculation, and restore the region."""
    # process command options
    in_map = options['input']
    if not gs.find_file(in_map)['file']:
        gs.fatal(_("Raster map <%s> not found") % in_map)

    out_map = options['output']
    if gs.find_file(out_map)['file'] and not gs.overwrite():
        gs.fatal(_("Output map <%s> already exists") % out_map)

    # set aside region for internal use
    gs.use_temp_region()

    # subset input if desired
    region = options.get('region')
    if region:
        if not gs.find_file(region)['file']:
            gs.fatal(_("Raster map <%s> not found") % region)
        gs.message("Setting region to %s" % region, flag='i')
        gs.run_command('g.region', rast=region, align=in_map)
    else:
        gs.message("Using existing GRASS region", flag='i')

    # dump the effective region for debugging
    gs.debug('=' * 50)
    gs.debug('\n'.join(gs.parse_command('g.region', 'p').keys()))
    gs.debug('=' * 50)

    calculate_noise(in_map, out_map)

    # restore original region
    gs.del_temp_region()

    return None
def txt(mapa,txtname,folder):
    """Write a CSV of per-category areas (m2 and ha) of *mapa* into
    *folder* (resolved under a hard-coded base directory).

    NOTE(review): Python 2 code (backtick repr).
    """
    grass.run_command('g.region',rast=mapa)
    os.chdir(r'E:\data_2015\___john\001.Thalita_p2\__Resultados_metricas_parte1')
    os.chdir(folder)
    # per-category areas in square meters (flag 'a')
    x=grass.read_command('r.stats',flags='a',input=mapa)
    y=x.split('\n')

    # keep all lines except no-data ('*') rows; drop trailing empty line
    listapoio=[]
    for i in y:
        if ('*' in i):
            continue
        else:
            listapoio.append(i)
    del listapoio[-1]

    fd = open(txtname,'w')
    myCsvRow="Cod"",""AreaM2" ",""Area_ha\n"
    fd.write(myCsvRow)
    for i in listapoio:
        temp1=i.split(' ')
        cod=int(temp1[0])
        aream2=float(temp1[1])
        area_HA=round(aream2/10000,2)
        fd.write(`cod`+','+`aream2`+','+`area_HA`+'\n')
    fd.close()
def dn_to_reflectance(dir, dict, bands):
    """
    Convert Digital Number values to reflectance for each band in the directory dir
    using the metadata values from dict
    See http://landsat.usgs.gov/Landsat8_Using_Product.php for details
    """
    basedir = os.path.basename(dir)
    cnt = 0
    for b in bands:
        band_tag = str(b)
        tif_path = os.path.join(dir, basedir + "_B" + band_tag + ".TIF")
        grass.message("Working on %s " % tif_path)
        # Import the tile
        rast = basedir.lower() + "_b" + band_tag
        grass.run_command('r.in.gdal', input=tif_path, output=rast,
                          overwrite=True)
        grass.run_command('g.region', rast=rast, flags='p')
        rho = rast + "_reflect"
        # Get metadata values from the dict
        mult = '{:f}'.format(dict["REFLECTANCE_MULT_BAND_" + band_tag])
        add = '{:f}'.format(dict["REFLECTANCE_ADD_BAND_" + band_tag])
        zenith = '{:f}'.format(90.0 - dict["SUN_ELEVATION"])
        # Prepare mapcalc expression
        expr = rho + " = (" + str(mult) + "*" + rast + "+(" + str(add) + "))/cos(" + zenith + ")"
        grass.message("Calculating expression: %s" % expr)
        grass.mapcalc(expr, overwrite=True)
        grass.message("Created reflectance raster: %s" % rho)
        cnt += 1

    grass.message("Completed %s reflectance rasters" % cnt)
def blender_export_DEM(raster, path, name=None, tmp_path='/tmp', time_suffix=True, env=None):
    """Export raster DEM under  certain name to be used by Blender

    NOTE(review): Python 2 code (print statement).  When *path* contains
    'server=' the export is written to *tmp_path* first and then copied,
    presumably because direct writes to the network share fail -- verify.
    """
    if not (path and os.path.exists(path)):
        print 'Blender path does not exist:\n{p}'.format(p=path)
        return
    # 'server=' in the path marks a remote (non-local) destination
    local = True
    if 'server=' in path:
        local = False

    if time_suffix:
        # e.g. _14_30_5 for 14:30:05, to keep successive exports distinct
        time = datetime.now()
        suffix = '_{}_{}_{}'.format(time.hour, time.minute, time.second)
    else:
        suffix = ''

    if not name:
        name = raster

    fullname = '{name}{suffix}.tif'.format(name=name, suffix=suffix)

    if local:
        out = os.path.join(path, fullname)
    else:
        out = os.path.join(tmp_path, fullname)
    gscript.run_command('r.out.gdal', flags='cf', input=raster, type="Float32",
                        create='TFW=YES', out=out, quiet=True, env=env)

    if not local:
        try:
            shutil.copyfile(out, os.path.join(path, fullname))
        except OSError as e:
            # errno 95 (EOPNOTSUPP) is silently tolerated -- presumably a
            # network-share limitation; other errors propagate
            if e.errno == 95:
                pass
def set_mask(name, msk_type='rast', maskcats='*'):
    """Set the GRASS raster MASK from a raster ('rast') or 'vector' map."""
    if msk_type == 'rast':
        gscript.run_command('r.mask', raster=name, overwrite=True,
                            verbose=True, maskcats=maskcats)
    elif msk_type == 'vector':
        gscript.run_command('r.mask', vector=name, overwrite=True,
                            verbose=True)
def mask_data(band_filter, cfmask_filter, cloud_mask_value, file_separator):
    """Apply each scene's cloud mask and write *_masked copies of its bands."""
    # do cleanup first
    grass.run_command('g.remove', type='raster', pattern='*_masked',
                      flags='f', quiet=True)

    # find band1 raster maps first
    bands1 = grass.list_grouped('raster',
                                pattern='*{}1*'.format(band_filter))[mapset]
    count = len(bands1)
    for i, b1 in enumerate(bands1, start=1):
        basename = b1.split(file_separator)[0]
        grass.message("Processing <{0}> ({1}/{2})...".format(basename, i, count))
        grass.percent(i, count, 5)
        # set computational region based on first band
        grass.run_command('g.region', raster=b1)
        maskname = '{}{}{}'.format(basename, file_separator, cfmask_filter)
        mask = grass.find_file(maskname, element='raster')['fullname']
        # apply cloud mask if found
        if mask:
            grass.run_command('r.mask', flags='i', raster=maskname,
                              maskcats=cloud_mask_value, overwrite=True,
                              quiet=True)
        else:
            grass.warning("Mask missing for <{}>".format(basename))
        # create copy of band with mask applied
        bands = grass.list_grouped(
            'raster',
            pattern='{}{}{}*'.format(basename, file_separator, band_filter)
        )[mapset]
        for b in bands:
            grass.mapcalc('{name}_masked={name}'.format(name=b),
                          quiet=True, overwrite=True)
            grass.run_command('r.colors', map=b, color='grey.eq')
        # remove mask if applied
        if mask:
            grass.run_command('r.mask', flags='r', quiet=True)
def rulesreclass(mapa,dirs):
  """Write an r.reclass rules file (<mapa>_rules.txt) in *dirs* mapping each
  category id to its area in hectares (+1) and return the file name.

  NOTE(review): Python 2 code (backtick repr).  The `if '*' in f` test sits
  at the same indent level as `if i !='':`, so `f` can be referenced before
  assignment when the first line of y is empty -- likely a latent bug.
  """
  grass.run_command('g.region',rast=mapa)
  # per-category areas in square meters (flag 'a')
  x=grass.read_command('r.stats',flags='a',input=mapa)
  #print x

  #t=grass.read_command('r.stats',flags='a',input='buffers_10000_MERGE_id2_0_clipMapa_tif_sum2')
  y=x.split('\n')
  #print y
  os.chdir(dirs)
  txtsaida=mapa+'_rules.txt'
  txtreclass=open(mapa+'_rules.txt','w')



  if y!=0:
    for i in y:
          if i !='':
                ##print i
                f=i.split(' ')
          # stop at the no-data ('*') or header line
          if '*' in f or 'L' in f :
                break
          else:
                ##print f
                ids=f[0]
                ids=int(ids)
                ##print ids
                ha=f[1]
                ha=float(ha)
                haint=float(round(ha))

                ##print haint
                # square meters -> hectares, offset by 1
                haint2=haint/10000+1
                txtreclass.write(`ids`+'='+`haint2`+ '\n')
    txtreclass.close()
  return txtsaida
Beispiel #31
0
def main():
    """Run r.sun over a range of times of day in parallel.

    Reads module options, validates the start/end/step time window,
    derives slope and aspect from the elevation map when they are not
    supplied, launches one r.sun worker process per time step (at most
    'nprocs' at a time), and finally attaches timestamps (temporal
    framework on GRASS 7 with -t, plain timestamps otherwise) and color
    tables to every requested output series.
    """
    options, flags = grass.parser()

    elevation_input = options['elevation']
    aspect_input = options['aspect']
    slope_input = options['slope']
    linke = options['linke']
    linke_value = options['linke_value']
    albedo = options['albedo']
    albedo_value = options['albedo_value']

    # basenames for the five possible output map series; any subset
    # may be requested, but at least one must be
    beam_rad_basename = options['beam_rad_basename']
    diff_rad_basename = options['diff_rad_basename']
    refl_rad_basename = options['refl_rad_basename']
    glob_rad_basename = options['glob_rad_basename']
    incidout_basename = options['incidout_basename']

    if not any([
            beam_rad_basename, diff_rad_basename, refl_rad_basename,
            glob_rad_basename, incidout_basename
    ]):
        grass.fatal(_("No output specified."))

    start_time = float(options['start_time'])
    end_time = float(options['end_time'])
    time_step = float(options['time_step'])
    nprocs = int(options['nprocs'])
    day = int(options['day'])
    temporal = flags['t']
    binary = flags['b']
    # name component forwarded to run_r_sun/check_time_map_names when
    # binary (-b) output is requested
    binaryTmpName = 'binary'
    year = int(options['year'])
    rsun_flags = ''
    if flags['m']:
        rsun_flags += 'm'
    if flags['p']:
        rsun_flags += 'p'

    if not is_grass_7() and temporal:
        grass.warning(_("Flag t has effect only in GRASS 7"))

    # check: start < end
    if start_time > end_time:
        grass.fatal(_("Start time is after end time."))
    if time_step >= end_time - start_time:
        grass.fatal(_("Time step is too big."))

    # here we check all the days
    if not grass.overwrite():
        check_time_map_names(beam_rad_basename,
                             grass.gisenv()['MAPSET'], start_time, end_time,
                             time_step, binary, binaryTmpName)
        check_time_map_names(diff_rad_basename,
                             grass.gisenv()['MAPSET'], start_time, end_time,
                             time_step, binary, binaryTmpName)
        check_time_map_names(refl_rad_basename,
                             grass.gisenv()['MAPSET'], start_time, end_time,
                             time_step, binary, binaryTmpName)
        check_time_map_names(glob_rad_basename,
                             grass.gisenv()['MAPSET'], start_time, end_time,
                             time_step, binary, binaryTmpName)

    # check for slope/aspect: derive the missing ones from elevation
    # into temporary maps (registered in TMP for cleanup)
    if not aspect_input or not slope_input:
        params = {}
        if not aspect_input:
            aspect_input = create_tmp_map_name('aspect')
            params.update({'aspect': aspect_input})
            TMP.append(aspect_input)
        if not slope_input:
            slope_input = create_tmp_map_name('slope')
            params.update({'slope': slope_input})
            TMP.append(slope_input)

        grass.info(_("Running r.slope.aspect..."))
        grass.run_command('r.slope.aspect',
                          elevation=elevation_input,
                          quiet=True,
                          **params)

    grass.info(_("Running r.sun in a loop..."))
    count = 0
    # Parallel processing
    proc_list = []
    proc_count = 0
    # 'suffixes' holds the current batch only (reset after each join);
    # 'suffixes_all' accumulates every time step for later registration
    suffixes = []
    suffixes_all = []
    times = list(frange(start_time, end_time, time_step))
    num_times = len(times)
    core.percent(0, num_times, 1)
    for time in times:
        count += 1
        core.percent(count, num_times, 10)

        suffix = '_' + format_time(time)
        proc_list.append(
            Process(target=run_r_sun,
                    args=(elevation_input, aspect_input, slope_input, day,
                          time, linke, linke_value, albedo, albedo_value,
                          beam_rad_basename, diff_rad_basename,
                          refl_rad_basename, glob_rad_basename,
                          incidout_basename, suffix, binary, binaryTmpName,
                          rsun_flags)))

        proc_list[proc_count].start()
        proc_count += 1
        suffixes.append(suffix)
        suffixes_all.append(suffix)

        # join the batch once the worker pool is full or input exhausted
        if proc_count == nprocs or proc_count == num_times or count == num_times:
            proc_count = 0
            exitcodes = 0
            for proc in proc_list:
                proc.join()
                exitcodes += proc.exitcode

            if exitcodes != 0:
                core.fatal(_("Error while r.sun computation"))

            # Empty process list
            proc_list = []
            suffixes = []
    # FIXME: how percent really works?
    # core.percent(1, 1, 1)

    # add timestamps either via temporal framework in 7 or r.timestamp in 6.x
    if is_grass_7() and temporal:
        core.info(_("Registering created maps into temporal dataset..."))
        import grass.temporal as tgis

        def registerToTemporal(basename, suffixes, mapset, start_time,
                               time_step, title, desc):
            # create (or overwrite) a space-time raster dataset named
            # after the basename and register all maps of the series in it
            maps = ','.join(
                [basename + suf + '@' + mapset for suf in suffixes])
            tgis.open_new_stds(basename,
                               type='strds',
                               temporaltype='absolute',
                               title=title,
                               descr=desc,
                               semantic='mean',
                               dbif=None,
                               overwrite=grass.overwrite())
            tgis.register_maps_in_space_time_dataset(type='raster',
                                                     name=basename,
                                                     maps=maps,
                                                     start=start_time,
                                                     end=None,
                                                     increment=time_step,
                                                     dbif=None,
                                                     interval=False)

        # Make sure the temporal database exists
        tgis.init()

        mapset = grass.gisenv()['MAPSET']
        absolute_time = datetime.datetime(year, 1, 1) + \
                        datetime.timedelta(days=day - 1) + \
                        datetime.timedelta(hours=start_time)
        start = absolute_time.strftime("%Y-%m-%d %H:%M:%S")
        step = datetime.timedelta(hours=time_step)
        # NOTE(review): timedelta.seconds ignores whole days — OK here
        # only while the step stays below 24 hours; confirm if extended
        step = "%d seconds" % step.seconds

        if beam_rad_basename:
            registerToTemporal(
                beam_rad_basename,
                suffixes_all,
                mapset,
                start,
                step,
                title="Beam irradiance",
                desc="Output beam irradiance raster maps [W.m-2]")
        if diff_rad_basename:
            registerToTemporal(
                diff_rad_basename,
                suffixes_all,
                mapset,
                start,
                step,
                title="Diffuse irradiance",
                desc="Output diffuse irradiance raster maps [W.m-2]")
        if refl_rad_basename:
            registerToTemporal(
                refl_rad_basename,
                suffixes_all,
                mapset,
                start,
                step,
                title="Reflected irradiance",
                desc="Output reflected irradiance raster maps [W.m-2]")
        if glob_rad_basename:
            registerToTemporal(
                glob_rad_basename,
                suffixes_all,
                mapset,
                start,
                step,
                title="Total irradiance",
                desc="Output total irradiance raster maps [W.m-2]")
        if incidout_basename:
            registerToTemporal(incidout_basename,
                               suffixes_all,
                               mapset,
                               start,
                               step,
                               title="Incidence angle",
                               desc="Output incidence angle raster maps")

    else:
        # non-temporal path: stamp each map individually
        absolute_time = datetime.datetime(year, 1, 1) + \
                        datetime.timedelta(days=day - 1)
        for i, time in enumerate(times):
            grass_time = format_grass_time(absolute_time +
                                           datetime.timedelta(hours=time))
            if beam_rad_basename:
                set_time_stamp(beam_rad_basename + suffixes_all[i],
                               time=grass_time)
            if diff_rad_basename:
                set_time_stamp(diff_rad_basename + suffixes_all[i],
                               time=grass_time)
            if refl_rad_basename:
                set_time_stamp(refl_rad_basename + suffixes_all[i],
                               time=grass_time)
            if glob_rad_basename:
                set_time_stamp(glob_rad_basename + suffixes_all[i],
                               time=grass_time)
            if incidout_basename:
                set_time_stamp(incidout_basename + suffixes_all[i],
                               time=grass_time)

    # apply color tables to every produced series
    if beam_rad_basename:
        maps = [beam_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if diff_rad_basename:
        maps = [diff_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if refl_rad_basename:
        maps = [refl_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if glob_rad_basename:
        maps = [glob_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if incidout_basename:
        maps = [incidout_basename + suf for suf in suffixes_all]
        # note: no 'binary' argument here, unlike the irradiance series
        set_color_table(maps)
Beispiel #32
0
def coarsen_region(factor=3):
    """Coarsen the current computational region by an integer factor.

    Divides the region's row and column counts by *factor* and applies
    the result with g.region.

    :param factor: coarsening divisor applied to both rows and cols
        (default 3)
    """
    # bug fix: the original ignored 'factor' and always divided by 3;
    # integer division matches the original Python-2 '/' semantics
    region = gs.region()
    gs.run_command('g.region',
                   rows=region['rows'] // factor,
                   cols=region['cols'] // factor)
Beispiel #33
0
def cleanup():
    """Remove every temporary raster recorded in the module-level
    ``tmp_rast`` list."""
    gs.message("Removing temporary files...", flag='i')
    for tmp_name in tmp_rast:
        gs.run_command("g.remove", rast=tmp_name, quiet=True)
Beispiel #34
0
def usped(elevation, erosion, flow_accumulation, r_factor, c_factor, k_factor,
          ls_factor, m_coeff, n_coeff):
    """The USPED (Unit Stream Power Erosion Deposition) model
    for transport limited erosion regimes.

    All raster arguments are GRASS map names.

    :param elevation: input elevation raster
    :param erosion: output net erosion-deposition raster (kg/m^2s),
        computed as the divergence of the sediment flow field
    :param flow_accumulation: output raster of flow accumulation
        scaled by the cell resolution
    :param r_factor: rainfall erosivity factor raster
    :param c_factor: land cover factor raster
    :param k_factor: soil erodibility factor raster
    :param ls_factor: output dimensionless topographic factor raster
    :param m_coeff: exponent applied to flow accumulation
    :param n_coeff: exponent applied to sin(slope)
    """

    # assign variables: names of intermediate rasters removed at the end
    slope = 'slope'
    aspect = 'aspect'
    flowacc = 'flowacc'
    qsx = 'qsx'
    qsxdx = 'qsxdx'
    qsy = 'qsy'
    qsydy = 'qsydy'
    grow_slope = 'grow_slope'
    grow_aspect = 'grow_aspect'
    grow_qsxdx = 'grow_qsxdx'
    grow_qsydy = 'grow_qsydy'
    sedflow = 'sedflow'
    sediment_flux = 'sediment_flux'

    # compute slope and aspect
    gscript.run_command('r.slope.aspect',
                        elevation=elevation,
                        slope=slope,
                        aspect=aspect,
                        overwrite=True)

    # grow border to fix edge effects of moving window computations
    gscript.run_command('r.grow.distance',
                        input=slope,
                        value=grow_slope,
                        overwrite=True)
    gscript.run_command('r.mapcalc',
                        expression="{slope}={grow_slope}".format(
                            slope=slope, grow_slope=grow_slope),
                        overwrite=True)
    gscript.run_command('r.grow.distance',
                        input=aspect,
                        value=grow_aspect,
                        overwrite=True)
    gscript.run_command('r.mapcalc',
                        expression="{aspect}={grow_aspect}".format(
                            aspect=aspect, grow_aspect=grow_aspect),
                        overwrite=True)

    # compute flow accumulation
    gscript.run_command('r.watershed',
                        elevation=elevation,
                        accumulation=flowacc,
                        flags="a",
                        overwrite=True)
    region = gscript.parse_command('g.region', flags='g')
    # north-south resolution; assumes square cells — TODO confirm
    res = region['nsres']
    gscript.run_command('r.mapcalc',
                        expression="{depth}"
                        "=({flowacc}*{res})".format(depth=flow_accumulation,
                                                    flowacc=flowacc,
                                                    res=res),
                        overwrite=True)
    # copy flowacc's color table onto the scaled map
    gscript.run_command('r.colors', map=flow_accumulation, raster=flowacc)
    # add depression parameter to r.watershed
    # derive from landcover class

    # compute dimensionless topographic factor
    gscript.run_command('r.mapcalc',
                        expression="{ls_factor}"
                        "=({flowacc}^{m})*(sin({slope})^{n})".format(
                            ls_factor=ls_factor,
                            m=m_coeff,
                            flowacc=flow_accumulation,
                            slope=slope,
                            n=n_coeff),
                        overwrite=True)

    # compute sediment flow at sediment transport capacity
    """
    T = R * K * C * P * LST
    where
    T is sediment flow at transport capacity
    R is rainfall factor
    K is soil erodibility factor
    C is a dimensionless land cover factor
    P is a dimensionless prevention measures factor
    LST is the topographic component of sediment transport capacity
    of overland flow
    """
    gscript.run_command('r.mapcalc',
                        expression="{sedflow}"
                        "={r_factor}"
                        "*{k_factor}"
                        "*{c_factor}"
                        "*{ls_factor}".format(r_factor=r_factor,
                                              k_factor=k_factor,
                                              c_factor=c_factor,
                                              ls_factor=ls_factor,
                                              sedflow=sedflow),
                        overwrite=True)

    # # convert sediment flow from tons/ha/s to kg/m^2/s
    # gscript.run_command(
    #     'r.mapcalc',
    #     expression="{converted_sedflow}"
    #     "={sedflow}"
    #     "*{ton_to_kg}"
    #     "/{ha_to_m2}".format(
    #         converted_sedflow=sediment_flux,
    #         sedflow=sedflow,
    #         ton_to_kg=1000.,
    #         ha_to_m2=10000.),
    #     overwrite=True)

    # convert sediment flow from tons/ha/yr to kg/m^2s
    gscript.run_command('r.mapcalc',
                        expression="{converted_sedflow}"
                        "={sedflow}"
                        "*{ton_to_kg}"
                        "/{ha_to_m2}"
                        "/{yr_to_s}".format(converted_sedflow=sediment_flux,
                                            sedflow=sedflow,
                                            ton_to_kg=1000.,
                                            ha_to_m2=10000.,
                                            yr_to_s=31557600.),
                        overwrite=True)

    # compute sediment flow rate in x direction (m^2/s)
    gscript.run_command('r.mapcalc',
                        expression="{qsx}={sedflow}*cos({aspect})".format(
                            sedflow=sediment_flux, aspect=aspect, qsx=qsx),
                        overwrite=True)

    # compute sediment flow rate in y direction (m^2/s)
    gscript.run_command('r.mapcalc',
                        expression="{qsy}={sedflow}*sin({aspect})".format(
                            sedflow=sediment_flux, aspect=aspect, qsy=qsy),
                        overwrite=True)

    # compute change in sediment flow in x direction
    # as partial derivative of sediment flow field
    gscript.run_command('r.slope.aspect',
                        elevation=qsx,
                        dx=qsxdx,
                        overwrite=True)

    # compute change in sediment flow in y direction
    # as partial derivative of sediment flow field
    gscript.run_command('r.slope.aspect',
                        elevation=qsy,
                        dy=qsydy,
                        overwrite=True)

    # grow border to fix edge effects of moving window computations
    gscript.run_command('r.grow.distance',
                        input=qsxdx,
                        value=grow_qsxdx,
                        overwrite=True)
    gscript.run_command('r.mapcalc',
                        expression="{qsxdx}={grow_qsxdx}".format(
                            qsxdx=qsxdx, grow_qsxdx=grow_qsxdx),
                        overwrite=True)
    gscript.run_command('r.grow.distance',
                        input=qsydy,
                        value=grow_qsydy,
                        overwrite=True)
    gscript.run_command('r.mapcalc',
                        expression="{qsydy}={grow_qsydy}".format(
                            qsydy=qsydy, grow_qsydy=grow_qsydy),
                        overwrite=True)

    # compute net erosion-deposition (kg/m^2s)
    # as divergence of sediment flow
    gscript.run_command('r.mapcalc',
                        expression="{erdep} = {qsxdx} + {qsydy}".format(
                            erdep=erosion, qsxdx=qsxdx, qsydy=qsydy),
                        overwrite=True)

    # set color tables
    gscript.write_command('r.colors',
                          map=ls_factor,
                          rules='-',
                          stdin=lsfactor_colors)
    gscript.write_command('r.colors',
                          map=erosion,
                          rules='-',
                          stdin=erosion_colors)

    # remove temporary maps
    gscript.run_command('g.remove',
                        type='raster',
                        name=[
                            'slope', 'aspect', 'flowacc', 'qsx', 'qsy',
                            'qsxdx', 'qsydy', 'grow_slope', 'grow_aspect',
                            'grow_qsxdx', 'grow_qsydy', 'sedflow',
                            'sediment_flux'
                        ],
                        flags='f')
Beispiel #35
0
def rusle(elevation, erosion, flow_accumulation, r_factor, c_factor, k_factor,
          ls_factor, m_coeff, n_coeff):
    """The RUSLE3D
    (Revised Universal Soil Loss Equation for Complex Terrain) model
    for detachment limited soil erosion regimes.

    All raster arguments are GRASS map names.

    :param elevation: input elevation raster
    :param erosion: output soil loss raster (kg/m^2s)
    :param flow_accumulation: output raster of flow accumulation
        scaled by the cell resolution
    :param r_factor: rainfall erosivity factor raster
    :param c_factor: land cover factor raster
    :param k_factor: soil erodibility factor raster
    :param ls_factor: output dimensionless topographic factor raster
    :param m_coeff: exponent applied to the flow accumulation term
    :param n_coeff: exponent applied to the sin(slope) term
    """

    # assign variables: names of intermediate rasters removed at the end
    slope = 'slope'
    grow_slope = 'grow_slope'
    flowacc = 'flowacc'
    sedflow = 'sedflow'

    # compute slope
    gscript.run_command('r.slope.aspect',
                        elevation=elevation,
                        slope=slope,
                        overwrite=True)

    # grow border to fix edge effects of moving window computations
    gscript.run_command('r.grow.distance',
                        input=slope,
                        value=grow_slope,
                        overwrite=True)
    gscript.run_command('r.mapcalc',
                        expression="{slope}={grow_slope}".format(
                            slope=slope, grow_slope=grow_slope),
                        overwrite=True)

    # compute flow accumulation
    gscript.run_command('r.watershed',
                        elevation=elevation,
                        accumulation=flowacc,
                        flags="a",
                        overwrite=True)
    region = gscript.parse_command('g.region', flags='g')
    # north-south resolution; assumes square cells — TODO confirm
    res = region['nsres']
    gscript.run_command('r.mapcalc',
                        expression="{depth}"
                        "=({flowacc}*{res})".format(depth=flow_accumulation,
                                                    flowacc=flowacc,
                                                    res=res),
                        overwrite=True)
    # copy flowacc's color table onto the scaled map
    gscript.run_command('r.colors', map=flow_accumulation, raster=flowacc)

    # compute dimensionless topographic factor
    gscript.run_command('r.mapcalc',
                        expression="{ls_factor}"
                        "=({m}+1.0)"
                        "*(({flowacc}/22.1)^{m})"
                        "*((sin({slope})/5.14)^{n})".format(
                            ls_factor=ls_factor,
                            m=m_coeff,
                            flowacc=flow_accumulation,
                            slope=slope,
                            n=n_coeff),
                        overwrite=True)

    # compute sediment flow
    """E = R * K * LS * C * P
    where
    E is average annual soil loss
    R is erosivity factor
    K is soil erodibility factor
    LS is a dimensionless topographic (length-slope) factor
    C is a dimensionless land cover factor
    P is a dimensionless prevention measures factor
    """
    gscript.run_command('r.mapcalc',
                        expression="{sedflow}"
                        "={r_factor}"
                        "*{k_factor}"
                        "*{ls_factor}"
                        "*{c_factor}".format(sedflow=sedflow,
                                             r_factor=r_factor,
                                             k_factor=k_factor,
                                             ls_factor=ls_factor,
                                             c_factor=c_factor),
                        overwrite=True)

    # # convert sediment flow from tons*ha^-1*s^-1 to kg*m^-2^s^-1
    # gscript.run_command(
    #     'r.mapcalc',
    #     expression="{converted_sedflow}"
    #     "={sedflow}*{ton_to_kg}/{ha_to_m2}".format(
    #         converted_sedflow=erosion,
    #         sedflow=sedflow,
    #         ton_to_kg=1000.,
    #         ha_to_m2=10000.),
    #     overwrite=True)

    # convert sediment flow from tons/ha/yr to kg/m^2s
    gscript.run_command('r.mapcalc',
                        expression="{converted_sedflow}"
                        "={sedflow}"
                        "*{ton_to_kg}"
                        "/{ha_to_m2}"
                        "/{yr_to_s}".format(converted_sedflow=erosion,
                                            sedflow=sedflow,
                                            ton_to_kg=1000.,
                                            ha_to_m2=10000.,
                                            yr_to_s=31557600.),
                        overwrite=True)

    # set color tables
    gscript.write_command('r.colors',
                          map=ls_factor,
                          rules='-',
                          stdin=lsfactor_colors)
    gscript.write_command('r.colors',
                          map=erosion,
                          rules='-',
                          stdin=sedflux_colors)

    # remove temporary maps
    gscript.run_command('g.remove',
                        type='raster',
                        name=['slope', 'grow_slope', 'flowacc', 'sedflow'],
                        flags='f')
Beispiel #36
0
def event_based_r_factor(rain_intensity, rain_duration):
    """compute event-based erosivity (R) factor (MJ mm ha^-1 hr^-1 yr^-1)

    :param rain_intensity: name of the rainfall intensity raster (mm/hr)
    :param rain_duration: rainfall event duration (min)
    :return: name of the created R factor raster ('r_factor')
    """

    # assign variables: names of rasters created by this function;
    # all but r_factor are removed before returning
    rain_energy = 'rain_energy'
    rain_volume = 'rain_volume'
    erosivity = 'erosivity'
    r_factor = 'r_factor'

    # derive rainfall energy (MJ ha^-1 mm^-1)
    gscript.run_command('r.mapcalc',
                        expression="{rain_energy}"
                        "=0.29*(1.-(0.72*exp(-0.05*{rain_intensity})))".format(
                            rain_energy=rain_energy,
                            rain_intensity=rain_intensity),
                        overwrite=True)

    # derive rainfall volume
    """
    rainfall volume (mm)
    = rainfall intensity (mm/hr)
    * (rainfall duration (min)
    * (1 hr / 60 min))
    """
    gscript.run_command('r.mapcalc',
                        expression="{rain_volume}"
                        "= {rain_intensity}"
                        "*({rain_duration}"
                        "/60.)".format(rain_volume=rain_volume,
                                       rain_intensity=rain_intensity,
                                       rain_duration=rain_duration),
                        overwrite=True)

    # derive event erosivity index (MJ mm ha^-1 hr^-1)
    gscript.run_command('r.mapcalc',
                        expression="{erosivity}"
                        "=({rain_energy}"
                        "*{rain_volume})"
                        "*{rain_intensity}"
                        "*1.".format(erosivity=erosivity,
                                     rain_energy=rain_energy,
                                     rain_volume=rain_volume,
                                     rain_intensity=rain_intensity),
                        overwrite=True)

    # # derive R factor (MJ mm ha^-1 hr^-1 s^-1)
    # """
    # R factor (MJ mm ha^-1 hr^-1 s^-1)
    # = EI (MJ mm ha^-1 hr^-1)
    # / (rainfall interval (min)
    # * (1 min / 60 sec))
    # """
    # gscript.run_command(
    #     'r.mapcalc',
    #     expression="{r_factor}"
    #     "={erosivity}"
    #     "/({rain_duration}"
    #     "/60.)".format(
    #         r_factor=r_factor,
    #         erosivity=erosivity,
    #         rain_duration=rain_duration),
    #     overwrite=True)

    # derive R factor (MJ mm ha^-1 hr^-1 yr^-1)
    """
    R factor (MJ mm ha^-1 hr^-1 yr^-1)
    = EI (MJ mm ha^-1 hr^-1)
    / (rainfall interval (min)
    * (1 yr / 525600 min))
    """
    gscript.run_command('r.mapcalc',
                        expression="{r_factor}"
                        "={erosivity}"
                        "/({rain_duration}"
                        "/525600.)".format(r_factor=r_factor,
                                           erosivity=erosivity,
                                           rain_duration=rain_duration),
                        overwrite=True)

    # remove temporary maps
    gscript.run_command('g.remove',
                        type='raster',
                        name=['rain_energy', 'rain_volume', 'erosivity'],
                        flags='f')

    # return the map name (the string 'r_factor'), not a value
    return r_factor
Beispiel #37
0
def main():
    """Denoise a raster with the external 'mdenoise' program.

    Exports the input raster to XYZ points, reprojects them if the
    current location is not suitable (per check_proj), runs mdenoise
    (preserving XY positions with -z), merges the denoised Z values
    back onto the original XY coordinates, reimports the points with
    r.in.xyz, and records the denoising parameters in the map history.
    """
    global unique

    # user keys
    in_raster = options['input']  # in_raster = 'srtm_1sec_amazonia'
    out_raster = options['output']  # out_raster = 'teste_dnoise'
    iterations = options['iterations']
    threshold = options['threshold']
    epsg = options['epsg']

    # check if input file exists
    if not grass.find_file(in_raster)['file']:
        grass.fatal(_("Raster map <%s> not found") % in_raster)

    # check if current location is in a projected coordinate system
    reproject = check_proj(epsg)

    # define projections
    loc_proj = grass.read_command('g.proj', flags='jf')
    loc_proj = pyproj.Proj(loc_proj.strip())
    # NOTE(review): 'init=epsg:...' is the pyproj 1.x style, deprecated
    # in pyproj 2+ — confirm the installed pyproj version
    epsg_proj = pyproj.Proj(init='epsg:' + str(epsg))

    # name the files
    tmp_xyz = 'tmp_xyz_%s.xyz' % unique
    # NOTE(review): tmp_xyz_orig deliberately shares tmp_xyz's filename;
    # tmp_xyz may be rebound to the reprojected file below while
    # tmp_xyz_orig keeps pointing at the original export — confirm
    tmp_xyz_orig = 'tmp_xyz_%s.xyz' % unique
    tmp_xyz_proj = 'tmp_xyz_proj_%s.xyz' % unique
    tmp_out_dnoise = 'tmp_xyz_dnoise_%s.xyz' % unique
    # tmp_out_dnoise_proj = 'tmp_xyz_dnoise_proj_%s.xyz' % unique
    tmp_xyz_merge = 'tmp_xyz_merge_%s.xyz' % unique
    # list for cleanup
    tmp_rmaps = [
        tmp_xyz, tmp_xyz_orig, tmp_xyz_proj, tmp_out_dnoise, tmp_xyz_merge
    ]

    # Export the map to xyz points.
    grass.message(_("Exporting points..."))
    grass.run_command('r.stats',
                      flags='1g',
                      input=in_raster,
                      output=tmp_xyz,
                      overwrite=True)

    # Reproject if necessary
    if reproject:
        do_proj(xyz_in=tmp_xyz,
                xyz_out=tmp_xyz_proj,
                in_proj=loc_proj,
                out_proj=epsg_proj)
        tmp_xyz = tmp_xyz_proj

    # Denoise.  The -z flag preserves the xy positions of the points.
    grass.message(_("Denoising..."))
    cmd = ['mdenoise'] + ['-i'] + [tmp_xyz] + ['-t'] + [str(threshold)] + [
        '-n'
    ] + [str(iterations)] + ['-z'] + ['-o'] + [tmp_out_dnoise]
    grass.call(cmd)

    # If reprojected, it is necessary to return to original coordinate system.
    #### actually, there's no need for this, since we will use only the Z coordinates from the denoised data ####
    # if reproject:
    #     do_proj(xyz_in=tmp_out_dnoise, xyz_out=tmp_out_dnoise_proj, in_proj=epsg_proj, out_proj=loc_proj)
    #     tmp_out_dnoise = tmp_out_dnoise_proj

    # As only the z coordinates have changed in denoising, to prevent rounding
    # errors, the new z coordinates are combined with the original xy coordinates.
    f_merged = open(tmp_xyz_merge, 'w')  # new, merged
    #read input coordinates file
    # NOTE(review): izip is Python-2 itertools.izip — presumably imported
    # at the top of the file; on Python 3 this would be plain zip
    with open(tmp_out_dnoise) as f_dnoise, open(tmp_xyz_orig) as f_orig:
        for line_dnoise, line_orig in izip(f_dnoise, f_orig):
            xyz_dnoise = line_dnoise.split()  # denoised
            xyz_orig = line_orig.split()  # original
            f_merged.write('%s %s %s\n' %
                           (xyz_orig[0], xyz_orig[1], xyz_dnoise[2]))

    # close files
    f_merged.close()

    # Reload data
    grass.message(_("Reloading data..."))
    grass.run_command('r.in.xyz',
                      flags='i',
                      input=tmp_xyz_merge,
                      output=out_raster,
                      method='min',
                      x=1,
                      y=2,
                      z=3,
                      separator='space',
                      overwrite=True)

    # Edit metadata to record denoising parameters
    grass.run_command('r.support',
                      map=out_raster,
                      title="A denoised version of <%s>" % in_raster)
    grass.run_command(
        'r.support',
        map=out_raster,
        history="Generated by: r.denoise %s iterations=%s threshold=%s" %
        (in_raster, str(threshold), str(iterations)))

    # clean tmp files
    grass.message(_("Removing temporary files..."))
    # rebuild the cleanup list from the directory so every *<unique>.xyz
    # file is caught, regardless of which branches ran above
    tmp_rmaps = [
        fname for fname in os.listdir('.') if fname.endswith('%s.xyz' % unique)
    ]
    try:
        for fname in tmp_rmaps:
            os.remove(fname)
    except OSError:
        pass
Beispiel #38
0
def main():
    """Import a LAS/LAZ point cloud with PDAL.

    Behaviour depends on flags/options:
      * -s: scan the file with ``pdal info`` and report its extent
        (``n=... s=...`` shell style with -g),
      * footprint=<map>: only derive the data footprint as a vector map,
      * otherwise: set the region from the data footprint, convert the
        point cloud to CSV through a PDAL pipeline and bin it into the
        output raster with r.in.xyz.
    """
    # parameters
    infile = options['input']
    raster_reference = options['raster_reference']
    raster_file = options['raster_file']
    outfile = options['output']
    resolution = options['resolution']
    method = options['method']
    zrange = options['zrange']
    zscale = options['zscale']
    output_type = options['type']
    percent = options['percent']
    pth = options['pth']
    trim = options['trim']
    footprint = options['footprint']
    # flags
    scan = flags['s']
    shell_script_style = flags['g']

    # this module always overwrites its output maps
    os.environ['GRASS_OVERWRITE'] = '1'

    # to hide non-error messages from subprocesses
    if grass.verbosity() <= 2:
        outdev = open(os.devnull, 'w')
    else:
        outdev = sys.stdout

    # use temporary region
    grass.use_temp_region()

    # scan -s or shell_script_style -g:
    if scan:
        if not grass.find_program('pdal', 'info --summary'):
            grass.fatal(
                _("The pdal program is not in the path " +
                  "and executable. Please install first"))
        command_scan = ['pdal', 'info', '--summary', infile]
        tmp_scan = grass.tempfile()
        if tmp_scan is None:
            grass.fatal("Unable to create temporary files")
        with open(tmp_scan, 'wb') as fh:
            p = grass.call(command_scan, stdout=fh)
        summary = True
        if p != 0:
            # older PDAL releases lack --summary; retry with plain "info"
            command_scan = ['pdal', 'info', infile]
            with open(tmp_scan, 'wb') as fh:
                p = grass.call(command_scan, stdout=fh)
            summary = False
        if p != 0:
            # check to see if pdal executed properly
            os.remove(tmp_scan)
            grass.fatal(
                _("pdal cannot determine metadata " +
                  "for unsupported format of <%s>") % infile)
        with open(tmp_scan) as json_file:
            data = json.load(json_file)
        # the scan output is parsed; remove the temp file (the original
        # leaked it on the success path)
        os.remove(tmp_scan)
        if summary:
            # two JSON layouts exist depending on the PDAL version:
            # nested per-axis min/max dicts, or flat minx/maxx/... keys
            bounds = data[u'summary'][u'bounds']
            try:
                n = str(bounds[u'Y'][u'max'])
                s = str(bounds[u'Y'][u'min'])
                w = str(bounds[u'X'][u'min'])
                e = str(bounds[u'X'][u'max'])
                t = str(bounds[u'Z'][u'max'])
                b = str(bounds[u'Z'][u'min'])
            except (KeyError, TypeError):
                # flat layout fallback
                n = str(bounds[u'maxy'])
                s = str(bounds[u'miny'])
                w = str(bounds[u'minx'])
                e = str(bounds[u'maxx'])
                t = str(bounds[u'maxz'])
                b = str(bounds[u'minz'])
        else:
            bbox = data[u'stats'][u'bbox'][u'native'][u'bbox']
            n = str(bbox[u'maxy'])
            s = str(bbox[u'miny'])
            w = str(bbox[u'minx'])
            e = str(bbox[u'maxx'])
            t = str(bbox[u'maxz'])
            b = str(bbox[u'minz'])
        if not shell_script_style:
            grass.message(
                _("north: %s\nsouth: %s\nwest: %s\neast: %s\ntop: %s\nbottom: %s"
                  ) % (n, s, w, e, t, b))
        else:
            grass.message(
                _("n=%s s=%s w=%s e=%s t=%s b=%s") % (n, s, w, e, t, b))
    elif footprint:
        footprint_to_vectormap(infile, footprint)
    else:
        # get region with pdal
        footprint_to_vectormap(infile, 'tiles')

        if raster_file:
            # register the alignment raster as an external data source
            raster_reference = 'img'
            grass.run_command('r.external',
                              input=raster_file,
                              flags='o',
                              output=raster_reference)
            result = grass.find_file(name=raster_reference, element='raster')
            if result[u'fullname'] == u'':
                # r.external may have produced a band-subdivided map
                raster_reference = 'img.1'
        # first pass: set region to extent of tiles while aligning pixel
        # geometry to raster_reference
        grass.run_command('g.region', vector='tiles', flags='p')
        if raster_reference:
            grass.run_command('g.region',
                              vector='tiles',
                              flags='ap',
                              align=raster_reference)
        # second pass: change raster resolution to final resolution while best
        # effort aligning to pixel geometry
        grass.run_command('g.region',
                          vector='tiles',
                          flags='ap',
                          res=resolution)

        # pdal pipeline: point cloud -> CSV (STDOUT) -> r.in.xyz
        bn = os.path.basename(infile)
        infile_format = bn.split('.')[-1]
        # format_reader from https://pdal.io/stages/readers.html
        if infile_format.lower() in ('laz', 'las'):
            format_reader = 'readers.las'
        # pts: not tested
        elif infile_format.lower() == 'pts':
            format_reader = 'readers.pts'
        else:
            grass.run_command('g.remove',
                              flags='f',
                              type='vector',
                              name='tiles',
                              quiet=True)
            grass.fatal(_("Format .%s is not supported.." % infile_format))
        tmp_file_json = grass.tempfile()
        if tmp_file_json is None:
            grass.fatal("Unable to create temporary files")
        # build the pipeline definition: read infile, write X,Y,Z CSV
        pipeline = {'pipeline': [
            {'type': format_reader, 'filename': infile},
            {
                'type': 'writers.text',
                'format': 'csv',
                'order': 'X,Y,Z',
                'keep_unspecified': 'false',
                'filename': 'STDOUT',
                'quote_header': 'false'
            },
        ]}
        with open(tmp_file_json, 'w') as f:
            json.dump(pipeline, f)

        tmp_xyz = grass.tempfile()
        if tmp_xyz is None:
            grass.fatal("Unable to create temporary files")
        command_pdal1 = ['pdal', 'pipeline', '--input', tmp_file_json]
        command_pdal2 = [
            'r.in.xyz', 'input=' + tmp_xyz, 'output=' + outfile, 'skip=1',
            'separator=comma', 'method=' + method
        ]

        # optional r.in.xyz parameters
        if zrange:
            command_pdal2.append('zrange=' + zrange)
        if zscale:
            command_pdal2.append('zscale=' + zscale)
        if output_type:
            command_pdal2.append('type=' + output_type)
        if percent:
            command_pdal2.append('percent=' + percent)
        if pth:
            command_pdal2.append('pth=' + pth)
        if trim:
            command_pdal2.append('trim=' + trim)

        with open(tmp_xyz, 'wb') as fh:
            p2 = grass.call(command_pdal1, stdout=fh)
        if p2 != 0:
            # check to see if pdal pipeline executed properly
            grass.fatal(_("pdal pipeline is broken..."))

        p3 = grass.call(command_pdal2, stdout=outdev)
        if p3 != 0:
            # check to see if r.in.xyz executed properly
            os.remove(tmp_xyz)
            grass.fatal(_("r.in.xyz is broken..."))

        # metadata: reset history via an empty file, then record the source
        empty_history = grass.tempfile()
        if empty_history is None:
            grass.fatal("Unable to create temporary files")
        open(empty_history, 'w').close()
        grass.run_command('r.support',
                          map=outfile,
                          source1=infile,
                          description='generated by r.in.pdal',
                          loadhistory=empty_history)
        grass.run_command('r.support',
                          map=outfile,
                          history=os.environ['CMDLINE'])
        os.remove(empty_history)

        # Cleanup
        grass.message(_("Cleaning up..."))
        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name='tiles',
                          quiet=True)
        os.remove(tmp_file_json)
        os.remove(tmp_xyz)
        grass.message(_("Generating output raster map <%s>...") % outfile)
        grass.del_temp_region()
Beispiel #39
0
def _remove_and_fatal(names, message, quiet):
    """Remove the intermediate vector map(s) *names*, then abort with *message*."""
    grass.run_command('g.remove',
                      flags='f',
                      type='vector',
                      name=names,
                      quiet=quiet)
    grass.fatal(message)


def main():
    """Convert input polygons (or triangulated points) to lines.

    Steps: add boundary categories on layer 2, record left/right area
    categories, convert boundaries to lines, delete leftover centroids
    and the layer-1 table, then transfer the layer-2 categories to layer
    1 of the output map. Intermediate maps are removed on every error
    path and at the end.
    """
    # Get the options
    input = options["input"]
    input_name = input.split('@')[0]
    output = options["output"]
    method = options["method"]
    min_cat = None
    max_cat = None
    point = None
    overwrite = grass.overwrite()

    quiet = True

    if grass.verbosity() > 2:
        quiet = False

    in_info = grass.vector_info(input)
    # check for wild mixture of vector types
    if in_info['points'] > 0 and in_info['boundaries'] > 0:
        grass.fatal(
            _("The input vector map contains both polygons and points,"
              " cannot handle mixed types"))

    pid = os.getpid()
    # process points via triangulation, then continue on the result
    if in_info['points'] > 0:
        point = True
        layer = 1  # hardcoded for now
        out_temp = '{inp}_point_tmp_{pid}'.format(inp=input_name, pid=pid)
        if method == 'delaunay':
            grass.message(
                _("Processing point data (%d points found)...") %
                in_info['points'])
            grass.run_command('v.delaunay',
                              input=input,
                              layer=layer,
                              output=out_temp,
                              quiet=quiet)

        grass.run_command('v.db.addtable', map=out_temp, quiet=True)
        input = out_temp
        in_info = grass.vector_info(input)

    # process areas
    if in_info['areas'] == 0 and in_info['boundaries'] == 0:
        grass.fatal(_("The input vector map does not contain polygons"))

    out_type = '{inp}_type_{pid}'.format(inp=input_name, pid=pid)
    input_tmp = '{inp}_tmp_{pid}'.format(inp=input_name, pid=pid)
    remove_names = "%s,%s" % (out_type, input_tmp)
    grass.message(
        _("Processing area data (%d areas found)...") % in_info['areas'])

    # add boundary categories on layer 2
    try:
        grass.run_command('v.category',
                          layer="2",
                          type='boundary',
                          option='add',
                          input=input,
                          out=input_tmp,
                          quiet=quiet)
    except CalledModuleError:
        _remove_and_fatal(input_tmp, _("Error creating layer 2"), quiet)
    # attach a table holding the left/right area categories
    try:
        grass.run_command('v.db.addtable',
                          map=input_tmp,
                          layer="2",
                          columns="left integer,right integer",
                          quiet=quiet)
    except CalledModuleError:
        _remove_and_fatal(input_tmp,
                          _("Error creating new table for layer 2"), quiet)
    try:
        grass.run_command('v.to.db',
                          map=input_tmp,
                          option="sides",
                          columns="left,right",
                          layer="2",
                          quiet=quiet)
    except CalledModuleError:
        _remove_and_fatal(input_tmp,
                          _("Error populating new table for layer 2"), quiet)
    # convert boundaries to lines
    try:
        grass.run_command('v.type',
                          input=input_tmp,
                          output=out_type,
                          from_type='boundary',
                          to_type='line',
                          quiet=quiet,
                          layer="2")
    except CalledModuleError:
        _remove_and_fatal(remove_names,
                          _("Error converting polygon to line"), quiet)
    report = grass.read_command('v.category',
                                flags='g',
                                input=out_type,
                                option='report',
                                quiet=quiet).split('\n')
    for r in report:
        if r.find('centroid') != -1:
            # NOTE(review): min/max are read from report[0] rather than
            # from the matched line r -- looks suspicious; confirm the
            # v.category report layout before changing this.
            min_cat = report[0].split()[-2]
            max_cat = report[0].split()[-1]
            break
    # delete leftover centroids on layer 2, if any were reported
    if min_cat and max_cat:
        try:
            grass.run_command('v.edit',
                              map=out_type,
                              tool='delete',
                              type='centroid',
                              layer=2,
                              quiet=quiet,
                              cats='{mi}-{ma}'.format(mi=min_cat, ma=max_cat))
        except CalledModuleError:
            _remove_and_fatal(remove_names, _("Error removing centroids"),
                              quiet)

    try:
        try:
            # TODO: fix magic numbers for layer here and there
            grass.run_command('v.db.droptable',
                              map=out_type,
                              layer=1,
                              flags='f',
                              quiet=True)
        except CalledModuleError:
            _remove_and_fatal(remove_names,
                              _("Error removing table from layer 1"), quiet)
    # TODO: when this except is happaning, it seems that never, so it seems wrong
    except:
        grass.warning(_("No table for layer %d" % 1))
    # transfer the layer-2 categories to layer 1 of the output map
    try:
        grass.run_command('v.category',
                          input=out_type,
                          option='transfer',
                          output=output,
                          layer="2,1",
                          quiet=quiet,
                          overwrite=overwrite)
    except CalledModuleError:
        _remove_and_fatal(remove_names, _("Error adding categories"), quiet)
    # remove all intermediate maps
    grass.run_command('g.remove',
                      flags='f',
                      type='vector',
                      name=remove_names,
                      quiet=quiet)
    if point:
        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name=out_temp,
                          quiet=quiet)
Beispiel #40
0
def set_time_stamp(raster, time):
    """Attach *time* as the timestamp of *raster* via r.timestamp."""
    grass.run_command('r.timestamp', quiet=True, map=raster, date=time)
Beispiel #41
0
def cleanup():
    """Delete every intermediate raster registered in tmp_rast.

    Invoked on failure/exit so no temporary maps are left behind.
    """
    for raster_name in tmp_rast:
        gs.run_command(
            "g.remove",
            name=raster_name,
            type="raster",
            flags="f",
            quiet=True,
        )
Beispiel #42
0
def footprint_to_vectormap(infile, footprint):
    """ The function generates a footprint as vectormap of the input las-file.
    It uses pdal info --boundary.

    Args:
        infile(string): Name of LAS input file
        footprint(string): Footprint of the data as vector map
    """
    if not grass.find_program('pdal', 'info --boundary'):
        grass.fatal(
            _("The pdal executable is not available."
              " Install PDAL or put the pdal executable on path."))
    command_fp = ['pdal', 'info', '--boundary', infile]
    tmp_fp = grass.tempfile()
    if tmp_fp is None:
        grass.fatal("Unable to create temporary files")
    with open(tmp_fp, 'wb') as fh:
        p = grass.call(command_fp, stdout=fh)
    if p != 0:
        # check to see if pdal info executed properly
        os.remove(tmp_fp)
        grass.fatal(_("pdal info broken..."))

    with open(tmp_fp) as json_file:
        data = json.load(json_file)
    xy_in = ''
    str1 = u'boundary'
    try:
        # newer PDAL: GeoJSON-style nested coordinate array
        str2 = u'boundary_json'
        str3 = u'coordinates'
        coord = data[str1][str2][str3][0][0]
        for xy in coord:
            xy_in += str(xy[0]) + ',' + str(xy[1]) + '\n'
    except Exception:
        # fallback: parse the WKT-like "POLYGON ((x y, x y, ...))" string
        coord_str = str(data[str1][str1])
        coord = coord_str[coord_str.find('((') + 2:coord_str.find('))')]
        x_y = coord.split(', ')
        for xy in x_y:
            xy_in += xy.replace(' ', ',') + '\n'

    tmp_xy = grass.tempfile()
    if tmp_xy is None:
        grass.fatal("Unable to create temporary files")
    with open(tmp_xy, 'w') as f:
        # strip the trailing newline
        f.write(xy_in[:-1])
    # coordinates -> line -> boundary -> area with centroid -> table
    grass.run_command('v.in.lines',
                      input=tmp_xy,
                      output='footprint_line',
                      separator='comma')
    grass.run_command('g.region', vector='footprint_line')
    grass.run_command('v.type',
                      input='footprint_line',
                      out='footprint_boundary',
                      from_type='line',
                      to_type='boundary')
    grass.run_command('v.centroids', input='footprint_boundary', out=footprint)
    grass.run_command('v.db.addtable',
                      map=footprint,
                      columns='name varchar(50)')
    grass.run_command('v.db.update',
                      map=footprint,
                      column='name',
                      value=infile)

    # Cleaning up
    grass.message(_("Cleaning up..."))
    os.remove(tmp_fp)
    os.remove(tmp_xy)
    grass.run_command('g.remove',
                      flags='f',
                      type='vector',
                      name='footprint_line',
                      quiet=True)
    grass.run_command('g.remove',
                      flags='f',
                      type='vector',
                      name='footprint_boundary',
                      quiet=True)

    # metadata
    grass.run_command('v.support',
                      map=footprint,
                      comment='in ' + os.environ['CMDLINE'])

    grass.message(_("Generating output vector map <%s>...") % footprint)
def main():
    """Aggregate the maps of a space time raster dataset with r.series.

    Each requested method/quantile produces one output map. Unless -t
    is given, the temporal extent of the selected maps is transferred
    to the output maps and they are registered in the temporal database.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    quantile = options["quantile"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Check if number of methods (plus quantiles) and output maps matches.
    # (Leftover debug print() statements from the original were removed.)
    if (len(list(filter(None, quantile.split(',')))) +
            len(method.split(','))) != len(output.split(',')):
        grass.fatal(
            _('Number requested methods and output maps do not match.'))

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file listing all selected map ids
        filename = grass.tempfile(True)
        with open(filename, 'w') as map_list:
            for row in rows:
                map_list.write("%s\n" % (row["id"]))

        flag = ""
        if len(rows) > 1000:
            grass.warning(
                _("Processing over 1000 maps: activating -z flag of r.series which slows down processing"
                  ))
            flag += "z"
        if nulls:
            flag += "n"

        try:
            grass.run_command("r.series",
                              flags=flag,
                              file=filename,
                              output=output,
                              overwrite=grass.overwrite(),
                              method=method,
                              quantile=quantile)
        except CalledModuleError:
            grass.fatal(
                _("%s failed. Check above error messages.") % 'r.series')

        if not add_time:

            # We need to set the temporal extent from the subset of selected maps
            maps = sp.get_registered_maps_as_objects(where=where,
                                                     order=order,
                                                     dbif=None)
            first_map = maps[0]
            last_map = maps[-1]
            start_a, end_a = first_map.get_temporal_extent_as_tuple()
            start_b, end_b = last_map.get_temporal_extent_as_tuple()

            # interval-less last map: use its start time as the end
            if end_b is None:
                end_b = start_b

            if first_map.is_time_absolute():
                extent = tgis.AbsoluteTemporalExtent(start_time=start_a,
                                                     end_time=end_b)
            else:
                extent = tgis.RelativeTemporalExtent(
                    start_time=start_a,
                    end_time=end_b,
                    unit=first_map.get_relative_time_unit())

            for out_map in output.split(','):

                # Build a fully qualified id for the output map
                if out_map.find("@") >= 0:
                    id = out_map
                else:
                    mapset = grass.gisenv()["MAPSET"]
                    id = out_map + "@" + mapset

                map = sp.get_new_map_instance(id)
                map.load()

                map.set_temporal_extent(extent=extent)

                # Register the map in the temporal database
                if map.is_in_db():
                    map.update_all()
                else:
                    map.insert()
Beispiel #44
0
def main():
    """Compute mean local variance across a range of raster resolutions.

    Resamples the input raster to coarser and coarser resolutions,
    measures the mean neighborhood variance at each step, and reports
    the resolutions at which the variance curve has local maxima.
    Optionally writes the resolution/variance table to CSV and/or
    plots it.
    """
    import matplotlib  # required by windows

    matplotlib.use("wxAGG")  # required by windows
    import matplotlib.pyplot as plt

    input = options["input"]
    output = None
    if options["csv_output"]:
        output = options["csv_output"]
    plot_output = None
    if options["plot_output"]:
        plot_output = options["plot_output"]
    min_cells = False
    if options["min_cells"]:
        min_cells = int(options["min_cells"])
    target_res = None
    if options["max_size"]:
        target_res = float(options["max_size"])
    step = float(options["step"])

    global temp_resamp_map, temp_variance_map
    temp_resamp_map = "temp_resamp_map_%d" % os.getpid()
    temp_variance_map = "temp_variance_map_%d" % os.getpid()
    resolutions = []
    variances = []

    region = gscript.parse_command("g.region", flags="g")
    cells = int(region["cells"])
    res = (float(region["nsres"]) + float(region["ewres"])) / 2
    north = float(region["n"])
    south = float(region["s"])
    west = float(region["w"])
    east = float(region["e"])

    # print integer resolutions without decimals in the CSV output
    if res % 1 == 0 and step % 1 == 0:
        template_string = "%d,%f\n"
    else:
        template_string = "%f,%f\n"

    if min_cells:
        # coarsest resolution still leaving at least min_cells cells
        target_res_cells = int(
            sqrt(((east - west) * (north - south)) / min_cells))
        if target_res > target_res_cells:
            target_res = target_res_cells
            gscript.message(
                _("Max resolution leads to less cells than defined by 'min_cells' (%d)."
                  % min_cells))
            gscript.message(_("Max resolution reduced to %d" % target_res))

    # BUGFIX: the number of iterations is (target_res - res) / step; the
    # original expression "target_res - res / step" divided only res.
    # NOTE(review): target_res may still be None when max_size is unset --
    # confirm the module interface requires max_size or min_cells.
    nb_iterations = (target_res - res) / step
    if nb_iterations < 3:
        message = _("Less than 3 iterations. Cannot determine maxima.\n")
        message += _("Please increase max_size or reduce min_cells.")
        gscript.fatal(_(message))

    gscript.use_temp_region()

    gscript.message(_("Calculating variance at different resolutions"))
    while res <= target_res:
        gscript.percent(res, target_res, step)
        # average-resample to the current resolution, then measure the
        # mean local variance at that resolution
        gscript.run_command(
            "r.resamp.stats",
            input=input,
            output=temp_resamp_map,
            method="average",
            quiet=True,
            overwrite=True,
        )
        gscript.run_command(
            "r.neighbors",
            input=temp_resamp_map,
            method="variance",
            output=temp_variance_map,
            quiet=True,
            overwrite=True,
        )
        varianceinfo = gscript.parse_command("r.univar",
                                             map_=temp_variance_map,
                                             flags="g",
                                             quiet=True)
        resolutions.append(res)
        variances.append(float(varianceinfo["mean"]))
        res += step
        # coarsen the region for the next iteration
        region = gscript.parse_command("g.region",
                                       res=res,
                                       n=north,
                                       s=south,
                                       w=west,
                                       e=east,
                                       flags="ag")
        cells = int(region["cells"])

    indices, differences = FindMaxima(variances)
    max_resolutions = [resolutions[x] for x in indices]
    gscript.message(_("resolution,min_diff"))
    for i in range(len(max_resolutions)):
        print("%g,%g" % (max_resolutions[i], differences[i]))

    if output:
        # write the full resolution/variance table as CSV
        with open(output, "w") as of:
            of.write("resolution,variance\n")
            for resolution, variance in zip(resolutions, variances):
                of.write(template_string % (resolution, variance))

    if plot_output:
        plt.plot(resolutions, variances)
        plt.xlabel("Resolution")
        plt.ylabel("Variance")
        plt.grid(True)
        # "-" means show interactively instead of saving to a file
        if plot_output == "-":
            plt.show()
        else:
            plt.savefig(plot_output)
def main():
    """Do the main work.

    Produces SWD (samples-with-data) files for MaxEnt by exporting the
    environmental parameter maps with r.stats: one background file
    (bgr_output) and, optionally, one per-species file (species_output).
    Optionally writes an alias->parameter mapping file (alias_output).
    """

    alias_output = options["alias_output"]

    bgr_mask = options["bgr_mask"]

    null_value = options["null_value"]

    bgr_output = options["bgr_output"]
    species_output = options["species_output"]

    # alias: short column names; parameters: matching raster map names
    alias, parameters = parse_bgr_input(
        options["alias_input"], options["env_maps"], options["alias_names"]
    )

    species_dict = parse_species_input(
        options["species_masks"], options["species_names"]
    )

    # Check if a mask already exists; if so, rename it out of the way
    if RasterRow("MASK", Mapset().name).exist():
        gscript.verbose(
            _("A mask allready exists. Renaming existing mask to old_MASK...")
        )
        gscript.run_command(
            "g.rename", rast="MASK,{}_MASK".format(TMP_NAME), quiet=True
        )

    # Build parameter header if necessary
    header = ",".join(alias)

    # Write alias output if requested
    if alias_output:
        with open(alias_output, "w") as alias_out:
            for idx, name in enumerate(alias):
                alias_out.write("{},{}\n".format(name, parameters[idx]))

    # Check if species output is requested and produce it
    if species_output and species_dict:
        # Write header to species output SWD file
        species_header = "species,X,Y,{}\n".format(header)

        with open(species_output, "w") as sp_out:
            sp_out.write(species_header)

        # Process each species mask in turn
        for species in species_dict:

            species_map = species_dict[species]
            # Zoom region to match the species map if requested
            if flags["z"]:
                gscript.verbose(
                    _("Zooming region to species {} temporarily.".format(species))
                )
                gscript.use_temp_region()
                gscript.run_command(
                    "g.region", align="@".join(species_map), zoom="@".join(species_map)
                )
            #
            # Apply the species mask so r.stats samples only its cells
            gscript.run_command(
                "r.mask", raster="@".join(species_map), overwrite=True, quiet=True
            )

            # Export data using r.stats (one line per non-null cell)
            gscript.verbose(_("Producing output for species {}".format(species)))
            stats = gscript.pipe_command(
                "r.stats",
                flags="1gN",
                verbose=True,
                input=",".join(parameters),
                separator=",",
                null_value=null_value,
            )

            with open(species_output, "a") as sp_out:
                for row in stats.stdout:
                    sp_out.write("{},{}".format(species, gscript.decode(row)))

            # Undo the temporary zoom region if it had been requested
            if flags["z"]:
                gscript.del_temp_region()
            # Remove mask
            gscript.run_command("r.mask", flags="r", quiet=True)

    # Write header to background output SWD file
    bgr_header = "bgr,X,Y,{}\n".format(",".join(alias))

    with open(bgr_output, "w") as bgr_out:
        bgr_out.write(bgr_header)

    # Process map data for background
    # Apply the background mask if one was given
    if bgr_mask:
        gscript.verbose(
            _("Using map {} as mask for the background landscape...".format(bgr_mask))
        )
        # Apply mask
        gscript.run_command("r.mask", raster=bgr_mask, overwrite=True, quiet=True)
    #
    # Export data using r.stats
    gscript.verbose(_("Producing output for background landscape"))
    stats = gscript.pipe_command(
        "r.stats",
        flags="1gN",
        input=",".join(parameters),
        separator=",",
        null_value=null_value,
    )

    with open(bgr_output, "a") as bgr_out:
        for row in stats.stdout:
            bgr_out.write("bgr,{}".format(gscript.decode(row)))

    cleanup()
def main():
    """Sample a stack of GeoTIFFs at the locations of an input vector.

    Imports the vector, then for each *.tif in tiff_dir imports the
    raster, samples it at the vector points into a new attribute column,
    removes the raster again, and finally exports the attribute table
    as CSV.
    """
    # Starting
    t0 = DT.now()
    print("\n   %s - Beginning process" % t0)

    # Import vector
    input_vect = options['input']
    tiff_dir = options['tiff_dir']
    output_csv = options['output_csv']
    vect_name = os.path.splitext(os.path.split(input_vect)[1])[0]

    # Make sure no invalid characters in map name
    vect_name = vect_name.replace(" ", "").replace(".", "_")
    gscript.run_command('v.import',
                        input_=input_vect,
                        output=vect_name,
                        overwrite=True)

    # Assume Geotiffs are named with tif extension (lowercase)
    tiff_list = glob(os.path.join(tiff_dir, "*.tif"))
    for tiff_path in tiff_list:
        rast_name = os.path.splitext(os.path.split(tiff_path)[1])[0]
        # Make sure no invalid characters in map name.
        # BUGFIX: the original called rast_name.replace("-", "_") without
        # assigning the result, so '-' was never actually replaced.
        rast_name = (rast_name.replace(" ", "")
                     .replace(".", "_")
                     .replace("-", "_"))
        gscript.run_command('r.import',
                            input_=tiff_path,
                            output=rast_name,
                            overwrite=True)
        # Always set the region to match the current raster
        gscript.run_command('g.region', rast=rast_name, flags="a")

        # Column name: first and third '_'-separated components of the
        # raster name.
        # NOTE(review): assumes the file name has at least 3 components --
        # confirm against the actual naming scheme.
        components = rast_name.split("_")
        col_name = "_".join((components[0], components[2]))

        # Extract raster values at all vector points into the new column
        gscript.run_command('v.what.rast',
                            map_=vect_name,
                            raster=rast_name,
                            column=col_name)
        # Remove the raster again to save space
        gscript.run_command('g.remove',
                            type_="rast",
                            name=rast_name,
                            flags="f")

    # Loop complete, now export vector attrib table to CSV
    gscript.run_command('v.out.ogr',
                        input_=vect_name,
                        output=output_csv,
                        format="CSV")

    # Finished
    t1 = DT.now()
    ttl_secs = (t1 - t0).seconds
    print("\n   %s - Process complete after %d seconds" % (t0, ttl_secs))
Beispiel #47
0
def main():
    """Join a column subset of another attribute table into a vector map.

    All inputs come from the global ``options`` dict filled by the GRASS
    parser: the target map/layer, the join columns on both sides, and an
    optional comma-separated subset of columns to transfer.  Each selected
    column is added to the map table (if missing) and filled via a
    correlated SQL subquery.

    :return: 0 on success (fatal errors abort via ``grass.fatal``)
    """
    map = options["map"]
    layer = options["layer"]
    column = options["column"]
    otable = options["other_table"]
    ocolumn = options["other_column"]
    if options["subset_columns"]:
        scolumns = options["subset_columns"].split(",")
    else:
        scolumns = None

    # Resolve the layer's DB connection; this also verifies the map exists.
    try:
        f = grass.vector_layer_db(map, layer)
    except CalledModuleError:
        sys.exit(1)

    # Include mapset into the name, so we avoid multiple messages about
    # found in more mapsets. The following generates an error message, while the code
    # above does not. However, the above checks that the map exists, so we don't
    # check it here.
    map = grass.find_file(map, element="vector")["fullname"]

    maptable = f["table"]
    database = f["database"]
    driver = f["driver"]

    if driver == "dbf":
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(
            _("There is no table connected to this map. Unable to join any column."
              ))

    # check if column is in map table
    if column not in grass.vector_columns(map, layer):
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (column, maptable))

    # describe other table
    all_cols_ot = grass.db_describe(otable, driver=driver,
                                    database=database)["cols"]

    # check if ocolumn is on other table
    if ocolumn not in [ocol[0] for ocol in all_cols_ot]:
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (ocolumn, otable))

    # determine columns subset from other table
    if not scolumns:
        # select all columns from other table
        cols_to_add = all_cols_ot
    else:
        cols_to_add = []
        # check if scolumns exists in the other table
        for scol in scolumns:
            found = False
            for col_ot in all_cols_ot:
                if scol == col_ot[0]:
                    found = True
                    cols_to_add.append(col_ot)
                    break
            if not found:
                grass.warning(
                    _("Column <%s> not found in table <%s>") % (scol, otable))

    all_cols_tt = grass.vector_columns(map, int(layer)).keys()
    # This is used for testing presence (and potential name conflict) with
    # the newly added columns, but the test needs to case-insensitive since it
    # is SQL, so we lowercase the names here and in the test.
    all_cols_tt = [name.lower() for name in all_cols_tt]

    # Correlated subquery: for each row of the map table, pull the value
    # from the other table whose join key matches.
    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in cols_to_add:
        # skip the vector column which is used for join
        colname = col[0]
        if colname == column:
            continue

        # col is (name, type[, length]); decide whether to keep the length
        # suffix in the column definition.
        use_len = False
        if len(col) > 2:
            use_len = True
            # Sqlite 3 does not support the precision number any more
            if driver == "sqlite":
                use_len = False
            # MySQL - expect format DOUBLE PRECISION(M,D), see #2792
            elif driver == "mysql" and col[1] == "DOUBLE PRECISION":
                use_len = False

        if use_len:
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]

        colspec = "%s %s" % (colname, coltype)

        # add only the new column to the table
        if colname.lower() not in all_cols_tt:
            try:
                grass.run_command("v.db.addcolumn",
                                  map=map,
                                  columns=colspec,
                                  layer=layer)
            except CalledModuleError:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(
            table=maptable,
            column=column,
            otable=otable,
            ocolumn=ocolumn,
            colname=colname,
        )
        grass.debug(stmt, 1)
        grass.verbose(
            _("Updating column <%s> of vector map <%s>...") % (colname, map))
        try:
            grass.write_command("db.execute",
                                stdin=stmt,
                                input="-",
                                database=database,
                                driver=driver)
        except CalledModuleError:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history
    grass.vector_history(map)

    return 0
def main():
    """Compute cost-distance connectivity between habitat patches.

    Rasterises the patch vector map, runs a cost-distance analysis from
    every patch boundary within ``cutoff``, and writes a network of edges
    and vertices (optionally also shortest paths and Conefor text files)
    describing the pairwise patch connections.

    All inputs are read from the global ``options``/``flags`` dicts
    filled by the GRASS parser; fatal errors abort via ``grass.fatal``.
    """

    # Parse input options:
    patch_map = options['input']
    patches = patch_map.split('@')[0]
    patches_mapset = patch_map.split('@')[1] if len(patch_map.split('@')) > 1 else None
    pop_proxy = options['pop_proxy']
    layer = options['layer']
    costs = options['costs']
    cutoff = float(options['cutoff'])
    border_dist = int(options['border_dist'])
    conefor_dir = options['conefor_dir']
    memory = int(options['memory'])

    # Parse output options:
    prefix = options['prefix']
    edge_map = '{}_edges'.format(prefix)
    vertex_map = '{}_vertices'.format(prefix)
    shortest_paths = '{}_shortest_paths'.format(prefix)

    # Parse flags:
    p_flag = flags['p']
    t_flag = flags['t']
    r_flag = flags['r']

    # k-flag switches r.cost to knight's-move neighbourhood
    dist_flags = 'kn' if flags['k'] else 'n'

    lin_cat = 1
    zero_dist = None

    folder = grass.tempdir()
    if not os.path.exists(folder):
        os.makedirs(folder)

    # Setup counter for progress message
    counter = 0

    # Check if location is lat/lon (only in lat/lon geodesic distance
    # measuring is supported)
    if grass.locn_is_latlong():
        grass.verbose("Location is lat/lon: Geodesic distance \
                      measure is used")

    # Check if prefix is legal GRASS name
    if not grass.legal_name(prefix):
        grass.fatal('{} is not a legal name for GRASS \
                    maps.'.format(prefix))

    if prefix[0].isdigit():
        # FIX: the original message had no '{}' placeholder, so the
        # .format(prefix) call was a no-op and the offending name was
        # never reported to the user.
        grass.fatal('Table names starting with a digit are not SQL \
                    compliant ({}).'.format(prefix))

    # Check if output maps not already exists or could be overwritten
    for output in [edge_map, vertex_map, shortest_paths]:
        if grass.db.db_table_exist(output) and not grass.overwrite():
            grass.fatal('Vector map <{}> already exists'.format(output))

    # Check if input has required attributes
    in_db_connection = grass.vector.vector_db(patch_map)
    if not int(layer) in in_db_connection.keys():
        grass.fatal('No attribute table connected vector map {} at \
                    layer {}.'.format(patches, layer))

    #Check if cat column exists
    pcols = grass.vector.vector_columns(patch_map, layer=layer)

    #Check if cat column exists
    if 'cat' not in pcols.keys():
        grass.fatal('Cannot find the reqired column cat in vector map \
                    {}.'.format(patches))

    #Check if pop_proxy column exists
    if pop_proxy not in pcols.keys():
        grass.fatal('Cannot find column {} in vector map \
                    {}'.format(pop_proxy, patches))

    #Check if pop_proxy column is numeric type
    if not pcols[pop_proxy]['type'] in ['INTEGER', 'REAL',
                                        'DOUBLE PRECISION']:
        grass.fatal('Column {} is of type {}. Only numeric types \
                    (integer or double precision) \
                    allowed!'.format(pop_proxy,
                                     pcols[pop_proxy]['type']))

    #Check if pop_proxy column does not contain values <= 0
    pop_vals = np.fromstring(grass.read_command('v.db.select',
                                                flags='c',
                                                map=patches,
                                                columns=pop_proxy,
                                                nv=-9999
                                                ).rstrip('\n'),
                             dtype=float, sep='\n')

    if np.min(pop_vals) <= 0:
        # FIX: removed a stray '}' before the closing quote; with it the
        # str.format call raised "ValueError: Single '}' encountered in
        # format string" instead of emitting this fatal message.
        grass.fatal('Column {} contains values <= 0 or NULL. Neither \
                    values <= 0 nor NULL allowed!'.format(pop_proxy))

    ##############################################
    # Use pygrass region instead of grass.parse_command !?!
    start_reg = grass.parse_command('g.region', flags='ugp')

    max_n = start_reg['n']
    min_s = start_reg['s']
    max_e = start_reg['e']
    min_w = start_reg['w']
    # cost_nsres = reg['nsres']
    # cost_ewres = reg['ewres']

    # Rasterize patches
    # http://www.gdal.org/gdal_tutorial.html
    # http://geoinformaticstutorial.blogspot.no/2012/11/convert-
    # shapefile-to-raster-with-gdal.html
    if t_flag:
        # Rasterize patches with "all-touched" mode using GDAL
        # Read region-settings (not needed canuse max_n, min_s, max_e,
        # min_w nsres, ewres...
        prast = os.path.join(folder, 'patches_rast.tif')

        # Check if GDAL-GRASS plugin is installed
        if ogr.GetDriverByName('GRASS'):
            #With GDAL-GRASS plugin
            #Locate file for patch vector map
            pfile = grass.parse_command('g.findfile', element='vector',
                                        file=patches,
                                        mapset=patches_mapset)['file']
            pfile = os.path.join(pfile, 'head')

        else:
            # Without GDAL-GRASS-plugin
            grass.warning("Cannot find GDAL-GRASS plugin. Consider \
                          installing it in order to save time for \
                          all-touched rasterisation")
            pfile = os.path.join(folder, 'patches_vect.gpkg')
            # Export patch vector map to temp-file in a GDAL-readable
            # format (shp)
            grass.run_command('v.out.ogr', flags='m', quiet=True,
                              input=patch_map, type='area',
                              layer=layer, output=pfile,
                              lco='GEOMETRY_NAME=geom')

        # Rasterize vector map with all-touched option
        os.system('gdal_rasterize -l {} -at -tr {} {} \
                  -te {} {} {} {} -ot Uint32 -a cat \
                  {} {} -q'.format(patches, start_reg['ewres'],
                                   start_reg['nsres'],
                                   start_reg['w'],
                                   start_reg['s'],
                                   start_reg['e'],
                                   start_reg['n'],
                                   pfile,
                                   prast))

        if not ogr.GetDriverByName('GRASS'):
            # Remove vector temp-file
            os.remove(os.path.join(folder, 'patches_vect.gpkg'))

        # Import rasterized patches
        grass.run_command('r.external', flags='o',
                          quiet=True,
                          input=prast,
                          output='{}_patches_pol'.format(TMP_PREFIX))

    else:
        # Simple rasterisation (only area)
        # in G 7.6 also with support for 'centroid'
        if float(grass.version()['version'][:3]) >= 7.6:
            conv_types = ['area', 'centroid']
        else:
            conv_types = ['area']
        grass.run_command('v.to.rast', quiet=True,
                          input=patches, use='cat',
                          type=conv_types,
                          output='{}_patches_pol'.format(TMP_PREFIX))

    # Extract boundaries from patch raster map: a cell is boundary if any
    # 4-neighbour is NULL or belongs to a different patch category.
    grass.run_command('r.mapcalc', expression='{p}_patches_boundary=if(\
    {p}_patches_pol,\
    if((\
    (isnull({p}_patches_pol[-1,0])||| \
    {p}_patches_pol[-1,0]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[0,1])||| \
    {p}_patches_pol[0,1]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[1,0])||| \
    {p}_patches_pol[1,0]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[0,-1])||| \
    {p}_patches_pol[0,-1]!={p}_patches_pol)), \
    {p}_patches_pol,null()), null())'.format(p=TMP_PREFIX), quiet=True)

    rasterized_cats = grass.read_command('r.category', separator='newline',
                                         map='{p}_patches_boundary'.format(p=TMP_PREFIX)
                                         ).replace('\t','').strip('\n')
    rasterized_cats = list(map(int, set([x for x in rasterized_cats.split('\n')  if x != ''])))

    #Init output vector maps if they are requested by user
    network = VectorTopo(edge_map)
    network_columns = [(u'cat', 'INTEGER PRIMARY KEY'),
                       (u'from_p', 'INTEGER'),
                       (u'to_p', 'INTEGER'),
                       (u'min_dist', 'DOUBLE PRECISION'),
                       (u'dist', 'DOUBLE PRECISION'),
                       (u'max_dist', 'DOUBLE PRECISION')]
    network.open('w',
                 tab_name=edge_map,
                 tab_cols=network_columns)

    vertex = VectorTopo(vertex_map)
    vertex_columns = [(u'cat', 'INTEGER PRIMARY KEY'),
                      (pop_proxy, 'DOUBLE PRECISION'),]
    vertex.open('w',
                tab_name=vertex_map,
                tab_cols=vertex_columns)

    if p_flag:
        # Init cost paths file for start-patch
        grass.run_command('v.edit', quiet=True, map=shortest_paths,
                          tool='create')
        grass.run_command('v.db.addtable', quiet=True,
                          map=shortest_paths,
                          columns="cat integer,\
                                   from_p integer,\
                                   to_p integer,\
                                   dist_min double precision,\
                                   dist double precision,\
                                   dist_max double precision")

    start_region_bbox = Bbox(north=float(max_n), south=float(min_s),
                             east=float(max_e), west=float(min_w))
    vpatches = VectorTopo(patches, mapset=patches_mapset)
    vpatches.open('r', layer=int(layer))

    ###Loop through patches
    vpatch_ids = np.array(vpatches.features_to_wkb_list(feature_type="centroid",
                                                        bbox=start_region_bbox),
                          dtype=[('vid', 'uint32'),
                                 ('cat', 'uint32'),
                                 ('geom', '|S10')])
    cats = set(vpatch_ids['cat'])
    n_cats = len(cats)
    if n_cats < len(vpatch_ids['cat']):
        grass.verbose('At least one MultiPolygon found in patch map.\n \
                      Using average coordinates of the centroids for \
                      visual representation of the patch.')

    for cat in cats:
        if cat not in rasterized_cats:
            grass.warning('Patch {} has not been rasterized and will \
                          therefore not be treated as part of the \
                          network. Consider using t-flag or change \
                          resolution.'.format(cat))

            continue
        grass.verbose("Calculating connectivity-distances for patch \
                      number {}".format(cat))

        # Filter
        from_vpatch = vpatch_ids[vpatch_ids['cat'] == cat]

        # Get patch ID
        if from_vpatch['vid'].size == 1:
            from_centroid = Centroid(v_id=int(from_vpatch['vid']),
                                     c_mapinfo=vpatches.c_mapinfo)
            from_x = from_centroid.x
            from_y = from_centroid.y

            # Get centroid
            if not from_centroid:
                continue
        else:
            # MultiPolygon: represent the patch by the mean of its
            # centroid coordinates.
            xcoords = []
            ycoords = []
            for f_p in from_vpatch['vid']:
                from_centroid = Centroid(v_id=int(f_p),
                                         c_mapinfo=vpatches.c_mapinfo)
                xcoords.append(from_centroid.x)
                ycoords.append(from_centroid.y)

                # Get centroid
                if not from_centroid:
                    continue
            from_x = np.average(xcoords)
            from_y = np.average(ycoords)

        # Get BoundingBox
        from_bbox = grass.parse_command('v.db.select', map=patch_map,
                                        flags='r',
                                        where='cat={}'.format(cat))

        attr_filter = vpatches.table.filters.select(pop_proxy)
        attr_filter = attr_filter.where("cat={}".format(cat))
        proxy_val = vpatches.table.execute().fetchone()

        # Prepare start patch
        start_patch = '{}_patch_{}'.format(TMP_PREFIX, cat)
        reclass_rule = grass.encode('{} = 1\n* = NULL'.format(cat))
        recl = grass.feed_command('r.reclass', quiet=True,
                                  input='{}_patches_boundary'.format(TMP_PREFIX),
                                  output=start_patch,
                                  rules='-')
        recl.stdin.write(reclass_rule)
        recl.stdin.close()
        recl.wait()

        # Check if patch was rasterised (patches smaller raster resolution and close to larger patches may not be rasterised)
        #start_check = grass.parse_command('r.info', flags='r', map=start_patch)
        #start_check = grass.parse_command('r.univar', flags='g', map=start_patch)
        #print(start_check)
        """if start_check['min'] != '1':
            grass.warning('Patch {} has not been rasterized and will \
                          therefore not be treated as part of the \
                          network. Consider using t-flag or change \
                          resolution.'.format(cat))

            grass.run_command('g.remove', flags='f', vector=start_patch,
                              raster=start_patch, quiet=True)
            grass.del_temp_region()
            continue"""

        # Prepare stop patches
        ############################################
        reg = grass.parse_command('g.region', flags='ug', quiet=True,
                                  raster=start_patch,
                                  n=float(from_bbox['n']) + float(cutoff),
                                  s=float(from_bbox['s']) - float(cutoff),
                                  e=float(from_bbox['e']) + float(cutoff),
                                  w=float(from_bbox['w']) - float(cutoff),
                                  align='{}_patches_pol'.format(TMP_PREFIX))

        # NOTE(review): reg/start_reg values are strings, so these are
        # lexicographic comparisons, not numeric ones (e.g. '9.5' > '10.0'
        # is True), and the e/w branches look inverted relative to n/s.
        # Confirm intended clipping behaviour upstream before changing.
        north = reg['n'] if max_n > reg['n'] else max_n
        south = reg['s'] if min_s < reg['s'] else min_s
        east = reg['e'] if max_e < reg['e'] else max_e
        west = reg['w'] if min_w > reg['w'] else min_w

        # Set region to patch search radius
        grass.use_temp_region()
        grass.run_command('g.region', quiet=True,
                          n=north, s=south, e=east, w=west,
                          align='{}_patches_pol'.format(TMP_PREFIX))

        # Create buffer around start-patch as a mask
        # for cost distance analysis
        grass.run_command('r.buffer', quiet=True,
                          input=start_patch,
                          output='MASK', distances=cutoff)
        grass.run_command('r.mapcalc', quiet=True,
                          expression='{pf}_patch_{p}_neighbours_contur=\
                                     if({pf}_patches_boundary=={p},\
                                     null(),\
                                     {pf}_patches_boundary)'.format(pf=TMP_PREFIX, p=cat))
        grass.run_command('r.mask', flags='r', quiet=True)

        # Calculate cost distance
        cost_distance_map = '{}_patch_{}_cost_dist'.format(prefix, cat)
        grass.run_command('r.cost', flags=dist_flags, quiet=True,
                          overwrite=True, input=costs,
                          output=cost_distance_map,
                          start_rast=start_patch, memory=memory)

        #grass.run_command('g.region', flags='up')
        # grass.raster.raster_history(cost_distance_map)
        cdhist = History(cost_distance_map)
        cdhist.clear()
        cdhist.creator = os.environ['USER']
        cdhist.write()
        # History object cannot modify description
        grass.run_command('r.support',
                          map=cost_distance_map,
                          description='Generated by r.connectivity.distance',
                          history=os.environ['CMDLINE'])


        # Export distance at boundaries
        maps = '{0}_patch_{1}_neighbours_contur,{2}_patch_{1}_cost_dist'
        # FIX: dropped an accidental trailing comma that made 'maps' a
        # 1-tuple instead of a string (GRASS happened to re-join the
        # tuple, which masked the slip).
        maps = maps.format(TMP_PREFIX, cat, prefix)

        connections = grass.encode(grass.read_command('r.stats',
                                                          flags='1ng',
                                                          quiet=True,
                                                          input=maps,
                                                          separator=';').rstrip('\n'))
        if connections:
            con_array = np.genfromtxt(BytesIO(connections), delimiter=';',
                                      dtype=None,
                                      names=['x', 'y', 'cat', 'dist'])
        else:
            grass.warning('No connections for patch {}'.format(cat))

            # Write centroid to vertex map
            vertex.write(Point(from_x, from_y),
                         cat=int(cat),
                         attrs=proxy_val)
            vertex.table.conn.commit()

            # Remove temporary map data
            grass.run_command('g.remove', quiet=True, flags='f',
                              type=['raster', 'vector'],
                              pattern="{}*{}*".format(TMP_PREFIX, cat))
            grass.del_temp_region()
            continue

        #Find closest points on neigbour patches
        to_cats = set(np.atleast_1d(con_array['cat']))
        to_coords = []
        for to_cat in to_cats:
            connection = con_array[con_array['cat'] == to_cat]
            connection.sort(order=['dist'])
            # Use the border_dist-th closest boundary pixel (or the last
            # one if the connection has fewer pixels).
            pixel = border_dist if len(connection) > border_dist else len(connection) - 1
            # closest_points_x = connection['x'][pixel]
            # closest_points_y = connection['y'][pixel]
            closest_points_to_cat = to_cat
            closest_points_min_dist = connection['dist'][0]
            closest_points_dist = connection['dist'][pixel]
            closest_points_max_dist = connection['dist'][-1]
            to_patch_ids = vpatch_ids[vpatch_ids['cat'] == int(to_cat)]['vid']

            if len(to_patch_ids) == 1:
                to_centroid = Centroid(v_id=to_patch_ids,
                                       c_mapinfo=vpatches.c_mapinfo)
                to_x = to_centroid.x
                to_y = to_centroid.y
            elif len(to_patch_ids) >= 1:
                # NOTE(review): with len == 0 neither branch runs and
                # to_x/to_y keep the previous iteration's values (or are
                # unbound on the first pass) — confirm this cannot occur.
                xcoords = []
                ycoords = []
                for t_p in to_patch_ids:
                    to_centroid = Centroid(v_id=int(t_p),
                                           c_mapinfo=vpatches.c_mapinfo)
                    xcoords.append(to_centroid.x)
                    ycoords.append(to_centroid.y)

                    # Get centroid
                    if not to_centroid:
                        continue
                to_x = np.average(xcoords)
                to_y = np.average(ycoords)

            to_coords.append('{},{},{},{},{},{}'.format(connection['x'][0],
                                                  connection['y'][0],
                                                  to_cat,
                                                  closest_points_min_dist,
                                                  closest_points_dist,
                                                  closest_points_max_dist
                                                        ))

            #Save edges to network dataset
            if closest_points_dist <= 0:
                zero_dist = 1

            # Write data to network
            network.write(Line([(from_x, from_y),
                                (to_x, to_y)]),
                          cat=lin_cat,
                          attrs=(cat,
                                 int(closest_points_to_cat),
                                 closest_points_min_dist,
                                 closest_points_dist,
                                 closest_points_max_dist,))
            network.table.conn.commit()

            lin_cat = lin_cat + 1

        # Save closest points and shortest paths through cost raster as
        # vector map (r.drain limited to 1024 points) if requested
        if p_flag:
            grass.verbose('Extracting shortest paths for patch number \
                          {}...'.format(cat))

            points_n = len(to_cats)

            tiles = int(points_n / 1024.0)
            rest = points_n % 1024
            if not rest == 0:
                tiles = tiles + 1

            tile_n = 0
            while tile_n < tiles:
                tile_n = tile_n + 1
                #Import closest points for start-patch in 1000er blocks
                sp = grass.feed_command('v.in.ascii', flags='nr',
                                  overwrite=True, quiet=True,
                                  input='-', stderr=subprocess.PIPE,
                                  output="{}_{}_cp".format(TMP_PREFIX,
                                                           cat),
                                  separator=",",
                                  columns="x double precision,\
                                           y double precision,\
                                           to_p integer,\
                                           dist_min double precision,\
                                           dist double precision,\
                                           dist_max double precision")
                sp.stdin.write(grass.encode("\n".join(to_coords)))
                sp.stdin.close()
                sp.wait()

                # Extract shortest paths for start-patch in chunks of
                # 1024 points
                cost_paths = "{}_{}_cost_paths".format(TMP_PREFIX, cat)
                start_points = "{}_{}_cp".format(TMP_PREFIX, cat)
                grass.run_command('r.drain', overwrite=True, quiet=True,
                                  input=cost_distance_map,
                                  output=cost_paths,
                                  drain=cost_paths,
                                  start_points=start_points)

                grass.run_command('v.db.addtable',
                                  map=cost_paths,
                                  quiet=True,
                                  columns="cat integer,\
                                   from_p integer,\
                                   to_p integer,\
                                   dist_min double precision,\
                                   dist double precision,\
                                   dist_max double precision")
                grass.run_command('v.db.update', map=cost_paths,
                                  column='from_p', value=cat,
                                  quiet=True)
                grass.run_command('v.distance', quiet=True,
                                  from_=cost_paths,
                                  to=start_points,
                                  upload='to_attr',
                                  column='to_p',
                                  to_column='to_p')
                grass.run_command('v.db.join', quiet=True,
                                  map=cost_paths,
                                  column='to_p', other_column='to_p',
                                  other_table=start_points,
                                  subset_columns='dist_min,dist,dist_max')

                #grass.run_command('v.info', flags='c',
                #                  map=cost_paths)
                grass.run_command('v.patch', flags='ae', overwrite=True,
                                  quiet=True,
                                  input=cost_paths,
                                  output=shortest_paths)

                # Remove temporary map data
                grass.run_command('g.remove', quiet=True, flags='f',
                                  type=['raster', 'vector'],
                                  pattern="{}*{}*".format(TMP_PREFIX,
                                                          cat))

        # Remove temporary map data for patch
        if r_flag:
            grass.run_command('g.remove', flags='f', type='raster',
                              name=cost_distance_map,
                              quiet=True)

        vertex.write(Point(from_x, from_y),
                     cat=int(cat),
                     attrs=proxy_val)

        vertex.table.conn.commit()

        # Print progress message
        grass.percent(i=int((float(counter) / n_cats) * 100),
                      n=100,
                      s=3)

        # Update counter for progress message
        counter = counter + 1

    if zero_dist:
        grass.warning('Some patches are directly adjacent to others. \
                       Minimum distance set to 0.0000000001')

    # Close vector maps and build topology
    network.close()
    vertex.close()

    # Add vertex attributes
    # grass.run_command('v.db.addtable', map=vertex_map)
    # grass.run_command('v.db.join', map=vertex_map, column='cat',
    #                   other_table=in_db_connection[int(layer)]['table'],
    #                   other_column='cat', subset_columns=pop_proxy,
    #                   quiet=True)

    # Add history and meta data to produced maps
    grass.run_command('v.support', flags='h', map=edge_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    grass.run_command('v.support', flags='h', map=vertex_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    if p_flag:
        grass.run_command('v.support', flags='h', map=shortest_paths,
                          person=os.environ['USER'],
                          cmdhist=os.environ['CMDLINE'])

    # Output also Conefor files if requested
    if conefor_dir:
        # Collapse the directed edge list into undirected pairs
        # (p_from < p_to) averaging the distances of both directions.
        query = """SELECT p_from, p_to, avg(dist) FROM
                 (SELECT
                 CASE
                 WHEN from_p > to_p THEN to_p
                 ELSE from_p END AS p_from,
                    CASE
                 WHEN from_p > to_p THEN from_p
                 ELSE to_p END AS p_to,
                 dist
                 FROM {}) AS x
                 GROUP BY p_from, p_to""".format(edge_map)
        with open(os.path.join(conefor_dir,
                               'undirected_connection_file'),
                  'w') as edges:
            edges.write(grass.read_command('db.select', sql=query,
                                           separator=' '))
        with open(os.path.join(conefor_dir,
                               'directed_connection_file'),
                  'w') as edges:
            edges.write(grass.read_command('v.db.select', map=edge_map,
                                           separator=' ', flags='c'))
        with open(os.path.join(conefor_dir, 'node_file'), 'w') as nodes:
            nodes.write(grass.read_command('v.db.select',
                                           map=vertex_map,
                                           separator=' ', flags='c'))
Beispiel #49
0
def main():
    """Compute Renyi entropy maps for the requested alpha values and
    derive the diversity index maps selected through the module flags
    (species richness, Shannon, ENS, evenness, inverse Simpson,
    Gini-Simpson and total count).
    """

    #--------------------------------------------------------------------------
    # Variables
    #--------------------------------------------------------------------------

    # Layers
    grass.info("Preparing...")
    OUT = options['output']
    IN = options['input']
    IN = IN.split(',')
    CheckLayer(IN)

    # Diversity indices requested by the user
    flag_r = flags['r']
    flag_s = flags['s']
    flag_h = flags['h']
    flag_e = flags['e']
    flag_p = flags['p']
    flag_g = flags['g']
    flag_n = flags['n']
    flag_t = flags['t']
    if options['alpha']:
        Qtmp = map(float, options['alpha'].split(','))
    else:
        Qtmp = map(float, [])
    Q = list(Qtmp)
    # BUGFIX: keep an *independent copy* of the user-requested alphas.
    # The original `Qoriginal = Q` aliased the same list object, so the
    # Q.append(...) calls below leaked into Qoriginal and the clean-up
    # branches further down ("if X not in Qoriginal: g.remove ...") could
    # never fire, leaving intermediate Renyi maps behind.
    Qoriginal = list(Q)

    #--------------------------------------------------------------------------
    # Create list of what need to be computed
    #--------------------------------------------------------------------------
    # NOTE(review): flag_r is normalised here but never read again in this
    # function -- presumably the Renyi maps are always kept; confirm.
    if not flag_r:
        flag_r = []
    # Each index flag implies one or more alpha values for the Renyi step.
    if flag_s and 0.0 not in Q:
        Q.append(0.0)
    if flag_h and 1.0 not in Q:
        Q.append(1.0)
    if flag_e and 0.0 not in Q:
        Q.append(0.0)
    if flag_e and 1.0 not in Q:
        Q.append(1.0)
    if flag_p and 2.0 not in Q:
        Q.append(2.0)
    if flag_g and 2.0 not in Q:
        Q.append(2.0)
    if flag_n and 1.0 not in Q:
        Q.append(1.0)

    #--------------------------------------------------------------------------
    # Renyi entropy
    #--------------------------------------------------------------------------
    tmp_1 = tmpname("sht")
    clean_rast.add(tmp_1)
    grass.info(
        "Computing the sum across all input layers (this may take a while)")
    grass.run_command("r.series",
                      quiet=True,
                      output=tmp_1,
                      input=IN,
                      method="sum")

    # Create mask for all areas with sum=0, incorporate existing mask
    replmask = replacemask(inmap=tmp_1)

    for n in range(len(Q)):
        grass.info(_("Computing alpha = {n}").format(n=Q[n]))
        Qn = str(Q[n])
        Qn = Qn.replace('.', '_')
        renyi = OUT + "_Renyi_" + Qn
        if Q[n] == 1:
            # If alpha = 1 the Renyi entropy is the Shannon entropy;
            # accumulate -p*log(p) layer by layer.
            # TODO See email 14-01-16 about avoiding loop below
            grass.mapcalc("$renyi = 0", renyi=renyi, quiet=True)
            for i in range(len(IN)):
                grass.info(
                    _("Computing map {j} from {n} maps").format(j=i + 1,
                                                                n=len(IN)))
                tmp_2 = tmpname("sht")
                clean_rast.add(tmp_2)
                grass.mapcalc(
                    "$tmp_2 = if($inl == 0, $renyi, $renyi - (($inl/$tmp_1) * log(($inl/$tmp_1))))",
                    renyi=renyi,
                    tmp_2=tmp_2,
                    inl=IN[i],
                    tmp_1=tmp_1,
                    quiet=True)
                grass.run_command("g.rename",
                                  raster="{0},{1}".format(tmp_2, renyi),
                                  overwrite=True,
                                  quiet=True)
        else:
            # If alpha != 1: Renyi = 1/(1-alpha) * log(sum(p_i^alpha))
            tmp_3 = tmpname("sht")
            clean_rast.add(tmp_3)
            tmp_4 = tmpname("sht")
            clean_rast.add(tmp_4)
            grass.mapcalc("$tmp_3 = 0", tmp_3=tmp_3, quiet=True)
            for i in range(len(IN)):
                grass.info(
                    _("Computing map {j} from {n} maps").format(j=i + 1,
                                                                n=len(IN)))
                grass.mapcalc(
                    "$tmp_4 = if($inl == 0, $tmp_3, $tmp_3 + (pow($inl/$tmp_1,$alpha)))",
                    tmp_3=tmp_3,
                    tmp_4=tmp_4,
                    tmp_1=tmp_1,
                    inl=IN[i],
                    alpha=Q[n],
                    quiet=True)
                grass.run_command("g.rename",
                                  raster="{0},{1}".format(tmp_4, tmp_3),
                                  overwrite=True,
                                  quiet=True)
            grass.mapcalc("$outl = (1/(1-$alpha)) * log($tmp_3)",
                          outl=renyi,
                          tmp_3=tmp_3,
                          alpha=Q[n],
                          quiet=True)
            grass.run_command("g.remove",
                              type="raster",
                              name=tmp_3,
                              flags="f",
                              quiet=True)

    #--------------------------------------------------------------------------
    # Species richness, add 0 for areas with no observations
    #--------------------------------------------------------------------------

    # Remove mask  (or restore if there was one)
    if replmask == 'nomask':
        grass.run_command("r.mask", flags="r", quiet=True)
    # BUGFIX: compare strings with != instead of `is not` (identity test on
    # str literals is implementation-dependent and a SyntaxWarning on >=3.8)
    elif replmask != 'samemask':
        grass.run_command("g.rename", raster=(replmask, 'MASK1'), quiet=True)
    if flag_s:
        grass.info("Computing species richness map")
        out_div = OUT + "_richness"
        in_div = OUT + "_Renyi_0_0"
        grass.mapcalc("$out_div = if($tmp_1==0,0,exp($in_div))",
                      out_div=out_div,
                      in_div=in_div,
                      tmp_1=tmp_1,
                      quiet=True)
        # Drop the Renyi(0) intermediate unless the user asked for it
        # explicitly or it is still needed for the evenness index.
        if 0.0 not in Qoriginal and not flag_e:
            grass.run_command("g.remove",
                              flags="f",
                              type="raster",
                              name=in_div,
                              quiet=True)

    # Create mask for all areas with sum=0, incorporate existing mask
    replmask = replacemask(inmap=tmp_1)

    #--------------------------------------------------------------------------
    # Shannon index
    #--------------------------------------------------------------------------
    if flag_h:
        grass.info("Computing Shannon index map")
        out_div = OUT + "_shannon"
        in_div = OUT + "_Renyi_1_0"
        # Copy (not rename) when the Renyi(1) map is still needed later.
        if 1.0 in Qoriginal or flag_e or flag_n:
            grass.run_command("g.copy", raster=(in_div, out_div), quiet=True)
        else:
            grass.run_command("g.rename", raster=(in_div, out_div), quiet=True)

    #--------------------------------------------------------------------------
    # Shannon Effective Number of Species (ENS)
    #--------------------------------------------------------------------------
    if flag_n:
        grass.info("Computing ENS map")
        out_div = OUT + "_ens"
        in_div = OUT + "_Renyi_1_0"
        grass.mapcalc("$out_div = exp($in_div)",
                      out_div=out_div,
                      in_div=in_div,
                      quiet=True)
        if 1.0 not in Qoriginal and not flag_e:
            grass.run_command("g.remove",
                              flags="f",
                              type="raster",
                              name=in_div,
                              quiet=True)

    #--------------------------------------------------------------------------
    # Eveness
    #--------------------------------------------------------------------------
    if flag_e:
        grass.info("Computing Eveness map")
        out_div = OUT + "_eveness"
        in_div1 = OUT + "_Renyi_0_0"
        in_div2 = OUT + "_Renyi_1_0"
        grass.mapcalc("$out_div = $in_div2 / $in_div1",
                      out_div=out_div,
                      in_div1=in_div1,
                      in_div2=in_div2,
                      quiet=True)
        if 0.0 not in Qoriginal:
            grass.run_command("g.remove",
                              flags="f",
                              type="raster",
                              name=in_div1,
                              quiet=True)
        if 1.0 not in Qoriginal:
            grass.run_command("g.remove",
                              flags="f",
                              type="raster",
                              name=in_div2,
                              quiet=True)
    #--------------------------------------------------------------------------
    # Inversed Simpson index
    #--------------------------------------------------------------------------
    if flag_p:
        grass.info("Computing inverse simpson map")
        out_div = OUT + "_invsimpson"
        in_div = OUT + "_Renyi_2_0"
        grass.mapcalc("$out_div = exp($in_div)",
                      out_div=out_div,
                      in_div=in_div,
                      quiet=True)
        if 2.0 not in Qoriginal and not flag_g:
            grass.run_command("g.remove",
                              flags="f",
                              type="raster",
                              name=in_div,
                              quiet=True)

    #--------------------------------------------------------------------------
    # Gini Simpson index
    #--------------------------------------------------------------------------
    if flag_g:
        grass.info("Computing Gini simpson map")
        out_div = OUT + "_ginisimpson"
        in_div = OUT + "_Renyi_2_0"
        grass.mapcalc("$out_div = 1.0 - (1.0 / exp($in_div))",
                      out_div=out_div,
                      in_div=in_div,
                      quiet=True)
        if 2.0 not in Qoriginal:
            grass.run_command("g.remove",
                              flags="f",
                              type="raster",
                              name=in_div,
                              quiet=True)

    #--------------------------------------------------------------------------
    # Remove mask  (or restore if there was one)
    #--------------------------------------------------------------------------
    if replmask == 'nomask':
        grass.run_command("r.mask", flags="r", quiet=True)
    # BUGFIX: same string-identity comparison fix as above
    elif replmask != 'samemask':
        grass.run_command("g.rename", raster=(replmask, 'MASK1'), quiet=True)

    #--------------------------------------------------------------------------
    # Total count (base unit, like individuals)
    #--------------------------------------------------------------------------
    if flag_t:
        rast = OUT + "_count"
        grass.run_command("g.rename", raster=(tmp_1, rast), quiet=True)
    else:
        grass.run_command("g.remove",
                          type="raster",
                          name=tmp_1,
                          flags="f",
                          quiet=True)
Beispiel #50
0
def main():
    """Detect clouds and (optionally) cloud shadows on a Sentinel-2 scene.

    Clouds are found with literature-based spectral rules applied to the
    (optionally rescaled) input bands and then vectorised and cleaned.
    Shadows are detected separately; to drop dark-surface
    misclassifications, the cloud mask is shifted along the sun direction
    (mean zenith/azimuth read from the scene metadata) in 100 m
    cloud-height steps, and only shadow polygons intersecting the
    best-fitting shifted cloud mask are kept.
    """

    # Temporary map names
    global tmp, t, mapset
    tmp = {}
    mapset = gscript.gisenv()["MAPSET"]
    processid = str(os.getpid())
    tmp["shadow_temp"] = "shadow_temp" + processid
    tmp["cloud_v"] = "cloud_v_" + processid
    tmp["shadow_temp_v"] = "shadow_temp_v_" + processid
    tmp["shadow_temp_mask"] = "shadow_temp_mask_" + processid
    tmp["centroid"] = "centroid_" + processid
    tmp["dissolve"] = "dissolve_" + processid
    tmp["delcat"] = "delcat_" + processid
    tmp["addcat"] = "addcat_" + processid
    tmp["cl_shift"] = "cl_shift_" + processid
    tmp["overlay"] = "overlay_" + processid

    # Check temporary map names are not existing maps
    for value in tmp.values():
        if gscript.find_file(value, element="vector", mapset=mapset)["file"]:
            gscript.fatal(
                ("Temporary vector map <{}> already exists.").format(value))
        if gscript.find_file(value, element="cell", mapset=mapset)["file"]:
            gscript.fatal(
                ("Temporary raster map <{}> already exists.").format(value))

    # Input files
    mtd_file = options["mtd_file"]
    metadata_file = options["metadata"]
    bands = {}
    error_msg = "Syntax error in the txt file. See the manual for further information about the right syntax."
    if options["input_file"] == "":
        bands["blue"] = options["blue"]
        bands["green"] = options["green"]
        bands["red"] = options["red"]
        bands["nir"] = options["nir"]
        bands["nir8a"] = options["nir8a"]
        bands["swir11"] = options["swir11"]
        bands["swir12"] = options["swir12"]
    else:
        # Parse "key=value" lines from the input text file
        txt_bands = []
        with open(options["input_file"], "r") as input_file:
            for line in input_file:
                a = line.split("=")
                if len(a) != 2:
                    gscript.fatal(error_msg)
                elif a[0] == "MTD_TL.xml" and not mtd_file:
                    mtd_file = a[1].strip()
                elif a[0] == "metadata" and not metadata_file:
                    metadata_file = a[1].strip()
                elif a[0] in [
                        "blue",
                        "green",
                        "red",
                        "nir",
                        "nir8a",
                        "swir11",
                        "swir12",
                ]:
                    txt_bands.append(a[0])
                    bands[a[0]] = a[1].strip()
            if len(txt_bands) < 7:
                gscript.fatal((
                    "One or more bands are missing in the input text file.\n Only these bands have been found: {}"
                ).format(txt_bands))
            if mtd_file and metadata_file != "default":
                gscript.fatal((
                    "Metadata json file and mtd_file are both given as input text files.\n Only one of these should be specified."
                ))

    # we want cloud and shadows: check input and output for shadow mask
    if not flags["c"]:
        if mtd_file != "":
            if not os.path.isfile(mtd_file):
                gscript.fatal(
                    "Metadata file <{}> not found. Please select the right .xml file"
                    .format(mtd_file))
        elif metadata_file == "default":
            # use default json written next to a band by i.sentinel.import -j
            env = gscript.gisenv()
            json_standard_folder = os.path.join(env["GISDBASE"],
                                                env["LOCATION_NAME"],
                                                env["MAPSET"], "cell_misc")
            for value in bands.values():
                metadata_file = os.path.join(json_standard_folder, value,
                                             "description.json")
                if os.path.isfile(metadata_file):
                    break
                else:
                    metadata_file = None
            if not metadata_file:
                gscript.fatal(
                    "No default metadata files found. Did you use -j in i.sentinel.import?"
                )
        elif metadata_file:
            if not os.path.isfile(metadata_file):
                gscript.fatal(
                    "Metadata file <{}> not found. Please select the right file"
                    .format(metadata_file))
        else:
            gscript.fatal(
                "Metadata (file) is required for shadow mask computation. Please specify it"
            )

    d = "double"
    f_bands = {}
    scale_fac = options["scale_fac"]
    cloud_threshold = options["cloud_threshold"]
    shadow_threshold = options["shadow_threshold"]
    raster_max = {}
    check_cloud = 1  # by default the procedure finds clouds
    check_shadow = 1  # by default the procedure finds shadows

    if options["cloud_raster"]:
        cloud_raster = options["cloud_raster"]
    else:
        tmp["cloud_def"] = "cloud_def" + processid
        cloud_raster = tmp["cloud_def"]
    if options["cloud_mask"]:
        cloud_mask = options["cloud_mask"]
        if "." in options["cloud_mask"]:
            # BUGFIX: the original message had no {} placeholder, so the
            # offending name was never shown
            gscript.fatal("Name for cloud_mask output <{}> is not "
                          "SQL compliant".format(options["cloud_mask"]))
    else:
        tmp["cloud_mask"] = "cloud_mask" + processid
        cloud_mask = tmp["cloud_mask"]
    if options["shadow_mask"]:
        shadow_mask = options["shadow_mask"]
        if "." in options["shadow_mask"]:
            # BUGFIX: same missing-placeholder fix as for cloud_mask
            gscript.fatal("Name for shadow_mask output <{}> is not "
                          "SQL compliant".format(options["shadow_mask"]))
    else:
        tmp["shadow_mask"] = "shadow_mask" + processid
        shadow_mask = tmp["shadow_mask"]
    shadow_raster = options["shadow_raster"]

    # Check if all required input bands are specified in the text file
    if (bands["blue"] == "" or bands["green"] == "" or bands["red"] == ""
            or bands["nir"] == "" or bands["nir8a"] == ""
            or bands["swir11"] == "" or bands["swir12"] == ""):
        gscript.fatal(
            "All input bands (blue, green, red, nir, nir8a, swir11, swir12) are required"
        )

    # Check if input bands exist
    for value in bands.values():
        if not gscript.find_file(value, element="cell", mapset=mapset)["file"]:
            gscript.fatal(("Raster map <{}> not found.").format(value))

    if flags["r"]:
        gscript.use_temp_region()
        # BUGFIX: run_command only expands lists/tuples, not dict views
        gscript.run_command("g.region", rast=list(bands.values()), flags="a")
        gscript.message(
            _("--- The computational region has been temporarily set to image max extent ---"
              ))
    else:
        gscript.warning(
            _("All subsequent operations will be limited to the current computational region"
              ))

    if flags["s"]:
        gscript.message(_("--- Start rescaling bands ---"))
        check_b = 0
        for key, b in bands.items():
            gscript.message(b)
            b = gscript.find_file(b, element="cell")["name"]
            tmp["band_double{}".format(check_b)] = "{}_{}".format(b, d)
            band_double = tmp["band_double{}".format(check_b)]
            gscript.mapcalc("{r} = 1.0 * ({b})/{scale_fac}".format(
                r=(band_double), b=b, scale_fac=scale_fac))
            f_bands[key] = band_double
            check_b += 1
        gscript.message(str(list(f_bands.values())))
        gscript.message(_("--- All bands have been rescaled ---"))
    else:
        gscript.warning(_("No rescale factor has been applied"))
        for key, b in bands.items():
            if (gscript.raster_info(b)["datatype"] != "DCELL"
                    and gscript.raster_info(b)["datatype"] != "FCELL"):
                gscript.fatal("Raster maps must be DCELL o FCELL")
            else:
                f_bands = bands

    gscript.message(_("--- Start computing maximum values of bands ---"))
    for key, fb in f_bands.items():
        gscript.message(fb)
        stats = gscript.parse_command("r.univar", flags="g", map=fb)
        raster_max[key] = float(stats["max"])
    gscript.message("--- Computed maximum value: {} ---".format(
        list(raster_max.values())))
    gscript.message(_("--- Statistics have been computed! ---"))

    # Start of Clouds detection  (some rules from litterature)
    gscript.message(_("--- Start clouds detection procedure ---"))
    gscript.message(_("--- Computing cloud mask... ---"))
    first_rule = "(({} > (0.08*{})) && ({} > (0.08*{})) && ({} > (0.08*{})))".format(
        f_bands["blue"],
        raster_max["blue"],
        f_bands["green"],
        raster_max["green"],
        f_bands["red"],
        raster_max["red"],
    )
    second_rule = "(({} < ((0.08*{})*1.5)) && ({} > {}*1.3))".format(
        f_bands["red"], raster_max["red"], f_bands["red"], f_bands["swir12"])
    third_rule = "(({} < (0.1*{})) && ({} < (0.1*{})))".format(
        f_bands["swir11"], raster_max["swir11"], f_bands["swir12"],
        raster_max["swir12"])
    fourth_rule = "(if({} == max({}, 2 * {}, 2 * {}, 2 * {})))".format(
        f_bands["nir8a"],
        f_bands["nir8a"],
        f_bands["blue"],
        f_bands["green"],
        f_bands["red"],
    )
    fifth_rule = "({} > 0.2)".format(f_bands["blue"])
    cloud_rules = (
        "({} == 1) && ({} == 0) && ({} == 0) && ({} == 0) && ({} == 1)".format(
            first_rule, second_rule, third_rule, fourth_rule, fifth_rule))
    expr_c = "{} = if({}, 0, null())".format(cloud_raster, cloud_rules)
    gscript.mapcalc(expr_c, overwrite=True)
    gscript.message(_("--- Converting raster cloud mask into vector map ---"))
    gscript.run_command("r.to.vect",
                        input=cloud_raster,
                        output=tmp["cloud_v"],
                        type="area",
                        flags="s")
    info_c = gscript.parse_command("v.info", map=tmp["cloud_v"], flags="t")
    if info_c["areas"] == "0":
        gscript.warning(_("No clouds have been detected"))
        check_cloud = 0
    else:
        gscript.message(_("--- Cleaning geometries ---"))
        gscript.run_command(
            "v.clean",
            input=tmp["cloud_v"],
            output=cloud_mask,
            tool="rmarea",
            threshold=cloud_threshold,
        )
        info_c_clean = gscript.parse_command("v.info",
                                             map=cloud_mask,
                                             flags="t")
        if info_c_clean["areas"] == "0":
            gscript.warning(_("No clouds have been detected"))
            check_cloud = 0
        else:
            check_cloud = 1
    gscript.message(_("--- Finish cloud detection procedure ---"))
    # End of Clouds detection

    if options["shadow_mask"] or options["shadow_raster"]:
        # Start of shadows detection
        gscript.message(_("--- Start shadows detection procedure ---"))
        gscript.message(_("--- Computing shadow mask... ---"))
        sixth_rule = "((({} > {}) && ({} < {}) && ({} < 0.1) && ({} < 0.1)) \
        || (({} < {}) && ({} < {}) && ({} < 0.1) && ({} < 0.1) && ({} < 0.1)))".format(
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["blue"],
            f_bands["nir"],
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["blue"],
            f_bands["nir"],
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["nir"],
        )
        seventh_rule = "({} - {})".format(f_bands["green"], f_bands["blue"])
        shadow_rules = "(({} == 1) && ({} < 0.007))".format(
            sixth_rule, seventh_rule)
        expr_s = "{} = if({}, 0, null())".format(tmp["shadow_temp"],
                                                 shadow_rules)
        gscript.mapcalc(expr_s, overwrite=True)
        gscript.message(
            _("--- Converting raster shadow mask into vector map ---"))
        gscript.run_command(
            "r.to.vect",
            input=tmp["shadow_temp"],
            output=tmp["shadow_temp_v"],
            type="area",
            flags="s",
            overwrite=True,
        )
        info_s = gscript.parse_command("v.info",
                                       map=tmp["shadow_temp_v"],
                                       flags="t")
        if info_s["areas"] == "0":
            gscript.warning(_("No shadows have been detected"))
            check_shadow = 0
        else:
            gscript.message(_("--- Cleaning geometries ---"))
            gscript.run_command(
                "v.clean",
                input=tmp["shadow_temp_v"],
                output=tmp["shadow_temp_mask"],
                tool="rmarea",
                threshold=shadow_threshold,
            )
            info_s_clean = gscript.parse_command("v.info",
                                                 map=tmp["shadow_temp_mask"],
                                                 flags="t")
            if info_s_clean["areas"] == "0":
                gscript.warning(_("No shadows have been detected"))
                check_shadow = 0
            else:
                check_shadow = 1
            gscript.message(_("--- Finish Shadows detection procedure ---"))
            # End of shadows detection

            # START shadows cleaning Procedure (remove shadows misclassification)
            # Start shadow mask preparation
            if check_shadow == 1 and check_cloud == 1:
                gscript.message(
                    _("--- Start removing misclassification from the shadow mask ---"
                      ))
                gscript.message(_("--- Data preparation... ---"))
                gscript.run_command(
                    "v.centroids",
                    input=tmp["shadow_temp_mask"],
                    output=tmp["centroid"],
                    quiet=True,
                )
                gscript.run_command("v.db.droptable",
                                    map=tmp["centroid"],
                                    flags="f",
                                    quiet=True)
                gscript.run_command("v.db.addtable",
                                    map=tmp["centroid"],
                                    columns="value",
                                    quiet=True)
                gscript.run_command(
                    "v.db.update",
                    map=tmp["centroid"],
                    layer=1,
                    column="value",
                    value=1,
                    quiet=True,
                )
                # Merge all shadow polygons into one feature so category
                # renumbering below yields one cat per shadow area
                gscript.run_command(
                    "v.dissolve",
                    input=tmp["centroid"],
                    column="value",
                    output=tmp["dissolve"],
                    quiet=True,
                )
                gscript.run_command(
                    "v.category",
                    input=tmp["dissolve"],
                    type="point,line,boundary,centroid,area,face,kernel",
                    output=tmp["delcat"],
                    option="del",
                    cat=-1,
                    quiet=True,
                )
                gscript.run_command(
                    "v.category",
                    input=tmp["delcat"],
                    type="centroid,area",
                    output=tmp["addcat"],
                    option="add",
                    quiet=True,
                )
                gscript.run_command("v.db.droptable",
                                    map=tmp["addcat"],
                                    flags="f",
                                    quiet=True)
                gscript.run_command("v.db.addtable",
                                    map=tmp["addcat"],
                                    columns="value",
                                    quiet=True)

                # End shadow mask preparation
                # Start cloud mask preparation

                gscript.run_command("v.db.droptable",
                                    map=cloud_mask,
                                    flags="f",
                                    quiet=True)
                gscript.run_command("v.db.addtable",
                                    map=cloud_mask,
                                    columns="value",
                                    quiet=True)

                # End cloud mask preparation
                # Shift cloud mask using dE e dN
                # Start reading mean sun zenith and azimuth from xml file to compute
                # dE and dN automatically
                gscript.message(
                    _("--- Reading mean sun zenith and azimuth from metadata file to compute clouds shift ---"
                      ))
                if mtd_file != "":
                    try:
                        xml_tree = et.parse(mtd_file)
                        root = xml_tree.getroot()
                        ZA = []
                        try:
                            for elem in root[1]:
                                for subelem in elem[1]:
                                    ZA.append(subelem.text)
                            if ZA == ["0", "0"]:
                                # Mean angles missing: average the angle grids
                                zenith_val = (root[1].find("Tile_Angles").find(
                                    "Sun_Angles_Grid").find("Zenith").find(
                                        "Values_List"))
                                # BUGFIX: numpy.float was removed in NumPy
                                # 1.24; use the builtin float
                                ZA[0] = numpy.mean([
                                    numpy.array(elem.text.split(" "),
                                                dtype=float)
                                    for elem in zenith_val
                                ])
                                azimuth_val = (
                                    root[1].find("Tile_Angles").find(
                                        "Sun_Angles_Grid").find(
                                            "Azimuth").find("Values_List"))
                                ZA[1] = numpy.mean([
                                    numpy.array(elem.text.split(" "),
                                                dtype=float)
                                    for elem in azimuth_val
                                ])
                            z = float(ZA[0])
                            a = float(ZA[1])
                            gscript.message(
                                "--- the mean sun Zenith is: {:.3f} deg ---".
                                format(z))
                            gscript.message(
                                "--- the mean sun Azimuth is: {:.3f} deg ---".
                                format(a))
                        # BUGFIX: no bare except (would also swallow
                        # KeyboardInterrupt/SystemExit)
                        except Exception:
                            gscript.fatal(
                                "The selected input metadata file is not the right one. Please check the manual page."
                            )
                    except Exception:
                        gscript.fatal(
                            "The selected input metadata file is not an .xml file. Please check the manual page."
                        )
                elif metadata_file != "":
                    with open(metadata_file) as json_file:
                        data = json.load(json_file)
                    z = float(data["MEAN_SUN_ZENITH_ANGLE"])
                    a = float(data["MEAN_SUN_AZIMUTH_ANGLE"])

                # Stop reading mean sun zenith and azimuth from xml file to compute dE
                # and dN automatically
                # Start computing the east and north shift for clouds and the
                # overlapping area between clouds and shadows at steps of 100m
                gscript.message(
                    _("--- Start computing the east and north clouds shift at steps of 100m of clouds height---"
                      ))
                # Candidate cloud heights: 1000 m to 4000 m in 100 m steps
                H = 1000
                dH = 100
                HH = []
                dE = []
                dN = []
                AA = []
                while H <= 4000:
                    z_deg_to_rad = math.radians(z)
                    tan_Z = math.tan(z_deg_to_rad)
                    a_deg_to_rad = math.radians(a)
                    cos_A = math.cos(a_deg_to_rad)
                    sin_A = math.sin(a_deg_to_rad)

                    # Shadow offset on the ground for a cloud at height H
                    E_shift = -H * tan_Z * sin_A
                    N_shift = -H * tan_Z * cos_A
                    dE.append(E_shift)
                    dN.append(N_shift)

                    HH.append(H)
                    H = H + dH

                    gscript.run_command(
                        "v.transform",
                        input=cloud_mask,
                        output=tmp["cl_shift"],
                        xshift=E_shift,
                        yshift=N_shift,
                        overwrite=True,
                        quiet=True,
                        stderr=subprocess.DEVNULL,
                    )
                    gscript.run_command(
                        "v.overlay",
                        ainput=tmp["addcat"],
                        binput=tmp["cl_shift"],
                        operator="and",
                        output=tmp["overlay"],
                        overwrite=True,
                        quiet=True,
                        stderr=subprocess.DEVNULL,
                    )
                    gscript.run_command(
                        "v.db.addcolumn",
                        map=tmp["overlay"],
                        columns="area double",
                        quiet=True,
                    )
                    area = gscript.read_command(
                        "v.to.db",
                        map=tmp["overlay"],
                        option="area",
                        columns="area",
                        flags="c",
                        quiet=True,
                    )
                    area2 = gscript.parse_key_val(area, sep="|")
                    AA.append(float(area2["total area"]))

                # Find the maximum overlapping area between clouds and shadows
                index_maxAA = numpy.argmax(AA)

                # Clouds are shifted using the clouds height corresponding to the
                # maximum overlapping area then are intersected with shadows
                gscript.run_command(
                    "v.transform",
                    input=cloud_mask,
                    output=tmp["cl_shift"],
                    xshift=dE[index_maxAA],
                    yshift=dN[index_maxAA],
                    overwrite=True,
                    quiet=True,
                )
                gscript.run_command(
                    "v.select",
                    ainput=tmp["addcat"],
                    atype="point,line,boundary,centroid,area",
                    binput=tmp["cl_shift"],
                    btype="point,line,boundary,centroid,area",
                    output=shadow_mask,
                    operator="intersects",
                    quiet=True,
                )
                if gscript.find_file(name=shadow_mask,
                                     element="vector")["file"]:
                    info_cm = gscript.parse_command("v.info",
                                                    map=shadow_mask,
                                                    flags="t")
                else:
                    info_cm = None
                    gscript.warning(_("No cloud shadows detected"))

                if options["shadow_raster"] and info_cm:
                    # BUGFIX: v.info -t returns counts as strings; compare
                    # numerically instead of lexicographically
                    if int(info_cm["areas"]) > 0:
                        gscript.run_command(
                            "v.to.rast",
                            input=tmp["shadow_temp_mask"],
                            output=shadow_raster,
                            use="val",
                        )
                    else:
                        gscript.warning(_("No cloud shadows detected"))

                gscript.message(
                    "--- the estimated clouds height is: {} m ---".format(
                        HH[index_maxAA]))
                gscript.message(
                    "--- the estimated east shift is: {:.2f} m ---".format(
                        dE[index_maxAA]))
                gscript.message(
                    "--- the estimated north shift is: {:.2f} m ---".format(
                        dN[index_maxAA]))
            else:
                # No clouds detected: the raw shadow mask cannot be cleaned,
                # output it as-is
                if options["shadow_raster"]:
                    gscript.run_command(
                        "v.to.rast",
                        input=tmp["shadow_temp_mask"],
                        output=shadow_raster,
                        use="val",
                    )
                if options["shadow_mask"]:
                    gscript.run_command("g.rename",
                                        vector=(tmp["shadow_temp_mask"],
                                                shadow_mask))
                gscript.warning(
                    _("The removing misclassification procedure from shadow mask was not performed since no cloud have been detected"
                      ))
    else:
        if shadow_mask != "":
            gscript.warning(_("No shadow mask will be computed"))
def main():
    """Download, optionally atmospherically correct, and import Sentinel-2
    scenes in parallel, then register them in (spatio-)temporal datasets.

    Reads the module-level ``options``/``flags`` parsed by GRASS. Uses the
    globals below so the module's cleanup routine can remove temporary
    regions/rasters/vectors and the temporary folder on exit.
    """

    global rm_regions, rm_rasters, rm_vectors, tmpfolder

    # parameters: scene names can be given as a comma separated list or
    # as a text file containing that list
    if options['s2names']:
        s2names = options['s2names'].split(',')
        if os.path.isfile(s2names[0]):
            with open(s2names[0], 'r') as f:
                s2namesstr = f.read()
        else:
            s2namesstr = ','.join(s2names)
    tmpdirectory = options['directory']

    test_nprocs_memory()

    # all required addons have to be installed beforehand
    if not grass.find_program('i.sentinel.download', '--help'):
        grass.fatal(
            _("The 'i.sentinel.download' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.import', '--help'):
        grass.fatal(
            _("The 'i.sentinel.import' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.parallel.download', '--help'):
        grass.fatal(
            _("The 'i.sentinel.parallel.download' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.zero2null', '--help'):
        grass.fatal(
            _("The 'i.zero2null' module was not found, install it first:") +
            "\n" + "g.extension i.zero2null")

    # create temporary directory to download data
    if tmpdirectory:
        if not os.path.isdir(tmpdirectory):
            try:
                os.makedirs(tmpdirectory)
            # was a bare 'except:'; narrowed so KeyboardInterrupt etc.
            # are not swallowed
            except OSError:
                grass.fatal(_("Unable to create temp dir"))

    else:
        tmpdirectory = grass.tempdir()
        tmpfolder = tmpdirectory

    # make distinct download and sen2cor directories
    try:
        download_dir = os.path.join(tmpdirectory,
                                    'download_{}'.format(os.getpid()))
        os.makedirs(download_dir)
    except Exception:
        grass.fatal(_('Unable to create temp dir {}').format(download_dir))

    if not options['input_dir']:
        # auxiliary variable showing whether each S2-scene lies in an
        # individual folder
        single_folders = True

        download_args = {
            'settings': options['settings'],
            'nprocs': options['nprocs'],
            'output': download_dir,
            'datasource': options['datasource'],
            'flags': 'f'
        }
        if options['limit']:
            download_args['limit'] = options['limit']
        if options['s2names']:
            download_args['flags'] += 's'
            download_args['scene_name'] = s2namesstr.strip()
            if options['datasource'] == 'USGS_EE':
                if flags['e']:
                    download_args['flags'] += 'e'
                download_args['producttype'] = 'S2MSI1C'
        else:
            download_args['clouds'] = options['clouds']
            download_args['start'] = options['start']
            download_args['end'] = options['end']
            download_args['producttype'] = options['producttype']

        grass.run_command('i.sentinel.parallel.download', **download_args)
    else:
        download_dir = options['input_dir']
        single_folders = False

    number_of_scenes = len(os.listdir(download_dir))
    nprocs_final = min(number_of_scenes, int(options['nprocs']))

    # run atmospheric correction
    if flags['a']:
        sen2cor_folder = os.path.join(tmpdirectory,
                                      'sen2cor_{}'.format(os.getpid()))
        try:
            os.makedirs(sen2cor_folder)
        except Exception:
            grass.fatal(
                _("Unable to create temporary sen2cor folder {}").format(
                    sen2cor_folder))
        grass.message(
            _('Starting atmospheric correction with sen2cor...').format(
                nprocs_final))
        queue_sen2cor = ParallelModuleQueue(nprocs=nprocs_final)
        for idx, subfolder in enumerate(os.listdir(download_dir)):
            # NOTE(review): if no .SAFE entry is found, 'filepath' keeps
            # the value of the previous iteration (or is unbound on the
            # first one) -- confirm the download dir only holds .SAFE data
            if single_folders is False:
                if subfolder.endswith('.SAFE'):
                    filepath = os.path.join(download_dir, subfolder)
            else:
                folderpath = os.path.join(download_dir, subfolder)
                for safe_file in os.listdir(folderpath):
                    if safe_file.endswith('.SAFE'):
                        filepath = os.path.join(folderpath, safe_file)
            output_dir = os.path.join(sen2cor_folder,
                                      'sen2cor_result_{}'.format(idx))
            try:
                os.makedirs(output_dir)
            except Exception:
                grass.fatal(
                    _('Unable to create directory {}').format(output_dir))
            sen2cor_module = Module(
                'i.sentinel-2.sen2cor',
                input_file=filepath,
                output_dir=output_dir,
                sen2cor_path=options['sen2cor_path'],
                nprocs=1,
                run_=False
                # all remaining sen2cor parameters can be left as default
            )
            queue_sen2cor.put(sen2cor_module)
        queue_sen2cor.wait()
        # from now on, import the corrected scenes instead
        download_dir = sen2cor_folder
        single_folders = True

    grass.message(_("Importing Sentinel scenes ..."))
    env = grass.gisenv()
    start_cur_mapset = env['MAPSET']
    # save current region so every import worker can restore it
    pid = str(os.getpid())
    currentregion = 'tmp_region_' + pid
    rm_regions.append(currentregion)
    grass.run_command('g.region', save=currentregion, flags='p')

    queue_import = ParallelModuleQueue(nprocs=nprocs_final)
    memory_per_proc = round(float(options['memory']) / nprocs_final)
    mapsetids = []
    # r=reproject, n=zero2null; i/c toggle resampling and cloud import
    importflag = 'rn'
    if flags['i']:
        importflag += 'i'
    if flags['c']:
        importflag += 'c'
    json_standard_folder = os.path.join(env['GISDBASE'], env['LOCATION_NAME'],
                                        env['MAPSET'], 'cell_misc')

    if not os.path.isdir(json_standard_folder):
        os.makedirs(json_standard_folder)
    # import each scene into its own temporary mapset
    for idx, subfolder in enumerate(os.listdir(download_dir)):
        if os.path.exists(os.path.join(download_dir, subfolder)):
            mapsetid = 'S2_import_%s' % (str(idx + 1))
            mapsetids.append(mapsetid)
            import_kwargs = {
                "mapsetid": mapsetid,
                "memory": memory_per_proc,
                "pattern": options["pattern"],
                "flags": importflag,
                "region": currentregion,
                "metadata": json_standard_folder
            }
            if single_folders is True:
                directory = os.path.join(download_dir, subfolder)
            else:
                directory = download_dir
                if subfolder.endswith(".SAFE"):
                    pattern_file = subfolder.split(".SAFE")[0]
                elif subfolder.endswith(".zip"):
                    pattern_file = subfolder.split(".zip")[0]
                    if ".SAFE" in pattern_file:
                        pattern_file = pattern_file.split(".SAFE")[0]
                else:
                    grass.warning(
                        _("{} is not in .SAFE or .zip format, "
                          "skipping...").format(
                              os.path.join(download_dir, subfolder)))
                    continue
                import_kwargs["pattern_file"] = pattern_file
            import_kwargs["input"] = directory
            i_sentinel_import = Module("i.sentinel.import.worker",
                                       run_=False,
                                       **import_kwargs)
            queue_import.put(i_sentinel_import)
    queue_import.wait()
    grass.run_command('g.remove', type='region', name=currentregion, flags='f')
    # verify that switching the mapset worked
    env = grass.gisenv()
    gisdbase = env['GISDBASE']
    location = env['LOCATION_NAME']
    cur_mapset = env['MAPSET']
    if cur_mapset != start_cur_mapset:
        grass.fatal("New mapset is <%s>, but should be <%s>" %
                    (cur_mapset, start_cur_mapset))
    # copy maps to current mapset, then drop the temporary mapsets
    maplist = []
    cloudlist = []
    for new_mapset in mapsetids:
        for vect in grass.parse_command('g.list',
                                        type='vector',
                                        mapset=new_mapset):
            cloudlist.append(vect)
            grass.run_command('g.copy',
                              vector=vect + '@' + new_mapset + ',' + vect)
        for rast in grass.parse_command('g.list',
                                        type='raster',
                                        mapset=new_mapset):
            maplist.append(rast)
            grass.run_command('g.copy',
                              raster=rast + '@' + new_mapset + ',' + rast)
        grass.utils.try_rmdir(os.path.join(gisdbase, location, new_mapset))
    # space time dataset
    grass.message(_("Creating STRDS of Sentinel scenes ..."))
    if options['strds_output']:
        strds = options['strds_output']
        grass.run_command('t.create',
                          output=strds,
                          title="Sentinel-2",
                          desc="Sentinel-2",
                          quiet=True)

        # check GRASS version: from 7.9 on the register file may carry a
        # band reference as third column
        g79_or_higher = False
        gversion = grass.parse_command("g.version", flags="g")["version"]
        gversion_base = gversion.split(".")[:2]
        gversion_base_int = tuple([int(a) for a in gversion_base])
        if gversion_base_int >= (7, 9):
            g79_or_higher = True

        # create register file: "map|YYYY-MM-DD hh:mm:ss[|S2_band]" lines,
        # date/time parsed from the scene name
        registerfile = grass.tempfile()
        with open(registerfile, 'w') as regfile:
            for imp_rast in list(set(maplist)):
                band_str_tmp1 = imp_rast.split("_")[2]
                band_str = band_str_tmp1.replace("B0", "").replace("B", "")
                date_str1 = imp_rast.split('_')[1].split('T')[0]
                date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6],
                                          date_str1[6:])
                time_str = imp_rast.split('_')[1].split('T')[1]
                clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4],
                                           time_str[4:])
                write_str = "%s|%s %s" % (imp_rast, date_str2, clock_str2)
                if g79_or_higher is True:
                    write_str += "|S2_%s" % band_str
                regfile.write("%s\n" % write_str)
        grass.run_command('t.register',
                          input=strds,
                          file=registerfile,
                          quiet=True)
        # remove registerfile
        grass.try_remove(registerfile)

        if flags['c']:
            # register the imported cloud vectors in a space time
            # vector dataset
            stvdsclouds = strds + '_clouds'
            grass.run_command('t.create',
                              output=stvdsclouds,
                              title="Sentinel-2 clouds",
                              desc="Sentinel-2 clouds",
                              quiet=True,
                              type='stvds')
            registerfileclouds = grass.tempfile()
            with open(registerfileclouds, 'w') as fileclouds:
                for imp_clouds in cloudlist:
                    date_str1 = imp_clouds.split('_')[1].split('T')[0]
                    date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6],
                                              date_str1[6:])
                    time_str = imp_clouds.split('_')[1].split('T')[1]
                    clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4],
                                               time_str[4:])
                    fileclouds.write("%s|%s %s\n" %
                                     (imp_clouds, date_str2, clock_str2))
            grass.run_command('t.register',
                              type='vector',
                              input=stvdsclouds,
                              file=registerfileclouds,
                              quiet=True)
            grass.message("<%s> is created" % (stvdsclouds))
            # remove registerfile
            grass.try_remove(registerfileclouds)

        # extract a strds for each band; expand a pattern like
        # "B(02|03)_10m" into the single band names
        bands = []
        pattern = options['pattern']
        if "(" in pattern:
            global beforebrackets, afterbrackets
            beforebrackets = re.findall(r"(.*?)\(", pattern)[0]
            inbrackets = re.findall(r"\((.*?)\)", pattern)[0]
            afterbrackets = re.findall(r"\)(.*)", pattern)[0]
            bands = [
                "%s%s%s" % (beforebrackets, x, afterbrackets)
                for x in inbrackets.split('|')
            ]
        else:
            bands = pattern.split('|')

        for band in bands:
            # with -i the 20m/60m bands were resampled to 10m on import,
            # so adjust the band name accordingly (fix: str.replace
            # returns a new string; its result was previously discarded,
            # making this branch a no-op)
            if flags['i'] and ('20' in band or '60' in band):
                band = band.replace('20', '10').replace('60', '10')
            grass.run_command('t.rast.extract',
                              input=strds,
                              where="name like '%" + band + "%'",
                              output="%s_%s" % (strds, band),
                              quiet=True)
            grass.message("<%s_%s> is created" % (strds, band))
Beispiel #52
0
            charcoalmap)  # save name of charcoal map to use later
        charcstats = grass.parse_command('r.univar',
                                         flags='g',
                                         map=charcoalmap,
                                         zones=basinmap)
        averagecharc.append(float(charcstats['mean']) * scalar)

recodeto.close()  # close temporary recode rules file (presumably removed later by cleanup -- TODO confirm)
# change the farming/grazing impact maps into artifact density maps
if anthropogenic:
    grass.message('Generating artifact densities.')
    artifactmaps = []
    # farmingmaps and grazingmaps are parallel lists of impact maps
    # produced earlier in the script
    for farmingmap, grazingmap in zip(farmingmaps, grazingmaps):
        # integer random surface ('i' flag) with values up to max=2,
        # used as the base density
        grass.run_command("r.surf.random",
                          quiet=True,
                          overwrite=True,
                          flags='i',
                          max=2,
                          output="Temporary_random_surface")
        artifactmap = "%s_Artifact_densities_%s" % (
            outprefix, farmingmap.split('_Farming_Impacts_Map')[0])
        # density rule: 0 outside any impact; random+2 where only
        # farming occurred; plain random where grazing occurred
        grass.mapcalc(
            "${artifactmap}=if(isnull(${farmingmap}) && isnull(${grazingmap}), 0, if(isnull(${grazingmap}), ${randmap}+2, ${randmap}))",
            overwrite=True,
            quiet=True,
            artifactmap=artifactmap,
            farmingmap=farmingmap,
            grazingmap=grazingmap,
            randmap="Temporary_random_surface")
        artifactmaps.append(artifactmap)
# now build a pandas dataframe to hold the yearly information about elevation changes and various proxy information, etc.
def main():
    """Fuse two overlapping rasters A and B into one seamless output.

    Reads the module-level ``options``/``flags`` parsed by GRASS. Without
    the -s flag ("simple" mode) A is linearly blended into B over a fixed
    smoothing distance; with -s a spatially variable overlap width is
    derived from the local height difference between A and B and the
    given transition angle. Temporary map names are appended to the
    module-level ``TMP`` list for cleanup on exit.
    """
    input_A = options['input_a']
    input_B = options['input_b']
    output = options['output']
    overlap = options['overlap']
    smooth_dist = options['smooth_dist']
    angle = options['transition_angle']
    blend_mask = options['blend_mask']
    simple = not flags['s']
    # smooth values of closest difference
    smooth_closest_difference_size = int(options['parallel_smoothing'])
    if smooth_closest_difference_size % 2 == 0:
        gscript.fatal(_("Option 'parallel_smoothing' requires odd number"))
    difference_reach = int(options['difference_reach'])

    # temporary map names, made unique by the process id
    postfix = str(os.getpid())
    tmp_absdiff = "tmp_absdiff_" + postfix
    tmp_absdiff_smooth = "tmp_absdiff_smooth" + postfix
    tmp_grow = "tmp_grow" + postfix
    tmp_diff_overlap_1px = "tmp_diff_overlap_1px" + postfix
    tmp_value = "tmp_value" + postfix
    tmp_value_smooth = "tmp_value_smooth" + postfix
    tmp_stretch_dist = "tmp_stretch_dist" + postfix
    tmp_overlap = "tmp_overlap" + postfix
    TMP.extend([
        tmp_absdiff, tmp_absdiff_smooth, tmp_grow, tmp_diff_overlap_1px,
        tmp_value, tmp_value_smooth, tmp_stretch_dist, tmp_overlap
    ])

    # distance from the edge of A (0 inside A, growing outwards)
    gscript.run_command('r.grow.distance',
                        flags='n',
                        input=input_A,
                        distance=tmp_grow)
    if simple and blend_mask:
        tmp_mask1 = "tmp_mask1"
        tmp_mask2 = "tmp_mask2"
        tmp_mask3 = "tmp_mask3"
        tmp_mask4 = "tmp_mask4"
        TMP.extend([tmp_mask1, tmp_mask2, tmp_mask3, tmp_mask4])
        # derive 1-pixel wide edge of A inside of the provided mask
        gscript.mapcalc(
            "{new} = if ({dist} > 0 && {dist} <= 1.5*nsres() && ! isnull({blend_mask}), 1, null())"
            .format(new=tmp_mask1, dist=tmp_grow, blend_mask=blend_mask))
        # create buffer around it
        gscript.run_command('r.grow',
                            input=tmp_mask1,
                            output=tmp_mask2,
                            flags='m',
                            radius=smooth_dist,
                            old=1,
                            new=1)
        # patch the buffer and A
        gscript.mapcalc(
            "{new} = if(! isnull({mask2}) || ! isnull({A}), 1, null())".format(
                A=input_A, mask2=tmp_mask2, new=tmp_mask3))
        # inner grow
        gscript.run_command('r.grow.distance',
                            flags='n',
                            input=tmp_mask3,
                            distance=tmp_mask4)
        # replace the distance inside the buffered area with 0
        gscript.mapcalc('{new} = if(! isnull({A}), {m4}, 0)'.format(
            new=tmp_grow, A=input_A, m4=tmp_mask4),
                        overwrite=True)

    if simple:
        # simple fixed-width blend: linear weight of A grows from 0 at
        # distance 'smooth_dist' down to 1 at the edge of A
        gscript.mapcalc(
            "{out} = if({grow} > {smooth}, {A}, if({grow} == 0, {B},"
            "if (isnull({B}) && ! isnull({A}), {A},"
            "(1 - {grow}/{smooth}) * {B} + ({grow}/{smooth} * {A}))))".format(
                out=output,
                grow=tmp_grow,
                smooth=smooth_dist,
                A=input_A,
                B=input_B))
        return

    # difference
    gscript.mapcalc("{new} = abs({A} - {B})".format(new=tmp_absdiff,
                                                    A=input_A,
                                                    B=input_B))

    # take maximum difference from near cells
    difference_reach = (difference_reach - 1) * 2 + 1
    gscript.run_command('r.neighbors',
                        flags='c',
                        input=tmp_absdiff,
                        output=tmp_absdiff_smooth,
                        method='maximum',
                        size=difference_reach)

    # closest value of difference, sampled on a 1-pixel edge around A
    if blend_mask:
        # set the edge pixels to almost 0 where the mask is, results in no blending
        gscript.mapcalc(
            "{new} = if ({dist} > 0 && {dist} <= 1.5*nsres(), if(isnull({blend_mask}), {diff}, 0.00001), null())"
            .format(new=tmp_diff_overlap_1px,
                    dist=tmp_grow,
                    diff=tmp_absdiff_smooth,
                    blend_mask=blend_mask))
    else:
        gscript.mapcalc(
            "{new} = if ({dist} > 0 && {dist} <= 1.5*nsres(), {diff}, null())".
            format(new=tmp_diff_overlap_1px,
                   dist=tmp_grow,
                   diff=tmp_absdiff_smooth))
    # closest value of difference
    gscript.run_command('r.grow.distance',
                        input=tmp_diff_overlap_1px,
                        value=tmp_value)

    # smooth closest value
    gscript.run_command('r.neighbors',
                        flags='c',
                        input=tmp_value,
                        output=tmp_value_smooth,
                        method='average',
                        size=smooth_closest_difference_size)

    # stretch 10cm height difference per 5 meters
    gscript.mapcalc("{stretch} = {value}/tan({alpha})".format(
        stretch=tmp_stretch_dist, value=tmp_value_smooth, alpha=angle))

    # spatially variable overlap width s
    gscript.mapcalc(
        "{s} = if (isnull({B}) && ! isnull({A}), 1, {dist} / {stretch})".
        format(s=tmp_overlap,
               B=input_B,
               A=input_A,
               dist=tmp_grow,
               stretch=tmp_stretch_dist))
    # fusion: weight A by s (clamped to [0, 1] by the conditionals)
    gscript.mapcalc(
        "{fused} = if({s} >= 1, {A} , if({s} == 0,  {B},  (1 - {s}) * {B} +  {A} * {s}))"
        .format(fused=output, s=tmp_overlap, B=input_B, A=input_A))
    # visualize overlap
    if overlap:
        gscript.mapcalc(
            "{s_trim} = if ({s}>=1, null(), if({s}<=0, null(), {s}))".format(
                s_trim=overlap, s=tmp_overlap))
Beispiel #54
0
    def ImportMapIntoGRASS(self, raster):
        """!Import the warped raster file into GRASS.

        @param raster path to the GDAL-readable file to import

        Imports the file as self.opt_output, sets the computational
        region to the imported data, applies the alpha band (if present)
        as an inverted mask to null out transparent cells, and finally
        composites the .red/.green/.blue bands back into a single map
        when band cleanup is enabled.
        """
        # importing temp_map into GRASS
        try:
            # NOTE(review): the original comment said "do not use -o
            # flag !" yet the 'o' flag (override projection check) IS
            # passed -- confirm which is intended
            grass.run_command('r.in.gdal',
                              flags='o',
                              quiet=True,
                              overwrite=True,
                              input=raster,
                              output=self.opt_output)
        except CalledModuleError:
            grass.fatal(_('%s failed') % 'r.in.gdal')

        # information for destructor to cleanup temp_layers, created
        # with r.in.gdal

        # setting region for full extend of imported raster
        # (multi-band imports produce a '.red' map; single-band imports
        # use the output name directly)
        if grass.find_file(self.opt_output + '.red',
                           element='cell',
                           mapset='.')['file']:
            region_map = self.opt_output + '.red'
        else:
            region_map = self.opt_output
        os.environ['GRASS_REGION'] = grass.region_env(rast=region_map)

        # mask created from alpha layer, which describes real extend
        # of warped layer (may not be a rectangle), also mask contains
        # transparent parts of raster
        if grass.find_file(self.opt_output + '.alpha',
                           element='cell',
                           mapset='.')['name']:
            # saving current mask (if exists) into temp raster
            if grass.find_file('MASK', element='cell', mapset='.')['name']:
                try:
                    mask_copy = self.opt_output + self.original_mask_suffix
                    grass.run_command('g.copy',
                                      quiet=True,
                                      raster='MASK,' + mask_copy)
                except CalledModuleError:
                    grass.fatal(_('%s failed') % 'g.copy')

            # info for destructor
            self.cleanup_mask = True
            # inverted mask: alpha category 0 (fully transparent) is
            # excluded from the computation
            try:
                grass.run_command('r.mask',
                                  quiet=True,
                                  overwrite=True,
                                  maskcats="0",
                                  flags='i',
                                  raster=self.opt_output + '.alpha')
            except CalledModuleError:
                grass.fatal(_('%s failed') % 'r.mask')

            if not self.cleanup_bands:
                # use the MASK to set NULL vlues
                # (rename to *_null, then copy back through r.mapcalc so
                # the active MASK nulls out the transparent cells)
                for suffix in ('.red', '.green', '.blue'):
                    rast = self.opt_output + suffix
                    if grass.find_file(rast, element='cell',
                                       mapset='.')['file']:
                        grass.run_command('g.rename',
                                          rast='%s,%s' %
                                          (rast, rast + '_null'),
                                          quiet=True)
                        grass.run_command('r.mapcalc',
                                          expression='%s = %s' %
                                          (rast, rast + '_null'),
                                          quiet=True)
                        grass.run_command('g.remove',
                                          type='raster',
                                          name='%s' % (rast + '_null'),
                                          flags='f',
                                          quiet=True)

        # TODO one band + alpha band?
        # combine the separate RGB bands into one composite output map
        if grass.find_file(self.opt_output + '.red',
                           element='cell',
                           mapset='.')['file'] and self.cleanup_bands:
            try:
                grass.run_command('r.composite',
                                  quiet=True,
                                  overwrite=True,
                                  red=self.opt_output + '.red',
                                  green=self.opt_output + '.green',
                                  blue=self.opt_output + '.blue',
                                  output=self.opt_output)
            except CalledModuleError:
                grass.fatal(_('%s failed') % 'r.composite')
def main():
    """Tile-wise raster-to-vector conversion (r.to.vect.tiled).

    Splits the current region into xtiles x ytiles tiles, runs r.to.vect
    on each tile, and optionally (-p) patches the tile vectors back into
    a single output map. With -s and type=area the tiles overlap by two
    cells and are clipped back afterwards to get clean tile boundaries.

    Reads the module-level ``options``/``flags`` parsed by GRASS.
    Returns 0 on success; errors abort via grass.fatal().
    """
    # renamed from 'input' to avoid shadowing the builtin
    inmap = options["input"]
    output = options["output"]
    column = options["column"]
    ftype = options["type"]
    xtiles = int(options["x"])
    ytiles = int(options["y"])

    # collect the flags that are passed straight through to r.to.vect
    rtvflags = ""
    for key in "sbtvz":
        if flags[key]:
            rtvflags += key

    # check options
    if xtiles <= 0:
        grass.fatal(_("Number of tiles in x direction must be > 0"))
    # fix: this was 'ytiles < 0', which let ytiles == 0 through (despite
    # the message) and caused a ZeroDivisionError at the height
    # computation below
    if ytiles <= 0:
        grass.fatal(_("Number of tiles in y direction must be > 0"))
    if grass.find_file(name=inmap)["name"] == "":
        grass.fatal(_("Input raster %s not found") % inmap)

    grass.use_temp_region()
    curr = grass.region()
    width = int(curr["cols"] / xtiles)
    if width <= 1:
        grass.fatal("The requested number of tiles in x direction is too large")
    height = int(curr["rows"] / ytiles)
    if height <= 1:
        grass.fatal("The requested number of tiles in y direction is too large")

    # with -s and area type, tiles overlap by 2 cells and are clipped
    # back to the tile extent afterwards
    do_clip = False
    overlap = 0
    if flags["s"] and ftype == "area":
        do_clip = True
        overlap = 2

    ewres = curr["ewres"]
    nsres = curr["nsres"]
    xoverlap = overlap * ewres
    yoverlap = overlap * nsres
    xoverlap2 = (overlap / 2) * ewres
    yoverlap2 = (overlap / 2) * nsres

    # sanity check: the overlap must leave a non-empty region
    e = curr["e"]
    w = curr["w"] + xoverlap
    if w >= e:
        grass.fatal(_("Overlap is too large"))
    n = curr["n"] - yoverlap
    s = curr["s"]
    if s >= n:
        grass.fatal(_("Overlap is too large"))

    # NOTE(review): 'datatype' is currently unused; kept for the implicit
    # existence check raster_info performs
    datatype = grass.raster_info(inmap)["datatype"]
    vtiles = None

    # north to south
    for ytile in range(ytiles):
        n = curr["n"] - ytile * height * nsres
        s = n - height * nsres - yoverlap
        if ytile == ytiles - 1:
            s = curr["s"]
        # west to east
        for xtile in range(xtiles):
            w = curr["w"] + xtile * width * ewres
            e = w + width * ewres + xoverlap

            if xtile == xtiles - 1:
                e = curr["e"]

            grass.run_command("g.region", n=n, s=s, e=e, w=w, nsres=nsres, ewres=ewres)

            # with clipping, convert into a scratch tile ('_stile_')
            # first and clip it to '_tile_' below
            if do_clip:
                tilename = output + "_stile_" + str(ytile) + str(xtile)
            else:
                tilename = output + "_tile_" + str(ytile) + str(xtile)

            outname = output + "_tile_" + str(ytile) + str(xtile)

            grass.run_command(
                "r.to.vect",
                input=inmap,
                output=tilename,
                type=ftype,
                column=column,
                flags=rtvflags,
            )

            if do_clip:
                # clip region: half the overlap on each inner edge, full
                # extent at the outer borders
                n2 = curr["n"] - ytile * height * nsres - yoverlap2
                s2 = n2 - height * nsres
                if ytile == 0:
                    n2 = curr["n"]
                    s2 = n2 - height * nsres - yoverlap2
                if ytile == ytiles - 1:
                    s2 = curr["s"]

                w2 = curr["w"] + xtile * width * ewres + xoverlap2
                e2 = w2 + width * ewres
                if xtile == 0:
                    w2 = curr["w"]
                    e2 = w2 + width * ewres + xoverlap2
                if xtile == xtiles - 1:
                    e2 = curr["e"]

                tilename = output + "_stile_" + str(ytile) + str(xtile)
                if grass.vector_info_topo(tilename)["areas"] > 0:
                    grass.run_command(
                        "g.region", n=n2, s=s2, e=e2, w=w2, nsres=nsres, ewres=ewres
                    )

                    # clip the scratch tile with the region extent
                    extname = "extent_tile_" + str(ytile) + str(xtile)
                    grass.run_command("v.in.region", output=extname, flags="d")
                    outname = output + "_tile_" + str(ytile) + str(xtile)
                    grass.run_command(
                        "v.overlay",
                        ainput=tilename,
                        binput=extname,
                        output=outname,
                        operator="and",
                        olayer="0,1,0",
                    )
                    grass.run_command(
                        "g.remove", flags="f", type="vector", name=extname, quiet=True
                    )

                    if vtiles is None:
                        vtiles = outname
                    else:
                        vtiles = vtiles + "," + outname

                grass.run_command(
                    "g.remove", flags="f", type="vector", name=tilename, quiet=True
                )

            else:
                # write cmd history:
                grass.vector_history(outname)
                if vtiles is None:
                    vtiles = outname
                else:
                    vtiles = vtiles + "," + outname

    # patch all tile vectors into the final output map
    if flags["p"]:
        grass.run_command("v.patch", input=vtiles, output=output, flags="e")

        grass.run_command("g.remove", flags="f", type="vector", name=vtiles, quiet=True)

        if grass.vector_info_topo(output)["boundaries"] > 0:
            # break and clean the shared boundaries between patched tiles
            outpatch = output + "_patch"
            grass.run_command("g.rename", vector=(output, outpatch))
            grass.run_command(
                "v.clean", input=outpatch, output=output, tool="break", flags="c"
            )
            grass.run_command("g.remove", flags="f", type="vector", name=outpatch)

    grass.message(_("%s complete") % "r.to.vect.tiled")

    return 0
Beispiel #56
0
def main():
    """Import an OGR datasource into the current GRASS location.

    Tries a direct v.in.ogr import first; when the source CRS differs
    from the target location's CRS, imports into a temporary location
    and reprojects the result with v.proj.

    Reads module-level ``options``/``flags`` (input, output, layer,
    extent, encoding, snap, epsg, datum_trans; flags f/l/o).  Returns 0
    on success, 1 when layer listing fails; fatal errors abort via
    grass.fatal().
    """
    global TMPLOC, SRCGISRC, GISDBASE
    overwrite = grass.overwrite()

    # list formats and exit
    if flags['f']:
        grass.run_command('v.in.ogr', flags='f')
        return 0

    # list layers and exit
    if flags['l']:
        try:
            grass.run_command('v.in.ogr', flags='l', input=options['input'])
        except CalledModuleError:
            return 1
        return 0

    OGRdatasource = options['input']
    output = options['output']
    layers = options['layer']

    vflags = ''
    if options['extent'] == 'region':
        vflags += 'r'
    if flags['o']:
        vflags += 'o'

    vopts = {}
    if options['encoding']:
        vopts['encoding'] = options['encoding']

    if options['datum_trans'] and options['datum_trans'] == '-1':
        # list datum transform parameters and exit
        if not options['epsg']:
            grass.fatal(_("Missing value for parameter <%s>") % 'epsg')

        return grass.run_command('g.proj',
                                 epsg=options['epsg'],
                                 datum_trans=options['datum_trans'])

    grassenv = grass.gisenv()
    tgtloc = grassenv['LOCATION_NAME']
    tgtmapset = grassenv['MAPSET']
    GISDBASE = grassenv['GISDBASE']
    tgtgisrc = os.environ['GISRC']
    SRCGISRC = grass.tempfile()

    TMPLOC = 'temp_import_location_' + str(os.getpid())

    # write a GISRC file pointing at the temporary import location
    with open(SRCGISRC, 'w') as f:
        f.write('MAPSET: PERMANENT\n')
        f.write('GISDBASE: %s\n' % GISDBASE)
        f.write('LOCATION_NAME: %s\n' % TMPLOC)
        f.write('GUI: text\n')

    # create temp location from input without import
    grass.verbose(_("Creating temporary location for <%s>...") % OGRdatasource)
    if layers:
        vopts['layer'] = layers
    if output:
        vopts['output'] = output
    vopts['snap'] = options['snap']

    # try v.in.ogr directly: flag 'j' only checks whether the source CRS
    # matches the current location (errors='status' returns the exit code)
    if flags['o'] or grass.run_command('v.in.ogr',
                                       input=OGRdatasource,
                                       flags='j',
                                       errors='status',
                                       quiet=True,
                                       overwrite=overwrite,
                                       **vopts) == 0:
        try:
            grass.run_command('v.in.ogr',
                              input=OGRdatasource,
                              flags=vflags,
                              overwrite=overwrite,
                              **vopts)
            grass.message(
                _("Input <%s> successfully imported without reprojection") %
                OGRdatasource)
            return 0
        except CalledModuleError:
            grass.fatal(_("Unable to import <%s>") % OGRdatasource)

    # flag 'i' creates the location from the datasource CRS without importing
    try:
        grass.run_command('v.in.ogr',
                          input=OGRdatasource,
                          location=TMPLOC,
                          flags='i',
                          quiet=True,
                          overwrite=overwrite,
                          **vopts)
    except CalledModuleError:
        grass.fatal(
            _("Unable to create location from OGR datasource <%s>") %
            OGRdatasource)

    # switch to temp location
    os.environ['GISRC'] = str(SRCGISRC)

    if options['epsg']:  # force given EPSG
        kwargs = {}
        if options['datum_trans']:
            kwargs['datum_trans'] = options['datum_trans']
        grass.run_command('g.proj', flags='c', epsg=options['epsg'], **kwargs)

    # switch to target location
    os.environ['GISRC'] = str(tgtgisrc)

    # make sure target is not xy
    if grass.parse_command('g.proj',
                           flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for current location <%s>"
              ) % tgtloc)

    # switch to temp location
    os.environ['GISRC'] = str(SRCGISRC)

    # print projection at verbose level
    grass.verbose(grass.read_command('g.proj', flags='p').rstrip(os.linesep))

    # make sure input is not xy
    if grass.parse_command('g.proj',
                           flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for input <%s>") %
            OGRdatasource)

    if options['extent'] == 'region':
        # switch to target location
        os.environ['GISRC'] = str(tgtgisrc)

        # v.in.region in tgt: capture the current region extent as a vector
        vreg = 'vreg_' + str(os.getpid())
        grass.run_command('v.in.region', output=vreg, quiet=True)

        # reproject the extent vector to the source CRS
        # switch to temp location
        os.environ['GISRC'] = str(SRCGISRC)
        try:
            grass.run_command('v.proj',
                              input=vreg,
                              output=vreg,
                              location=tgtloc,
                              mapset=tgtmapset,
                              quiet=True,
                              overwrite=overwrite)
        except CalledModuleError:
            grass.fatal(_("Unable to reproject to source location"))

        # set region from region vector
        grass.run_command('g.region', res='1')
        grass.run_command('g.region', vector=vreg)

    # import into temp location
    grass.message(_("Importing <%s> ...") % OGRdatasource)
    try:
        grass.run_command('v.in.ogr',
                          input=OGRdatasource,
                          flags=vflags,
                          overwrite=overwrite,
                          **vopts)
    except CalledModuleError:
        grass.fatal(_("Unable to import OGR datasource <%s>") % OGRdatasource)

    # if output is not defined, check source mapset for the imported name
    if not output:
        output = grass.list_grouped('vector')['PERMANENT'][0]

    # switch to target location
    os.environ['GISRC'] = str(tgtgisrc)

    # check if map exists
    if not grass.overwrite() and \
       grass.find_file(output, element='vector', mapset='.')['mapset']:
        grass.fatal(_("option <%s>: <%s> exists.") % ('output', output))

    if options['extent'] == 'region':
        grass.run_command('g.remove',
                          type='vector',
                          name=vreg,
                          flags='f',
                          quiet=True)

    # v.proj: reproject the imported map into the target location
    grass.message(_("Reprojecting <%s>...") % output)
    try:
        grass.run_command('v.proj',
                          location=TMPLOC,
                          mapset='PERMANENT',
                          input=output,
                          overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(_("Unable to reproject vector <%s>") % output)

    return 0
Beispiel #57
0
def main():
    """Drive a TRIGRS slope-stability run from a prepared project.

    Builds the TRIGRS control file (tr_in.txt) from module options and
    the project's ``projlog.log`` dictionary, launches the TRIGRS
    executable, imports the resulting ASCII grids with r.in.arc, and
    removes the raw result files.  Fatal errors abort via grass.fatal().
    """
    proj = options['project']
    mmax = options['mmax']
    nmax = options['nmax']
    nzs = options['nzs']
    tx = options['tx']
    t = options['t']
    min_slope = options['min_slope']
    zmin = options['zmin']
    runoff_map = options['runoff']
    minsf = options['minsf']
    z_minsf = options['z_minsf']
    p_minsf = options['p_minsf']
    infiltr_rate = options['infiltr_rate']
    basal_flux = options['basal_flux']
    times = options['times']
    flow = options['flow_direction']
    psi = options['psi']
    list_out = options['list_out']

######### tools ############

    def InfoCurrentRegion():
        # Return extent and resolution of the current GRASS region as
        # (north, south, west, east, nsres, ewres, rows, cols).
        region = grass.region()
        north, sud, est, west = (region[key] for key in "nsew")
        return (north, sud, west, est,
                region['nsres'], region['ewres'],
                region['rows'], region['cols'])

    def CheckVar(input_data, name):
        # Abort when a required option was left empty.
        if not input_data:
            grass.fatal('A %s value is required' % name)

    def SetOut(map):
        # Map an output name to a Fortran logical: 'T' when requested,
        # 'F' when the option is empty.
        return 'F' if len(map) == 0 else 'T'

################## check input data #########################

    if len(proj) == 0:
        grass.fatal('A project path is required')
    elif not os.path.exists(proj):
        grass.fatal('The project specified does not exist')

    CheckVar(tx, 'tx')
    CheckVar(nzs, 'nzs')
    CheckVar(t, 't')

    if len(times) == 0:
        grass.fatal('Parameter times is required')

################# import projlog.log #######################

    print(proj)
    sys.path.append('%s' % proj)
    import projlog

################## create new_folder ######################
    if not os.path.exists(proj + '/TrigrsRes'):
        os.makedirs(proj + '/TrigrsRes')

################## check region ############################

    if not flags['r']:
        (n, s, w, e, nsr, ewr, nr, nc) = InfoCurrentRegion()

        if (n == projlog.log['north'] and s == projlog.log['sud']
                and w == projlog.log['west'] and e == projlog.log['est']
                and nsr == projlog.log['nsres'] and ewr == projlog.log['ewres']
                and nr == projlog.log['rows'] and nc == projlog.log['columns']):
            print('region ok')
        else:
            grass.fatal('The current region is not the project region: set the flag r')

################## modify region ############################

    if flags['r']:
        # force the computational region to the project region
        grass.run_command("g.region",
                          n=projlog.log['north'], s=projlog.log['sud'],
                          w=projlog.log['west'], e=projlog.log['est'],
                          nsres=projlog.log['nsres'],
                          ewres=projlog.log['ewres'])

################## extract TopoIndex data ####################

    # 'space' is a module-level whitespace-splitting regex -- presumably
    # defined at the top of the file; verify.
    with open(proj + '/' + 'TopoIndexLog.txt', 'r') as logfile:
        lines = logfile.readlines()
    # imax, row, col, nwf sit on the fifth-to-last line of the log
    a = space.split(lines[-5].strip())
    imax = a[0]
    row = a[1]
    col = a[2]
    nwf = a[3]

################# extract n_times data ###################

    list_times = times.split(',')
    n_times = len(list_times)

################## output ################################

    check_runoff_map = SetOut(runoff_map)
    check_minsf = SetOut(minsf)
    check_z_minsf = SetOut(z_minsf)
    check_p_minsf = SetOut(p_minsf)
    check_infiltr_rate = SetOut(infiltr_rate)
    check_basal_flux = SetOut(basal_flux)
    check_list_out = SetOut(list_out)

    check_fill = 'T' if flags['a'] else 'F'
    check_psi = 'F' if psi == 'the initial water table' else 'T'
    check_mass = 'T' if flags['m'] else 'F'

    print(check_runoff_map)
    print(check_minsf)
    print(check_z_minsf)
    print(check_p_minsf)
    print(check_infiltr_rate)
    print(check_basal_flux)
    print(check_list_out)

################## write tr_in  ##########################

    with open(proj + '/tr_in.txt', 'w') as f:
        f.write('Name of project (up to 255 characters)\n')
        f.write('%s\n' % projlog.log['proj'])
        f.write('imax, row, col, nwf, tx, nmax\n')
        f.write('%s,\t%s,\t%s,\t%s,\t%s,\t%s\n' % (imax, row, col, nwf, tx, nmax))
        f.write('nzs, mmax, nper,  zmin,  uww,     t, zones\n')
        f.write('%s,\t%s,\t%s,\t%s,\t9.8e3,\t%s,\t%s\n' % (nzs, mmax, projlog.log['n_per'], zmin, t, projlog.log['n_zone']))
        f.write('zmax,   depth,   rizero,  Min_Slope_Angle (degrees)\n')
        f.write('-3.001,\t-2.4,\t-1.0e-9,\t%s\n' % min_slope)
        # one parameter line per soil property zone
        for x in projlog.log['range_cat_zone']:
            f.write('zone,\t%s\n' % x)
            f.write('cohesion,phi,  uws,   diffus,   K-sat, Theta-sat,Theta-res,Alpha\n')
            f.write(projlog.log['cohes_%s' % x] + ',\t' + projlog.log['phi_%s' % x] + ',\t' + projlog.log['uws_%s' % x] + ',\t' + projlog.log['diffus_%s' % x] + ',\t' + projlog.log['ksat_%s' % x] + ',\t' + projlog.log['thetasat_%s' % x] + ',\t' + projlog.log['thetares_%s' % x] + ',\t' + projlog.log['a_%s' % x] + '\n')
        f.write('cri(1), cri(2), ..., cri(nper)\n')
        # n_per - 1 placeholder intensities, then a distinct final one
        for _ in range(1, projlog.log['n_per']):
            f.write('-9.e-5,\t')
        f.write('-3.e-7\n')
        f.write('capt(1), capt(2), ..., capt(n), capt(n+1)\n')
        f.write('0,\t' + ',\t'.join(projlog.log['list_capt']) + '\n')
        f.write('File name of slope angle grid (slofil)\n')
        f.write(projlog.log['slope_path'] + '\n')
        f.write('File name of property zone grid (zonfil)\n')
        f.write(projlog.log['vzones_path'] + '\n')
        f.write('File name of depth grid (zfil)\n')
        f.write(projlog.log['zmax_path'] + '\n')
        f.write('File name of initial depth of water table grid   (depfil)\n')
        f.write(projlog.log['depthwt_path'] + '\n')
        f.write('File name of initial infiltration rate grid   (rizerofil)\n')
        f.write(projlog.log['rizero_path'] + '\n')
        f.write('List of file name(s) of rainfall intensity for each period, (rifil())\n')
        for item in projlog.log['rifil_path']:
            f.write(item + '\n')
        f.write('File name of grid of D8 runoff receptor cell numbers (nxtfil)\n')
        f.write(proj + '/' + 'TIdscelGrid_%s.asc\n' % projlog.log['proj'])
        print(proj + '/' + 'TIdscelGrid_%s.asc\n' % projlog.log['proj'])
        f.write('File name of list of defining runoff computation order (ndxfil)\n')
        f.write(proj + '/' + 'TIcelindxList_%s.txt\n' % projlog.log['proj'])
        f.write('File name of list of all runoff receptor cells  (dscfil)\n')
        f.write(proj + '/' + 'TIdscelList_%s.txt\n' % projlog.log['proj'])
        f.write('File name of list of runoff weighting factors  (wffil)\n')
        f.write(proj + '/' + 'TIwfactorList_%s.txt\n' % projlog.log['proj'])
        f.write('Folder where output grid files will be stored  (folder)\n')
        f.write(proj + '/TrigrsRes/\n')
        f.write('Identification code to be added to names of output files (suffix)\n')
        f.write(projlog.log['proj'] + '\n')
        f.write('Save grid files of runoff? Enter T (.true.) or F (.false.)\n')
        f.write(check_runoff_map + '\n')
        f.write('Save grid of minimum factor of safety? Enter T (.true.) or F (.false.)\n')
        f.write(check_minsf + '\n')
        f.write('Save grid of depth of minimum factor of safety? Enter T (.true.) or F (.false.)\n')
        f.write(check_z_minsf + '\n')
        f.write('Save grid of pore pressure at depth of minimum factor of safety? Enter T (.true.) or F (.false.)\n')
        f.write(check_p_minsf + '\n')
        f.write('Save grid files of actual infiltration rate? Enter T (.true.) or F (.false.)\n')
        f.write(check_infiltr_rate + '\n')
        f.write('Save grid files of unsaturated zone basal flux? Enter T (.true.) or F (.false.)\n')
        f.write(check_basal_flux + '\n')
        f.write('Save listing of pressure head and factor of safety ("flag")? (Enter -2 detailed, -1 normal, 0 none)\n')
        if check_list_out == 'T':
            f.write('-2\n')
        else:
            f.write('0\n')
        f.write('Number of times to save output grids\n')
        f.write('%s\n' % n_times)
        f.write('Times of output grids\n')
        f.write(',\t'.join(list_times) + '\n')
        f.write('Skip other timesteps? Enter T (.true.) or F (.false.)\n')
        f.write('F\n')
        f.write('Use analytic solution for fillable porosity?  Enter T (.true.) or F (.false.)\n')
        f.write(check_fill + '\n')
        f.write('Estimate positive pressure head in rising water table zone (i.e. in lower part of unsat zone)?  Enter T (.true.) or F (.false.)\n')
        f.write('T\n')
        f.write('Use psi0=-1/alpha? Enter T (.true.) or F (.false.) (False selects the default value, psi0=0)\n')
        f.write(check_psi + '\n')
        f.write('Log mass balance results?   Enter T (.true.) or F (.false.)\n')
        f.write(check_mass + '\n')
        f.write('Flow direction (enter "gener", "slope", or "hydro")\n')
        f.write(flow + '\n')

################## launch trigrs  ##########################

    folder0 = proj + '/'
    # NOTE(review): hard-coded executable path -- assumes a local TRIGRS
    # installation under /usr/local/Trigrs; confirm for this deployment.
    # universal_newlines=True makes communicate() return str so the
    # split('\n') below works on both Python 2 and 3.
    proc00 = subprocess.Popen(["/usr/local/Trigrs/Trigrs", "%s" % folder0],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              universal_newlines=True)
    res00 = proc00.communicate()[0].split('\n')

################## import trigrs outputs ##########################
    respath = proj + '/TrigrsRes'
    # initialized here so the cleanup loop at the bottom cannot raise
    # NameError when respath is missing
    dirList = []
    if os.path.exists(respath):
        dirList = os.listdir(respath)
        filelist = []
        for fname in dirList:
            # TRnon* files signal a non-convergent TRIGRS solution
            if not fname.split("_")[0] == "TRnon":
                filelist.append(fname)
            else:
                grass.fatal("Error: solution non-convergent, consider adjust parameters")

        print(filelist)

        if len(filelist) < 1:
            grass.fatal("Error: no output generated")

        filelist.sort()

        for f in filelist:
            pref = f.split("_")
            # a numeric suffix identifies the output time step (1-based)
            suffix = pref[-1].split(".")[0]
            if pref[0] == "TRfs":
                if suffix.isdigit():
                    print(int(suffix))
                    print(n_times)
                    grass.run_command('r.in.arc', input=respath + "/" + f,
                                      output=minsf + "_" + list_times[int(suffix) - 1],
                                      overwrite=True)
                else:
                    grass.run_command('r.in.arc', input=respath + "/" + f,
                                      output=minsf, overwrite=True)
            if pref[0] == "TRz":
                if suffix.isdigit():
                    grass.run_command('r.in.arc', input=respath + "/" + f,
                                      output=z_minsf + "_" + list_times[int(suffix) - 1],
                                      overwrite=True)
                else:
                    grass.run_command('r.in.arc', input=respath + "/" + f,
                                      output=z_minsf, overwrite=True)
            if pref[0] == "TRp":
                if suffix.isdigit():
                    grass.run_command('r.in.arc', input=respath + "/" + f,
                                      output=p_minsf + "_" + list_times[int(suffix) - 1],
                                      overwrite=True)
                else:
                    grass.run_command('r.in.arc', input=respath + "/" + f,
                                      output=p_minsf, overwrite=True)

######################## remove trigrs result files ###########################################

    for item in dirList:
        os.remove(respath + '/' + item)
def cleanup():
    """Forcibly remove the temporary raster/vector maps listed in TMP."""
    removal_args = {
        "flags": "f",
        "type": ["raster", "vector"],
        "name": TMP,
        "quiet": True,
    }
    gscript.run_command("g.remove", **removal_args)
def calculate_bankfull_width_depth_from_polyline(
    grassdb,
    grass_location,
    qgis_prefix_path,
    path_bkfwidthdepth,
    path_k_c_zone_polygon,
    bkfwd_attributes,
    catinfo,
    input_geo_names,
    k_in=-1,
    c_in=-1,
    return_k_c_only=False,
):
    """Estimate bankfull width/depth and channel parameters per subbasin.

    Derives a discharge--drainage-area relationship Q = k * DA**c from
    either an observed bankfull width/depth layer (path_bkfwidthdepth),
    a per-zone k/c polygon (path_k_c_zone_polygon), or user-supplied
    k_in/c_in, then fills width, depth, mean flow, slope, Manning's n
    and floodplain n columns of the *catinfo* table.

    Returns (k, c) when return_k_c_only is True; None when no usable
    k/c relationship exists; otherwise the updated catinfo table.
    The GRASS session and sqlite connection are closed on every exit
    path (previously leaked on early returns).
    """
    mask = input_geo_names["mask"]
    cat_ply_info = input_geo_names["cat_ply_info"]

    default_slope = min_riv_slope
    min_manning_n = 0.025
    max_manning_n = 0.15
    # 1.2345 is the project's "not computed" sentinel
    default_bkf_width = 1.2345
    default_bkf_depth = 1.2345
    default_bkf_q = 1.2345
    k = -1
    c = -1
    if path_bkfwidthdepth != "#":
        reproject_clip_vectors_by_polygon(
            grassdb=grassdb,
            grass_location=grass_location,
            qgis_prefix_path=qgis_prefix_path,
            mask=os.path.join(grassdb, mask + ".shp"),
            path_polygon=path_bkfwidthdepth,
            ply_name="bkf_width_depth",
        )
    if path_k_c_zone_polygon != '#':
        reproject_clip_vectors_by_polygon(
            grassdb=grassdb,
            grass_location=grass_location,
            qgis_prefix_path=qgis_prefix_path,
            mask=os.path.join(grassdb, mask + ".shp"),
            path_polygon=path_k_c_zone_polygon,
            ply_name="kc_zone",
        )

    # GRASS imports are deferred until after the QGIS-based clipping above
    import grass.script as grass
    import grass.script.setup as gsetup
    from grass.pygrass.modules import Module
    from grass.pygrass.modules.shortcuts import general as g
    from grass.pygrass.modules.shortcuts import raster as r
    from grass.script import array as garray
    from grass.script import core as gcore
    from grass_session import Session

    os.environ.update(
        dict(GRASS_COMPRESS_NULLS="1",
             GRASS_COMPRESSOR="ZSTD",
             GRASS_VERBOSE="1"))
    PERMANENT = Session()
    PERMANENT.open(gisdb=grassdb, location=grass_location, create_opts="")

    con = sqlite3.connect(
        os.path.join(grassdb, grass_location, "PERMANENT", "sqlite",
                     "sqlite.db"))

    if path_bkfwidthdepth != "#":
        grass.run_command(
            "v.import",
            input=os.path.join(grassdb, "bkf_width_depth" + ".shp"),
            output="bk_full_wid_depth",
            overwrite=True,
        )

    if path_k_c_zone_polygon != "#":
        grass.run_command(
            "v.import",
            input=os.path.join(grassdb, "kc_zone_clip" + ".shp"),
            output="kc_zone",
            overwrite=True,
        )

        # intersect k/c zones with catchment polygons and record overlap area
        grass.run_command(
            "v.overlay",
            ainput="kc_zone",
            alayer=1,
            atype="area",
            binput=cat_ply_info,
            operator="and",
            output=cat_ply_info + "kc",
            overwrite=True,
        )
        grass.run_command(
            "v.to.db",
            map=cat_ply_info + "kc",
            option="area",
            columns="Area_kc",
            units="meters",
            overwrite=True,
        )

    if k_in < 0 and c_in < 0:
        ### read catchment
        if path_bkfwidthdepth != '#':
            sqlstat = "SELECT %s,%s,%s,%s FROM %s" % (
                bkfwd_attributes[3],
                bkfwd_attributes[2],
                bkfwd_attributes[1],
                bkfwd_attributes[0],
                "bk_full_wid_depth",
            )
            bkf_width_depth = pd.read_sql_query(sqlstat, con)
            bkf_width_depth = bkf_width_depth.fillna(-9999)

            da_q = bkf_width_depth[[bkfwd_attributes[3],
                                    bkfwd_attributes[2]]].values

            if len(da_q) > 3:
                # enough observations to fit Q = k * DA**c
                k, c = return_k_and_c_in_q_da_relationship(da_q)
            elif len(da_q) > 0 and len(da_q) <= 3:
                # too few points to fit; fall back to observed averages
                k = -1
                c = -1
                default_bkf_width = np.average(
                    bkf_width_depth[bkfwd_attributes[0]])
                default_bkf_depth = np.average(
                    bkf_width_depth[bkfwd_attributes[1]])
                default_bkf_q = np.average(
                    bkf_width_depth[bkfwd_attributes[3]])
            else:
                k = -1
                c = -1

        if path_k_c_zone_polygon != '#':
            # per-subbasin k/c: keep the zone with the largest overlap area
            k = -1
            c = -1
            sqlstat = "SELECT a_k, a_c,b_Gridcode, Area_kc FROM %s" % (
                cat_ply_info + "kc")
            k_c_sub = pd.read_sql_query(sqlstat, con)
            k_c_sub = k_c_sub.fillna(-9999)
            k_c_sub = k_c_sub.sort_values(by='Area_kc', ascending=False)
            k_c_sub = k_c_sub.drop_duplicates(subset=['b_Gridcode'])

    else:
        k = k_in
        c = c_in

    if return_k_c_only:
        # release resources before the early return (previously leaked)
        con.close()
        PERMANENT.close()
        return k, c

    if 'k_c_sub' not in locals() and k == -1:
        # no global k/c and no per-zone table: nothing can be estimated
        con.close()
        PERMANENT.close()
        return

    idx = catinfo.index
    for i in range(0, len(idx)):
        idx_i = idx[i]
        da = catinfo.loc[idx_i, "DrainArea"] / 1000 / 1000  # m2 to km2
        catid = catinfo.loc[idx_i, "SubId"]
        if k > 0:
            # global relationship; 7.2*q**0.5 / 0.27*q**0.3 are empirical
            # hydraulic-geometry coefficients
            q = func_Q_DA(da, k, c)
            catinfo.loc[idx_i, "BkfWidth"] = max(7.2 * q**0.5, min_bkf_width)
            catinfo.loc[idx_i, "BkfDepth"] = max(0.27 * q**0.3, min_bkf_depth)
            catinfo.loc[idx_i, "Q_Mean"] = q
        elif 'k_c_sub' in locals():
            # per-zone relationship with a hard-coded fallback when the
            # subbasin has no zone entry or an invalid k
            if len(k_c_sub.loc[k_c_sub["b_Gridcode"] == catid]) < 1:
                k_sub = 0.00450718
                c_sub = 0.98579699
                print("k_sub not found .....")
            else:
                k_sub = k_c_sub.loc[k_c_sub["b_Gridcode"] ==
                                    catid]["a_k"].values[0]
                c_sub = k_c_sub.loc[k_c_sub["b_Gridcode"] ==
                                    catid]["a_c"].values[0]
                if k_sub < 0:
                    k_sub = 0.00450718
                    c_sub = 0.98579699

            q = func_Q_DA(da, k_sub, c_sub)
            catinfo.loc[idx_i, "BkfWidth"] = max(7.2 * q**0.5, min_bkf_width)
            catinfo.loc[idx_i, "BkfDepth"] = max(0.27 * q**0.3, min_bkf_depth)
            catinfo.loc[idx_i, "Q_Mean"] = q

        else:
            catinfo.loc[idx_i, "BkfWidth"] = default_bkf_width
            catinfo.loc[idx_i, "BkfDepth"] = default_bkf_depth
            catinfo.loc[idx_i, "Q_Mean"] = default_bkf_q

        if catinfo.loc[idx_i, "Lake_Cat"] < 2:
            # backfill missing DEM extremes with the mean elevation
            if catinfo.loc[idx_i, "Max_DEM"] < 0:
                catinfo.loc[idx_i, "Max_DEM"] = catinfo.loc[idx_i, "MeanElev"]
                catinfo.loc[idx_i, "Min_DEM"] = catinfo.loc[idx_i, "MeanElev"]

    # adjust channel parameters

    # remove ncl and headwater subbasins
    catinfo_riv = catinfo.loc[(catinfo["Lake_Cat"] < 2)
                              & (catinfo["RivLength"] != -1.2345)].copy(
                                  deep=True)

    Seg_IDS = catinfo_riv["Seg_ID"].values
    Seg_IDS = np.unique(Seg_IDS)

    for iseg in range(0, len(Seg_IDS)):
        i_seg_id = Seg_IDS[iseg]
        i_seg_info = catinfo_riv[catinfo_riv["Seg_ID"] == i_seg_id]
        # segment-level averages over valid (positive) values only; they
        # serve as fallbacks for invalid per-reach values below
        if len(i_seg_info["RivLength"].values[
                i_seg_info["RivLength"].values > 0]) > 0:
            length_seg = np.sum(i_seg_info["RivLength"].values[
                i_seg_info["RivLength"].values > 0])
        else:
            length_seg = 1
        qmean_seg = np.average(
            i_seg_info["Q_Mean"].values[i_seg_info["Q_Mean"].values > 0])
        width_seg = np.average(
            i_seg_info["BkfWidth"].values[i_seg_info["BkfWidth"].values > 0])
        depth_Seg = np.average(
            i_seg_info["BkfDepth"].values[i_seg_info["BkfDepth"].values > 0])
        floodn_Seg = np.average(
            i_seg_info["FloodP_n"].values[i_seg_info["FloodP_n"].values > 0])
        basslp_Seg = np.average(
            i_seg_info["BasSlope"].values[i_seg_info["BasSlope"].values > 0])
        basasp_Seg = np.average(
            i_seg_info["BasAspect"].values[i_seg_info["BasAspect"].values > 0])
        baselv_Seg = np.average(
            i_seg_info["MeanElev"].values[i_seg_info["MeanElev"].values > 0])

        if len(i_seg_info["Max_DEM"].values[
                i_seg_info["Max_DEM"].values > -9999]) > 0:
            max_elve_seg = np.max(i_seg_info["Max_DEM"].values[
                i_seg_info["Max_DEM"].values > -9999])
            min_elve_seg = np.min(i_seg_info["Min_DEM"].values[
                i_seg_info["Min_DEM"].values > -9999])
        else:
            max_elve_seg = baselv_Seg
            min_elve_seg = baselv_Seg

        slope_seg = (max_elve_seg - min_elve_seg) / length_seg
        if slope_seg < 0.000000001:
            slope_seg = min_riv_slope  #### Needs to update later

        n_seg = calculateChannaln(width_seg, depth_Seg, qmean_seg, slope_seg)

        for i in range(0, len(i_seg_info)):
            subid = i_seg_info["SubId"].values[i]
            max_elve_rch = i_seg_info["Max_DEM"].values[i]
            min_elve_rch = i_seg_info["Min_DEM"].values[i]
            length_rch = max(i_seg_info["RivLength"].values[i], min_riv_lenth)
            qmean_rch = i_seg_info["Q_Mean"].values[i]
            width_rch = i_seg_info["BkfWidth"].values[i]
            depth_rch = i_seg_info["BkfDepth"].values[i]
            floodn_rch = i_seg_info["FloodP_n"].values[i]

            # elevations below -2000 are treated as invalid sentinels
            if min_elve_rch < -2000:
                if i_seg_info["MeanElev"].values[i] > -2000:
                    min_elve_rch = i_seg_info["MeanElev"].values[i]
                else:
                    min_elve_rch = baselv_Seg

            if max_elve_rch < -2000:
                if i_seg_info["MeanElev"].values[i] > -2000:
                    max_elve_rch = i_seg_info["MeanElev"].values[i]
                else:
                    max_elve_rch = baselv_Seg

            slope_rch = (max_elve_rch - min_elve_rch) / length_rch

            # out-of-range reach slope: prefer the segment slope, then clamp
            if slope_rch < min_riv_slope or slope_rch > max_riv_slope:
                if slope_seg >= min_riv_slope and slope_seg <= max_riv_slope:
                    slope_rch = slope_seg

            slope_rch = max(slope_rch, min_riv_slope)
            slope_rch = min(slope_rch, max_riv_slope)

            n_rch = calculateChannaln(width_rch, depth_rch, qmean_rch,
                                      slope_rch)

            # same fallback-then-clamp policy for Manning's n
            if n_rch < min_manning_n or n_rch > max_manning_n:
                if n_seg >= min_manning_n and n_seg <= max_manning_n:
                    n_rch = n_seg

            n_rch = max(n_rch, min_manning_n)
            n_rch = min(n_rch, max_manning_n)

            catinfo.loc[catinfo["SubId"] == subid, "RivSlope"] = slope_rch
            catinfo.loc[catinfo["SubId"] == subid, "Ch_n"] = n_rch
            catinfo.loc[catinfo["SubId"] == subid, "RivLength"] = length_rch

            if floodn_rch < 0:
                if floodn_Seg > 0:
                    floodn_rch = floodn_Seg
                else:
                    floodn_rch = DEFALUT_FLOOD_N

            # floodplain roughness must be at least the channel roughness
            floodn_rch = max(floodn_rch, n_rch)
            catinfo.loc[catinfo["SubId"] == subid, "FloodP_n"] = floodn_rch
            catinfo.loc[catinfo["SubId"] == subid, "Max_DEM"] = max_elve_rch
            catinfo.loc[catinfo["SubId"] == subid, "Min_DEM"] = min_elve_rch

            if i_seg_info["BasSlope"].values[i] <= 0:
                if basslp_Seg > 0:
                    catinfo.loc[catinfo["SubId"] == subid,
                                "BasSlope"] = basslp_Seg
                else:
                    catinfo.loc[catinfo["SubId"] == subid, "BasSlope"] = 0

            if i_seg_info["BasAspect"].values[i] <= 0:
                if basasp_Seg > 0:
                    catinfo.loc[catinfo["SubId"] == subid,
                                "BasAspect"] = basasp_Seg
                else:
                    catinfo.loc[catinfo["SubId"] == subid, "BasAspect"] = 0

            if i_seg_info["MeanElev"].values[i] < 0:
                if baselv_Seg > 0:
                    catinfo.loc[catinfo["SubId"] == subid,
                                "MeanElev"] = baselv_Seg
                else:
                    catelv = catinfo['MeanElev'].values
                    catelv = catelv[catelv > 0]
                    catinfo.loc[catinfo["SubId"] == subid,
                                "MeanElev"] = np.average(catelv)
    con.close()
    PERMANENT.close()
    return catinfo
def main(options, flags):
    """Delineate contiguous regions of suitable cells from a suitability raster.

    Pipeline (each step is a GRASS GIS module call):
      1. Optionally smooth the input with a focal statistic (r.neighbors,
         window ``size``) and take the cell-wise maximum of the input and
         the smoothed map (r.series).
      2. Threshold the result into a boolean suitable/null map (r.mapcalc).
         If ``suitability_threshold`` is empty, derive it from
         ``percentile_threshold`` via r.quantile.
      3. Clump contiguous suitable cells and drop clumps smaller than
         ``minimum_size`` (r.reclass.area).
      4. If ``maximum_gap`` > 0, fill non-suitable gaps up to that size and
         build a "filled gaps vs suitable" category map; finally assign
         unique ids to the clumps (r.clump) to create the output raster.
      5. Flag-driven extras: -a clump-size raster, -z zonal mean
         suitability, -v vector output with area/compactness/fd columns,
         -m compactness raster, -k/-f keep intermediate maps.

    :param options: parsed module options (input, output,
        suitability_threshold, percentile_threshold, minimum_suitability,
        minimum_size, size, focal_statistic, maximum_gap)
    :param flags: parsed module flags (c, d, z, k, f, a, v, m)
    """

    # Variables
    in_filename = options["input"]
    out_filename = options["output"]
    suitability_threshold = options["suitability_threshold"]
    percentile_threshold = options["percentile_threshold"]
    minimum_suitability = options["minimum_suitability"]
    minimum_size = float(options["minimum_size"])
    size = int(options["size"])
    # r.neighbors requires an odd window size (cell-centered window)
    if (size % 2) == 0:
        gs.fatal("size should be an odd positive number")
    focal_statistic = options["focal_statistic"]
    maximum_gap = float(options["maximum_gap"])

    # Flags
    if flags["c"]:
        neighbor_flag = "c"  # circular neighborhood for r.neighbors
    else:
        neighbor_flag = ""
    if flags["d"]:
        clump_flag = "d"  # include diagonal neighbors when clumping
    else:
        clump_flag = ""
    region_suitability_flag = flags["z"]
    keep_suitable_cells_flag = flags["k"]
    keep_focal_stats_flag = flags["f"]
    clump_areas_flag = flags["a"]

    # Compute neighborhood statistic.
    # tmp02 always ends up holding the (possibly smoothed) suitability map
    # used by the thresholding step below.
    if size > 1 and len(minimum_suitability) == 0:
        gs.message("Computing neighborhood statistic")
        gs.message("================================\n")
        tmp00 = create_temporary_name("tmp00")
        gs.run_command(
            "r.neighbors",
            flags=neighbor_flag,
            input=in_filename,
            output=tmp00,
            method=focal_statistic,
            size=size,
        )
        # Never let smoothing lower a cell below its original suitability
        tmp02 = create_temporary_name("tmp02")
        gs.run_command("r.series",
                       input=[in_filename, tmp00],
                       method="maximum",
                       output=tmp02)
    elif size > 1:
        gs.message("Computing neighborhood statistic")
        gs.message("================================\n")
        # NOTE(review): float() on a non-numeric *string* raises ValueError,
        # not TypeError, so this fatal likely never fires for bad input —
        # confirm and consider catching ValueError instead.
        try:
            float(minimum_suitability)
        except TypeError:
            gs.fatal("minimum_suitability must be numeric or left empty")
        tmp01 = create_temporary_name("tmp01")
        gs.run_command(
            "r.neighbors",
            flags=neighbor_flag,
            input=in_filename,
            output=tmp01,
            method=focal_statistic,
            size=size,
        )
        tmp00 = create_temporary_name("tmp00")
        gs.run_command("r.series",
                       input=[in_filename, tmp01],
                       method="maximum",
                       output=tmp00)
        # Mask out cells below the absolute minimum suitability
        tmp02 = create_temporary_name("tmp02")
        gs.run_command(
            "r.mapcalc",
            expression=("{0} = if({1} > {2},{3},null())".format(
                tmp02, in_filename, minimum_suitability, tmp00)),
        )
    else:
        # No smoothing requested: work on a copy of the input
        tmp02 = create_temporary_name("tmp02")
        gs.run_command("g.copy", raster=[in_filename, tmp02], quiet=True)

    # Convert suitability to boolean: suitable (1) or not (nodata)
    gs.message("Creating boolean map suitable/none-suitable")
    gs.message("===========================================\n")

    if suitability_threshold == "":
        # Derive the threshold from the requested percentile; r.quantile
        # prints "percentile:rank:value", so field [2] is the value
        qrule = (gs.read_command(
            "r.quantile",
            input=in_filename,
            percentiles=percentile_threshold,
            quiet=True,
        ).replace("\n", "").split(":")[2])
        suitability_threshold = float(qrule)
    else:
        suitability_threshold = float(suitability_threshold)

    tmp03 = create_temporary_name("tmp03")
    gs.run_command(
        "r.mapcalc",
        expression=("{} = if({} >= {},1,null())".format(
            tmp03, tmp02, suitability_threshold)),
    )

    # Clump contiguous cells (adjacent celss with same value) and
    # remove clumps that are below user provided size
    gs.message("Clumping continuous cells and removing small fragments")
    gs.message("======================================================\n")
    tmp04 = create_temporary_name("tmp04")
    gs.run_command(
        "r.reclass.area",
        flags=clump_flag,
        input=tmp03,
        output=tmp04,
        value=minimum_size,
        mode="greater",
        method="reclass",
    )

    # Remove gaps within suitable regions with size smaller than maxgap
    # Note, in the reclass.area module below mode 'greater' is used because
    # 1/nodata is reversed. The last step (clump) is to assign unique values
    # to the clumps, which makes it easier to filter and analyse results
    if maximum_gap > 0:
        # Step 1: invert — non-suitable cells become 1, suitable become null
        gs.message("Removing small gaps of non-suitable areas - step 1")
        gs.message("==================================================\n")
        tmp05 = create_temporary_name("tmp05")
        expr = "{} = if(isnull({}),1,null())".format(tmp05, tmp04)
        gs.run_command("r.mapcalc", expression=expr)
        # Step 2: keep only non-suitable patches LARGER than maximum_gap
        gs.message("Removing small gaps of non-suitable areas - step 2")
        gs.message("==================================================\n")
        tmp06 = create_temporary_name("tmp06")
        gs.run_command(
            "r.reclass.area",
            input=tmp05,
            output=tmp06,
            value=maximum_gap,
            mode="greater",
            method="reclass",
        )
        # Step 3: invert back — suitable regions + their small gaps are 1
        gs.message("Removing small gaps of non-suitable areas - step 3")
        gs.message("==================================================\n")
        tmp08 = create_temporary_name("tmp08")
        expr3 = "{} = int(if(isnull({}),1,null()))".format(tmp08, tmp06)
        gs.run_command("r.mapcalc", expression=expr3)
        tmp09 = create_temporary_name("tmp09")
        if len(minimum_suitability) > 0:
            # Re-apply the absolute-minimum mask so gap filling cannot
            # re-introduce cells below minimum_suitability
            bumask = tmpmask(raster=in_filename,
                             absolute_minimum=minimum_suitability)
            gs.run_command(
                "r.mapcalc",
                expression=("{} = if(isnull({}), {}, null())".format(
                    tmp09, bumask, tmp08)),
            )
        else:
            gs.run_command("g.rename", raster=[tmp08, tmp09], quiet=True)

        # Create map with category clump-suitable, clump-unsuitable
        # (sum of tmp04 [1=suitable] and tmp09 [1=region incl. gaps]:
        # 2 = originally suitable, 1 = filled gap)
        gs.message("Create map with category clump-suitable, clump-unsuitable")
        gs.message("=======================================================\n")
        filledgaps = "{}_filledgaps".format(out_filename)
        gs.run_command(
            "r.series",
            output=filledgaps,
            input=[tmp04, tmp09],
            method="sum",
        )
        RECLASS_FILLEDGAPS = """
        1:filled gaps\n2:suitable areas
        """.strip()
        gs.write_command(
            "r.category",
            map=filledgaps,
            rules="-",
            separator=":",
            stdin=RECLASS_FILLEDGAPS,
        )

        # Assign unique ids to clumps
        gs.message("Assigning unique id's to clumps")
        gs.message("==============================\n")
        gs.run_command("r.clump",
                       flags=clump_flag,
                       input=tmp09,
                       output=out_filename)
        # NOTE(review): description here is a *tuple* (trailing commas),
        # unlike the later r.support call which concatenates into a single
        # string — verify this is intended and renders correctly.
        gs.run_command(
            "r.support",
            map=filledgaps,
            title="Regions + filled gaps",
            units="2 = suitable, 1 = filled gaps",
            description=(
                "Map indicating which cells of the",
                "\nidentified regions are suitable,",
                "\nand which are gaps included\n",
            ),
        )
        COLORS_FILLEDGAPS = """
        1 241:241:114
        2 139:205:85
        """.strip()
        gs.write_command("r.colors",
                         rules="-",
                         map=filledgaps,
                         stdin=COLORS_FILLEDGAPS)

    else:
        # Assign unique ids to clumps
        gs.message("Assigning unique id's to clumps")
        gs.message("================================\n")
        gs.run_command("r.clump",
                       flags=clump_flag,
                       input=tmp04,
                       output=out_filename)
    gs.run_command(
        "r.support",
        map=out_filename,
        title="Suitable regions",
        units="IDs of suitable regions",
        description=(
            "Map with potential areas for conservation"
            "\n, Based on the suitability layer {}\n".format(in_filename)),
    )
    # Optional: raster with the area (hectares) of each clump (-a flag)
    if clump_areas_flag:
        gs.message("Compute area per clump")
        gs.message("====================\n")
        areastat = "{}_clumpsize".format(out_filename)
        tmp10 = create_temporary_name("tmp10")
        gs.run_command("r.area", input=out_filename, output=tmp10)
        # r.area gives cell counts; convert to hectares via cell area
        gs.run_command(
            "r.mapcalc",
            expression=("{} = {} * area()/10000".format(areastat, tmp10)),
        )

    # Zonal statistics: mean suitability per clump (-z flag)
    if region_suitability_flag:
        gs.message("Compute average suitability per clump")
        gs.message("=====================================\n")
        zonstat = "{}_averagesuitability".format(out_filename)
        gs.run_command(
            "r.stats.zonal",
            base=out_filename,
            cover=in_filename,
            method="average",
            output=zonstat,
        )
        gs.run_command("r.colors", map=zonstat, color="bgyr")
    gs.message("Done")

    # Vector as output (-v flag): polygons with area, compactness, fractal
    # dimension and mean suitability columns
    if flags["v"]:
        # NOTE(review): "statistis" typo in the user-facing message below
        gs.message("Compute vector with statistis")
        gs.message("===========================\n")
        zonstat = "{}_averagesuitability".format(out_filename)
        gs.run_command(
            "r.to.vect",
            flags="v",
            input=out_filename,
            output=out_filename,
            type="area",
        )
        gs.run_command("v.to.db",
                       map=out_filename,
                       option="area",
                       columns="area")
        gs.run_command("v.to.db",
                       map=out_filename,
                       option="compact",
                       columns="compactness")
        gs.run_command("v.to.db", map=out_filename, option="fd", columns="fd")
        gs.run_command(
            "v.rast.stats",
            map=out_filename,
            raster=in_filename,
            column_prefix="AA",
            method="average",
        )
        gs.run_command(
            "v.db.renamecolumn",
            map=out_filename,
            column="{},{}".format("AA_average", "mean_suitability"),
        )

    # Compactness raster (-m flag); reuses the vector layer when -v was set,
    # otherwise builds a temporary vector just to compute compactness
    if flags["m"]:
        gs.message("compactness, fractal dimension and average suitability")
        gs.message("====================================================\n")
        compactness = "{}_compactness".format(out_filename)
        if flags["v"]:
            gs.run_command(
                "v.to.rast",
                input=out_filename,
                output=compactness,
                use="attr",
                attribute_column="compactness",
            )
        else:
            tmp11 = create_temporary_name("tmp11")
            gs.run_command("r.to.vect",
                           flags="v",
                           input=out_filename,
                           output=tmp11,
                           type="area")
            gs.run_command("v.to.db",
                           map=tmp11,
                           option="compact",
                           columns="compactness")
            gs.run_command(
                "v.to.rast",
                input=tmp11,
                output=compactness,
                use="attr",
                attribute_column="compactness",
            )
            gs.run_command(
                "g.remove",
                type="vector",
                name=tmp11,
                flags="f",
                quiet=True,
            )

    # Keep map with all suitable areas (-k flag)
    if keep_suitable_cells_flag:
        rname = "{}_allsuitableareas".format(out_filename)
        gs.run_command("g.rename", raster=[tmp03, rname], quiet=True)
        COLORS_ALLSUITABLEAREAS = """
        1 230:230:230
        """.strip()
        gs.write_command("r.colors",
                         rules="-",
                         map=rname,
                         stdin=COLORS_ALLSUITABLEAREAS)

    # Keep suitability map based on focal statistic (-f flag)
    if keep_focal_stats_flag:
        rname2 = "{}_focalsuitability".format(out_filename)
        gs.run_command("g.rename", raster=[tmp02, rname2], quiet=True)
        gs.run_command("r.colors", map=rname2, raster=in_filename)

    # Report the derived threshold when it was computed from a percentile
    if options["suitability_threshold"] == "":
        gs.message("\n---------------------------------------------------\n")
        gs.message("Suitability threshold = {}".format(suitability_threshold))
        gs.message("Minimum area = {} hectares".format(minimum_size))
        gs.message("\n\n")