Example #1
def GetRefBox(ref, ref_box, k_box, processid):
    """Clip the line map <ref> to a box 10% larger than the extent of
    <k_box>, writing the result to <ref_box>; the computational region
    is restored afterwards."""
    # remember the current region bounds so they can be restored at the end
    orig = grass.region()
    N, S, E, W = orig['n'], orig['s'], orig['e'], orig['w']
    # zoom to k_box and pad the region by 10% of its extent
    grass.run_command("g.region", vect=k_box, quiet=True)
    reg = grass.region()
    ns_ext = math.ceil(reg['n'] - reg['s']) * 10 / 100
    ew_ext = math.ceil(reg['e'] - reg['w']) * 10 / 100
    grass.run_command("g.region",
                      n=reg['n'] + ns_ext / 2, s=reg['s'] - ns_ext / 2,
                      w=reg['w'] - ew_ext / 2, e=reg['e'] + ew_ext / 2,
                      quiet=True)
    # turn the padded region into a vector box and clip <ref> with it
    grass.run_command("v.in.region", output="new_box_%s" % processid, quiet=True)
    grass.run_command("v.overlay", ainput=ref, atype="line",
                      binput="new_box_%s" % processid, btype="area",
                      operator="and", output=ref_box, quiet=True)
    grass.run_command("g.remove", type="vect", name="new_box_%s" % processid,
                      flags="f", quiet=True)
    # restore the original region
    grass.run_command("g.region", n=N, s=S, e=E, w=W, quiet=True)
Example #2
File: digit.py Project: caomw/grass
    def _onLeftDown(self, event):
        action = self.toolbar.GetAction()
        if not action:
            return

        region = grass.region()
        e, n = self.Pixel2Cell(event.GetPositionTuple())
        if not ((region['s'] <= n <= region['n']) and (region['w'] <= e <= region['e'])):
            GWarning(parent = self.parent, 
                     message = _("You are trying to create a training area "
                                 "outside the computational region. "
                                 "Please, use g.region to set the appropriate region first."))
            return

        cat = self.GetCurrentCategory()
        
        if cat is None and action == "addLine":
            dlg = wx.MessageDialog(parent = self.parent,
                      message = _("In order to create a training area, "
                                  "you have to select class first.\n\n"
                                  "There is no class yet, "
                                  "do you want to create one?"),
                      caption = _("No class selected"),
                      style = wx.YES_NO)
            if dlg.ShowModal() == wx.ID_YES:
                self.parent.OnCategoryManager(None)
                
            dlg.Destroy()
            event.Skip()
            return
        
        super(IClassVDigitWindow, self)._onLeftDown(event)
Example #3
    def _getRegionParams(self,opt_region):
        """!Get region parameters from region specified or active default region

        @return region_params as a dictionary
        """
        self._debug("_getRegionParameters", "started")

        if opt_region:
            reg_spl = opt_region.strip().split('@', 1)
            reg_mapset = '.'
            if len(reg_spl) > 1:
                reg_mapset = reg_spl[1]

            if not gscript.find_file(name = reg_spl[0], element = 'windows',
                                     mapset = reg_mapset)['name']:
                gscript.fatal(_("Region <%s> not found") % opt_region)

        if opt_region:
            s = gscript.read_command('g.region',
                                    quiet = True,
                                    flags = 'ug',
                                    region = opt_region)
            region_params = gscript.parse_key_val(s, val_type = float)
            gscript.verbose("Using region parameters for region %s" %opt_region)
        else:
            region_params = gscript.region()
            gscript.verbose("Using current grass region")

        self._debug("_getRegionParameters", "finished")
        return region_params
Example #4
def main(options, flags): 
    if flags['d']:
        grass.run_command('g.mapset', mapset='PERMANENT', quiet=True)
        grass.run_command('g.region', flags='d')

    reg = grass.region()

    cols = int(options['cols'])  # Count of columns in the mapsets
    rows = int(options['rows'])  # Count of rows in the mapsets
    
    # ew = reg['e'] - reg['w']
    dx = cols * reg['ewres']

    ns = reg['n'] - reg['s']
    dy = rows * reg['nsres']

    west = reg['w']
    south = reg['s']

    i = j = 0
    try:
        while west < reg['e']:
            while south < reg['n']:
                mapset_name = "node_%s_%s" % (j, i)
                grass.run_command('g.mapset', mapset=mapset_name, flags='c', quiet=True)
                grass.run_command('g.region', s=south, n=min(reg['n'], south+dy),
                                  w=west, e=min(reg['e'], west+dx), flags='p')
                south += dy
                j += 1
            west += dx
            i += 1
            j = 0
            south = reg['s']
    finally:
        grass.run_command('g.mapset', mapset='PERMANENT')
Example #5
def getRandomGridCoords(n, accessible):
    c = grass.region()
    rows = c['rows']
    cols = c['cols']
    nc = c['n']
    wc = c['w']
    ns = c['nsres']
    ew = c['ewres']
    
    if(rows*cols<n):
        n = rows*cols
    rand_rows = np.random.randint(0,rows, n)
    rand_cols = np.random.randint(0,cols, n)
    if accessible:
        rvals = RasterRow(crmap, mapset)
        rvals.open('r')
        # redraw each candidate cell until it falls on an accessible value
        # (the original nested i/j loop checked rand_cols[j] but re-drew
        # rand_cols[i], which could loop forever)
        for i in xrange(0, n):
            while rvals[rand_rows[i]][rand_cols[i]] < 0 or rvals[rand_rows[i]][rand_cols[i]] >= 999:
                rand_rows[i] = np.random.randint(0, rows)
                rand_cols[i] = np.random.randint(0, cols)
        rvals.close()
    
    return [Point(wc + rand_cols[i]*ew + ew/2, nc - rand_rows[i]*ns - ns/2) for i in xrange(0,n)]
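A usage sketch, assuming the module-level globals the function relies on (crmap, mapset) are set and pygrass is available:

# draw 50 random cell-center points anywhere in the region
pts = getRandomGridCoords(50, accessible=False)
for p in pts:
    print(p.x, p.y)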
Example #6
 def set_resolution(self):
   """
   resolution is in dpi, so is a function of figsize
   """
   # Get maximum resolution
    # flags='p' sets the region to the raster and prints it; adding 'u' ("up")
    # would leave the region unchanged and only print it
    raster_region = self.parse_region(grass.read_command('g.region', rast=self.raster_grid_name, flags='p'))
   rast_nlats = float(raster_region['rows'])
   rast_nlons = float(raster_region['cols'])
   self.nlats = int(np.min((rast_nlats, self.figsize[0]*self.resolution)))
   self.nlons = int(np.min((rast_nlons, self.figsize[1]*self.resolution)))
   grass.run_command('g.region', rows=self.nlats, cols=self.nlons)
   self.s = grass.region()['s']
   self.n = grass.region()['n']
   self.w = grass.region()['w']
   self.e = grass.region()['e']
   # And also set the lats and lons for the Basemap grid
   # use np.mean to get the cell centers
   self.lats = self.midpoints( np.linspace(self.s, self.n, self.nlats+1) )
   self.lons = self.midpoints( np.linspace(self.w, self.e, self.nlons+1) )
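A quick numeric illustration of the cap (hypothetical values): with figsize=(8, 6) inches and resolution=300 dpi, the grid is limited to 2400 rows even if the raster is finer:

figsize = (8.0, 6.0)    # inches (hypothetical)
resolution = 300        # dpi
rast_nlats = 10000.0    # raster has more rows than the figure can show
nlats = int(min(rast_nlats, figsize[0] * resolution))  # -> 2400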
Example #7
 def set_resolution(self):
   """
   resolution is in dpi, so is a function of figsize
   """
   # Get maximum resolution
   raster_region = grass.region()
   rast_nlats = float(raster_region['rows'])
   rast_nlons = float(raster_region['cols'])
   self.nlats = int(np.min((rast_nlats, self.figsize[0]*self.resolution)))
   self.nlons = int(np.min((rast_nlons, self.figsize[1]*self.resolution)))
   grass.run_command('g.region', rows=self.nlats, cols=self.nlons)
   self.s = grass.region()['s']
   self.n = grass.region()['n']
   self.w = grass.region()['w']
   self.e = grass.region()['e']
   # And also set the lats and lons for the Basemap grid
   # use np.mean to get the cell centers
   self.lats = self.midpoints( np.linspace(self.s, self.n, self.nlats+1) )
   self.lons = self.midpoints( np.linspace(self.w, self.e, self.nlons+1) )
Example #8
    def Run(self, input, column, output, package, sill, nugget, range, kappa, logger,
            overwrite, model, block, output_var, command, **kwargs):
        """ Wrapper for all functions above. """

        logger.message(_("Processing %d cells. Computing time raises "
                         "exponentially with resolution." % grass.region()['cells']))
        logger.message(_("Importing data..."))

        if self.InputData is None:
            self.InputData = self.ImportMap(input, column)
        # and from here over, InputData refers to the global variable
        #print(robjects.r.slot(InputData, 'data').names)
        logger.message(_("Data successfully imported."))

        GridPredicted = self.CreateGrid(self.InputData)

        logger.message(_("Fitting variogram..."))

        if block != '':
            self.predictor = 'x+y'
        else:
            self.predictor = '1'
        if self.Variogram is None:
            self.Variogram = self.FitVariogram(robjects.Formula(column + "~" + self.predictor),
                                               self.InputData,
                                               model=model,
                                               sill=sill,
                                               nugget=nugget,
                                               range=range,
                                               kappa=kappa)
        logger.message(_("Variogram fitting complete."))

        logger.message(_("Kriging..."))
        KrigingResult = self.DoKriging(
            robjects.Formula(
                column + "~" + self.predictor),
            self.InputData,
            GridPredicted,
            self.Variogram['variogrammodel'],
            block)  # using global ones
        logger.message(_("Kriging complete."))

        self.ExportMap(map=KrigingResult,
                       column='var1.pred',
                       name=output,
                       overwrite=overwrite,
                       command=command,
                       variograms=self.Variogram)
        if output_var != '':
            self.ExportMap(map=KrigingResult,
                           column='var1.var',
                           name=output_var,
                           overwrite=overwrite,
                           command=command,
                           variograms=self.Variogram)
Example #9
def get_region():
    """Returns current computational region as dictionary.

    Adds long key names.
    """
    region = gs.region()
    region['east'] = region['e']
    region['west'] = region['w']
    region['north'] = region['n']
    region['south'] = region['s']
    return region
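For reference, a short sketch of how the wrapper's output can be used (values depend on the active region; the assertion simply shows that short and long keys are aliases):

region = get_region()
assert region['n'] == region['north']
cell_area = region['nsres'] * region['ewres']  # one cell's area in map units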
Example #10
def main():
    global temp_dist, temp_val

    input = options['input']
    output = options['output']
    radius = float(options['radius'])
    metric = options['metric']
    old = options['old']
    new = options['new']
    mapunits = flags['m']

    tmp = str(os.getpid())

    temp_dist = "r.grow.tmp.%s.dist" % tmp

    if new == '':
        temp_val = "r.grow.tmp.%s.val" % tmp
        new = temp_val
    else:
        temp_val = None

    if old == '':
        old = input

    if not mapunits:
        kv = grass.region()
        scale = math.sqrt(float(kv['nsres']) * float(kv['ewres']))
        radius *= scale

    if metric == 'euclidean':
        metric = 'squared'
        radius = radius * radius

    # check if input file exists
    if not grass.find_file(input)['file']:
        grass.fatal(_("Raster map <%s> not found") % input)

    try:
        grass.run_command('r.grow.distance', input=input, metric=metric,
                          distance=temp_dist, value=temp_val)
    except CalledModuleError:
        grass.fatal(_("Growing failed. Removing temporary maps."))

    grass.mapcalc(
        "$output = if(!isnull($input),$old,if($dist < $radius,$new,null()))",
        output=output, input=input, radius=radius,
        old=old, new=new, dist=temp_dist)

    grass.run_command('r.colors', map=output, raster=input)

    # write cmd history:
    grass.raster_history(output)
Example #11
    def CreateGrid(self, inputdata):
        Region = grass.region()
        Grid = robjects.r.gmeta2grd()

        # addition of coordinates columns into dataframe.
        coordinatesDF = robjects.r['as.data.frame'](robjects.r.coordinates(Grid))
        data = robjects.r['data.frame'](x=coordinatesDF.rx('s1')[0],
                                        y=coordinatesDF.rx('s2')[0],
                                        k=robjects.r.rep(1, Region['cols'] * Region['rows']))
        GridPredicted = robjects.r.SpatialGridDataFrame(
            Grid, data, proj4string=robjects.r.CRS(
                robjects.r.proj4string(inputdata)))
        return GridPredicted
Example #12
def main():
    name = options['output']
    type = options['type']
    dip = float(options['dip'])
    az  = float(options['azimuth'])
    ea  = float(options['easting'])
    no  = float(options['northing'])
    el  = float(options['elevation'])

    reg = grass.region()

    ### test input values ###
    if abs(dip) >= 90:
        grass.fatal(_("dip must be between -90 and 90."))

    if az < 0 or az >= 360:
        grass.fatal(_("azimuth must be between 0 and 360"))

    ### now the actual algorithm
    az_r  = math.radians(-az)
    sinaz = math.sin(az_r)
    cosaz = math.cos(az_r)

    dip_r = math.radians(-dip)
    tandip = math.tan(dip_r)

    kx = sinaz * tandip
    ky = cosaz * tandip
    kz = el - ea * sinaz * tandip - no * cosaz * tandip

    if type == "CELL":
        round = "round"
        dtype = "int"
    elif type == "FCELL":
        round = ""
        dtype = "float"
    else:
        round = ""
        dtype = "double"

    grass.mapcalc("$name = $type($round(x() * $kx + y() * $ky + $kz))",
                  name = name, type = dtype, round = round, kx = kx, ky = ky, kz = kz)

    grass.run_command('r.support', map = name, history = '')
    grass.raster_history(name)

    grass.message(_("Done."))
    t = string.Template("Raster map <$name> generated by r.plane " +
                        "at point $ea E, $no N, elevation $el with dip = $dip degrees and " +
                        "aspect = $az degrees ccw from north.")
    grass.message(t.substitute(name = name, ea = ea, no = no, el = el, dip = dip, az = az))
Example #13
def createRegionDefinition(project):
  ## start of write extent
  call(["mkdir", project+"/region"])

  ## regionInfo = grass.run_command("g.region", flags="g")
  ## create extent with boundary as [project]/extent.geojson and save it under resortExtent
  vinfo = grass.vector_info('inputJSONextentMask')  # returns a dict, not an object
  grass.run_command("g.region", save="resortExtent",
                    n=vinfo['north'], s=vinfo['south'],
                    w=vinfo['west'], e=vinfo['east'])
  reg = grass.region()  # also a dict, so index with keys rather than attributes

  f = open(project+'/region/region.json', 'w+')
  print >>f, '{"n":'+str(reg['n']) + ',"s":'+str(reg['s'])+',"w":'+str(reg['w']) + ',"e":'+str(reg['e'])+'}'
  f.close()

  grass.run_command("g.region", rast='inputDEMraster')
Example #14
    def __init__(self,**optionsandflags):
        '''Process all arguments and prepare processing'''
        # add all options and flags as attributes (only nonempty ones)
        self.options = {}
        for o in optionsandflags:
            if optionsandflags[o]!='': self.options[o] = optionsandflags[o]
        self.__dict__.update(self.options)

        # get some location infos
        self.env=grass.gisenv()
        self.region=grass.region()
        self.proj=grass.parse_command('g.proj',flags='g')

        # convert res
        self.res = float(self.res)
        return
Example #15
def GetRegionParams(opt_region):

    # set region
    if opt_region:
        reg_spl = opt_region.strip().split('@', 1)
        reg_mapset = '.'
        if len(reg_spl) > 1:
            reg_mapset = reg_spl[1]

        if not grass.find_file(name=reg_spl[0], element='windows', mapset=reg_mapset)['name']:
            grass.fatal(_("Region <%s> not found") % opt_region)

    if opt_region:
        s = grass.read_command('g.region',
                               quiet=True,
                               flags='ug',
                               region=opt_region)
        region_params = grass.parse_key_val(s, val_type=float)
    else:
        region_params = grass.region()

    return region_params
Example #16
    def __init__(self,**optionsandflags):
        '''Process all arguments and prepare processing'''
        # add all options and flags as attributes (only nonempty ones)
        self.options = {}
        for o in optionsandflags:
            if optionsandflags[o]!='':
                try: self.options[o] = int(optionsandflags[o])             # int
                except ValueError:
                    try: self.options[o] = float(optionsandflags[o])       # float
                    except ValueError: self.options[o] = optionsandflags[o]# str
        self.__dict__.update(self.options)
        # save region for convenience
        self.region = grass.region()

        # INPUT CHECK

        # input dir
        if not os.path.exists(self.datadir):
            grass.fatal("%s doesn't exist!" % self.datadir)

        # climate stations columns
        cols = grass.vector_columns(self.climstations)
        for c in [self.fnames, self.stationelevation]:
            if c not in cols:
                grass.fatal("Can't find %s in table %s" % (c, self.climstations))
        # subbasins
        if self.elevation not in grass.vector_columns(self.subbasins):
            grass.fatal("Can't find %s in table %s" % (self.elevation, self.subbasins))

        # no extension
        if 'ext' not in self.options: self.ext=''

        # check if INTERPOL.PAR can be written
        self.par = True
        parneeded = ['method','start','end','minnb','maxnb','maxdist','nodata']
        if any([e not in self.options for e in parneeded]):
            grass.warning("Won't write INTERPOL.PAR because not all of these "
                          "arguments are set: %s" % (parneeded,))
            self.par = False
        return
Example #17
    def CreateDatalist(self, raster, coords):
        """Build a list of distance, value pairs for points along transect using r.profile
        """
        datalist = []

        # keep total number of transect points to 500 or less to avoid
        # freezing with large, high resolution maps
        region = grass.region()
        curr_res = min(float(region['nsres']), float(region['ewres']))
        transect_rec = 0
        if self.transect_length / curr_res > 500:
            transect_res = self.transect_length / 500
        else:
            transect_res = curr_res

        ret = RunCommand("r.profile",
                         parent=self,
                         input=raster,
                         coordinates=coords,
                         resolution=transect_res,
                         null="nan",
                         quiet=True,
                         read=True)

        if not ret:
            return []

        for line in ret.splitlines():
            dist, elev = line.strip().split(' ')
            if dist is None or dist == '' or dist == 'nan' or \
                    elev is None or elev == '' or elev == 'nan':
                continue
            dist = float(dist)
            elev = float(elev)
            datalist.append((dist, elev))

        return datalist
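To make the 500-point cap concrete (hypothetical numbers): a 10,000 m transect on a map whose finest resolution is 2 m would yield 5,000 samples, so the sampling step is coarsened to 20 m:

transect_length = 10000.0  # metres (hypothetical)
curr_res = 2.0             # finest of nsres/ewres
if transect_length / curr_res > 500:
    transect_res = transect_length / 500  # -> 20.0 m, exactly 500 samples
else:
    transect_res = curr_res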
Example #18
    def mkLonLatGrid(self):
        # make temporary roi
        roi='roi__'
        grass.run_command('v.in.region',output=roi,quiet=True)
        # create temporary lonlat location
        tmpdir=grass.tempdir()
        tmploc='lonlat'
        grass.core.create_location(tmpdir,tmploc,epsg=4326)
        grass.run_command('g.mapset',mapset='PERMANENT',location=tmploc,
                          dbase=tmpdir,quiet=True)
        # reproj roi, smax in meters = 200km per degree
        grass.run_command('v.proj',input=roi,mapset=self.env['MAPSET'],
                          location=self.env['LOCATION_NAME'],dbase=self.env['GISDBASE'],
                          quiet=True)
        grass.run_command('g.region',vector=roi,quiet=True)
        llregion = grass.region()

        # bounds to extend to next resolution break
        extent = {c:int(float(llregion[c])/self.res)*self.res for c in ('s','w')}
        extent.update({c:int((float(llregion[c])+self.res)/self.res)*self.res for c in ('n','e')})
        # set region
        grass.run_command('g.region',res=self.res,**extent)
        grass.message(('Lon/Lat extent of region:',extent))

        # make grid
        grass.run_command('v.mkgrid',map=self.grid, type='area')
        grass.run_command('v.db.addcolumn',map=self.grid,columns='lon double,lat double')
        grass.run_command('v.to.db', map=self.grid, type='centroid',
                          option='coor', columns='lon,lat',quiet=True)

        # back to original location and reproject
        grass.run_command('g.mapset',mapset=self.env['MAPSET'],location=self.env['LOCATION_NAME'],
                          dbase=self.env['GISDBASE'],quiet=True)
        grass.run_command('v.proj',input=self.grid,mapset='PERMANENT',
                          location=tmploc,dbase=tmpdir,quiet=True,
                          smax=float(self.region['nsres'])+float(self.region['ewres']))
        return 0
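To make the snapping arithmetic concrete (hypothetical numbers): with res = 0.5 degrees, a southern bound of 34.3 floors to 34.0 while a northern bound of 41.2 extends to the next break at 41.5:

res = 0.5
s = int(34.3 / res) * res           # -> 34.0 (floor to the grid)
n = int((41.2 + res) / res) * res   # -> 41.5 (extend to the next break)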
Example #19
    def initialize(self):
        grass.use_temp_region()

        run('g.region', raster = self.inmap)

        reg = grass.region()
        for k, f in wind_keys.values():
            self.total[k] = (f)(reg[k])

        if self.cols > self.total['cols']:
            self.cols = self.total['cols']
        if self.rows > self.total['rows']:
            self.rows = self.total['rows']

        tempbase = grass.tempfile()
        grass.try_remove(tempbase)

        self.tempfile = tempbase + '.ppm'
        self.tempmap = 'tmp.d.rast.edit'

        atexit.register(self.cleanup)

        run('g.copy', raster = (self.inmap, self.outmap), overwrite = True)
        run('r.colors', map = self.outmap, rast = self.inmap)
Example #20
def main():

    # Input for all methods
    global roads
    roads = options["map"]
    global layer
    layer = options["roads_layer"]
    node_layer = options["node_layer"]
    cost_column = options["cost_column"]
    start_points = options["start_points"]
    time_steps = options["time_steps"].split(",")
    isochrones = options["isochrones"]
    global method
    method = options["method"]

    if method == "r.cost":
        offroad_speed = float(options["offroad_speed"])
        if offroad_speed == 0:
            grass.message(_("Offroad speed has to be > 0. Set to 0.00001."))
            offroad_speed = 0.00001

        memory = int(options["memory"])
        # Output
        if options["timemap"]:
            timemap = options["timemap"]
        else:
            timemap = None

        global tmp_cost_map
        global tmp_time_map
        global tmp_region_map
        global tmp_cost_column

        tmp_cost_map = "cost_map_tmp_%d" % os.getpid()
        tmp_time_map = "time_map_tmp_%d" % os.getpid()
        tmp_region_map = "region_map_tmp_%d" % os.getpid()

        # get current resolution
        region = grass.region()
        resolution = math.sqrt(float(region["nsres"]) * float(region["ewres"]))

        if grass.vector_db(roads)[int(layer)]["driver"] == "dbf":
            # add cost column to road vector
            tmp_cost_column = "tmp%d" % os.getpid()
            def_cost_column = tmp_cost_column + " DOUBLE PRECISION"
            grass.run_command(
                "v.db.addcolumn",
                map=roads,
                layer=layer,
                column=def_cost_column,
                quiet=True,
            )

            # calculate cost (in minutes) depending on speed
            # (resolution/(speed (in km/h) * 1000 / 60))
            query_value = "%s / (%s * 1000 / 60)" % (resolution, cost_column)
            grass.run_command("v.db.update",
                              map=roads,
                              column=tmp_cost_column,
                              qcolumn=query_value)
        else:
            tmp_cost_column = "%s / (%s * 1000 / 60)" % (resolution,
                                                         cost_column)

        # transform to raster
        grass.run_command(
            "v.to.rast",
            input=roads,
            output=tmp_cost_map,
            use="attr",
            attrcolumn=tmp_cost_column,
            type="line",
            memory=memory,
        )

        # replace null values with cost for off-road areas
        # (resolution/(off-road speed * 1000 / 60))
        null_value = resolution / (offroad_speed * 1000 / 60)
        grass.run_command("r.null", map=tmp_cost_map, null=null_value)

        # limit the cumulated cost surface calculation to the max time distance
        # requested
        max_cost = time_steps[-1]

        # calculate time distance from starting points
        grass.run_command(
            "r.cost",
            input=tmp_cost_map,
            start_points=start_points,
            output=tmp_time_map,
            max_cost=max_cost,
            memory=memory,
        )

        if timemap:
            grass.run_command("g.copy", raster=(tmp_time_map, timemap))
            grass.run_command("r.colors",
                              map=timemap,
                              color="grey",
                              flags="ne")

        # recode time distance to time steps
        recode_rules = "0:%s:%s\n" % (time_steps[0], time_steps[0])
        for count in range(1, len(time_steps)):
            recode_rules += time_steps[count - 1] + ":"
            recode_rules += time_steps[count] + ":"
            recode_rules += time_steps[count] + "\n"

        grass.write_command(
            "r.recode",
            input=tmp_time_map,
            output=tmp_region_map,
            rules="-",
            stdin=recode_rules,
        )

        # transform to vector areas
        grass.run_command(
            "r.to.vect",
            input=tmp_region_map,
            output=isochrones,
            type="area",
            column="traveltime",
            flags="s",
        )

        # give the polygons a default color table
        grass.run_command("v.colors",
                          map=isochrones,
                          use="attr",
                          column="traveltime",
                          color="grey")

    elif method == "v.net.iso":
        global max_distance
        if options["max_distance"]:
            max_distance = float(options["max_distance"])
        else:
            max_distance = None
        global output_cats
        output_cats = []
        for i in range(1, len(time_steps) + 2):
            output_cats.append(i)
        startpoints = grass.read_command(
            "v.distance",
            from_=start_points,
            to=roads,
            to_type="point",
            to_layer=node_layer,
            upload="cat",
            flags="p",
            quiet=True,
        ).split("\n")[1:-1]

        global isoraw
        isoraw = "isoraw_temp_%d" % os.getpid()
        global isos_final
        isos_final = "isos_final_%d" % os.getpid()

        if flags["i"]:
            for point in startpoints:
                startpoint_cat = point.split("|")[0]
                startnode_cat = point.split("|")[1]
                grass.run_command(
                    "v.net.iso",
                    input_=roads,
                    output=isoraw,
                    center_cats=startnode_cat,
                    costs=time_steps,
                    arc_column=cost_column,
                    overwrite=True,
                )
                isocalc(isoraw)
                outname = isochrones + "_" + startpoint_cat
                grass.run_command("g.rename", vect=isos_final + "," + outname)
                # give the polygons a default color table
                grass.run_command("v.colors",
                                  map=outname,
                                  use="cat",
                                  color="grey")

        else:
            startnodes = []
            for point in startpoints:
                startnodes.append(point.split("|")[1])
            grass.run_command(
                "v.net.iso",
                input_=roads,
                output=isoraw,
                center_cats=startnodes,
                costs=time_steps,
                arc_column=cost_column,
                overwrite=True,
            )
            isocalc(isoraw)
            grass.run_command("g.rename", vect=isos_final + "," + isochrones)
            # give the polygons a default color table
            grass.run_command("v.colors",
                              map=isochrones,
                              use="cat",
                              color="grey")

    else:
        grass.fatal(_("You need to chose at least one of the methods"))
Example #21
def main():
    '''
    This is the main code block where the variables and loop are set up and
    executed.
    '''
    # Set up some basic variables
    years = options["number"]
    prefx = options["prefx"]

    # These values could be read in from a climate file, so check that, and
    # act accordingly. Either way, the result will be some lists with the same
    # number of entries as there are iterations.
    if options["climfile"]:
        R2 = climfile(options["climfile"], 0, years)
        rain2 = climfile(options["climfile"], 1, years)
        stormlength2 = climfile(options["climfile"], 2, years)
        storms2 = climfile(options["climfile"], 3, years)
        stormi2 = climfile(options["climfile"], 4, years)
    else:
        R2 = climfile(options["r"], 0, years)
        rain2 = climfile(options["rain"], 1, years)
        stormlength2 = climfile(options["stormlength"], 2, years)
        storms2 = climfile(options["storms"], 3, years)
        stormi2 = climfile(options["stormi"], 4, years)

    # Now gather these five lists into one master list, to make it easier to pass on to landscapeEvol()
    masterlist = [R2, rain2, stormlength2, storms2, stormi2]

    # Make the statsout file with correct column headers
    if options["statsout"] == "":
        env = grass.gisenv()
        mapset = env['MAPSET']
        statsout = '%s_%slsevol_stats.csv' % (mapset, prefx)
    else:
        statsout = options["statsout"]
    if os.path.isfile(statsout):
        f = open(statsout, 'at')
    else:
        f = open(statsout, 'wt')
        f.write(
            'These statistics are in units of vertical meters (depth) per cell\n'
            +
            ' ,,Mean Values,,,,Standard Deviations,,,,Totals,,,Additional Stats\n'
            + 'Iteration,,Mean Erosion,Mean Deposition,Mean Soil Depth,,' +
            'Standard Deviation Erosion,Standard Deviation Deposition,Standard Deviation Soil Depth,,'
            + 'Total Sediment Eroded,Total Sediment Deposited,,' +
            'Minimum Erosion,First Quartile Erosion,Median Erosion,Third Quartile Erosion,Maximum Erosion,Original Un-smoothed Maximum Erosion,,'
            +
            'Minimum Deposition,First Quartile Deposition,Median Deposition,Third Quartile Deposition,Maximum Deposition,Original Un-smoothed Maximum Deposition,,'
            +
            'Minimum Soil Depth,First Quartile Soil Depth,Median Soil Depth,Third Quartile Soil Depth,Maximum Soil Depth'
        )
    if flags["p"] is True:
        grass.message('Making sample points map for determining cutoffs.')
    else:
        grass.message(
            '\n##################################################' +
            '\n##################################################\n' +
            '\n STARTING SIMULATION\n' +
            '\nBeginning iteration sequence. This may take some time.\n' +
            'Process is not finished until you see the message: \'Done with everything!\'\n'
            +
            ' _____________________________________________________________\n'
            +
            '_____________________________________________________________\n')
        grass.message("Total number of iterations to be run is %s" % years)

    # Get the region settings
    region1 = grass.region()

    # This is the main loop for iterating landscape evolution!
    for x in range(int(years)):
        grass.message(
            '\n##################################################\n' +
            '\n*************************\n' + "Starting Iteration = %s" %
            (x + 1) + '\n*************************\n')
        landscapeEvol(x, (x + 1), prefx, statsout, region1['nsres'],
                      masterlist, f)

    # Since we are now done with the loop, close the stats file.
    f.close()
    grass.message('\nIterations complete!\n' + '\nDone with everything!')
    sys.exit(0)
Example #22
def coarsen_region(factor=3):
    # g.region expects whole numbers of rows/cols, so use integer division
    region = gs.region()
    gs.run_command('g.region',
                   rows=int(region['rows'] // factor),
                   cols=int(region['cols'] // factor))
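A usage sketch: coarsening overwrites the current region, so it pairs naturally with a temporary region (a pattern several other examples on this page use). Assumes grass.script is imported as gs:

import grass.script as gs

gs.use_temp_region()        # protect the user's region
coarsen_region(factor=3)    # roughly 3x coarser in both directions
print(gs.region()['nsres'])
gs.del_temp_region()        # restore the previous region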
Example #23
def main():
    """Do the main work"""
    # Define static variables
    global tmpname
    tmpname = gscript.tempname(12)

    # Define user input variables
    a_flag = flags['a']
    elevation = options['elevation']
    direction = options['direction']
    slope_measure = options['slope_measure']
    outputs = options['output'].split(',')
    dir_format = options['dir_type']

    try:
        steps = list(map(int, options['steps'].split(',')))
    except:
        gscript.fatal(_('Not all steps given as integer.'))

    n_steps = max(steps)

    abs = 'abs' if a_flag else ''

    dir_values = gscript.parse_command('r.info', map=direction, flags='r')

    dir_type = check_directions(dir_format,float(dir_values['max']))

    # Check if the number of requested steps and output maps match
    if len(outputs) != len(steps):
        gscript.fatal(_("Number of steps and number of output maps differ"))

    # Define static variables
    kwargs_even = {'dir': direction,
                   'elev_in': '{}_elev_even'.format(tmpname),
                   'elev_out': '{}_elev_odd'.format(tmpname)}
    kwargs_odd = {'dir': direction,
                  'elev_in': '{}_elev_odd'.format(tmpname),
                  'elev_out': '{}_elev_even'.format(tmpname)}

    if slope_measure != 'difference':
        kwargs_even['dist_in'] = '{}_dist_even'.format(tmpname)
        kwargs_even['dist_out'] = '{}_dist_odd'.format(tmpname)
        kwargs_even['dist_sum_in'] = '{}_dist_sum_even'.format(tmpname)
        kwargs_even['dist_sum_out'] = '{}_dist_sum_odd'.format(tmpname)
        kwargs_odd['dist_in'] = '{}_dist_odd'.format(tmpname)
        kwargs_odd['dist_out'] = '{}_dist_even'.format(tmpname)
        kwargs_odd['dist_sum_in'] = '{}_dist_sum_odd'.format(tmpname)
        kwargs_odd['dist_sum_out'] = '{}_dist_sum_even'.format(tmpname)

    dir_format_dict = {
        'degree_45': [1, 2, 3, 4, 5, 6, 7],
        'degree': [45, 90, 135, 180, 225, 270, 315],
        'bitmask': [1, 8, 7, 6, 5, 4, 3]
    }

    slope_measure_dict = {
        'difference': """\n{gradient}={abs}({elev}-{elev_in})""",
        'percent': """\n{gradient}={abs}({elev}-{elev_in})/{dist}""",
        'percent_int': """\n{gradient}=round(({abs}(({elev}-{elev_in}))/{dist})*10000.0)""",
        'degree': """\n{gradient}=atan({abs}({elev}-{elev_in})/{dist})""",
        'degree_int': """\n{gradient}=round(atan({abs}({elev}-{elev_in})/{dist})*100.0)"""
    }

    dirs = dir_format_dict[dir_type]

    expression_template = """{{elev_out}}=\
if({{dir}} == {0}, if(isnull({{elev_in}}[-1,1]),{{elev_in}},{{elev_in}}[-1,1]), \
if({{dir}} == {1}, if(isnull({{elev_in}}[-1,0]),{{elev_in}},{{elev_in}}[-1,0]), \
if({{dir}} == {2}, if(isnull({{elev_in}}[-1,-1]),{{elev_in}},{{elev_in}}[-1,-1]), \
if({{dir}} == {3}, if(isnull({{elev_in}}[0,-1]),{{elev_in}},{{elev_in}}[0,-1]), \
if({{dir}} == {4}, if(isnull({{elev_in}}[1,-1]),{{elev_in}},{{elev_in}}[1,-1]), \
if({{dir}} == {5}, if(isnull({{elev_in}}[1,0]),{{elev_in}},{{elev_in}}[1,0]), \
if({{dir}} == {6}, if(isnull({{elev_in}}[1,1]),{{elev_in}},{{elev_in}}[1,1]), \
if(isnull({{elev_in}}[0,1]),{{elev_in}},{{elev_in}}[0,1]))))))))""".format(*dirs)

    kwargs = {'dir': direction,
               'elev_in': elevation,
               'elev_out': '{}_elev_even'.format(tmpname)}

    if slope_measure != 'difference':
        expression_template += """\n{{dist_out}}=\
if({{dir}} == {0}, if(isnull({{dist_in}}[-1,1]),{{dist_in}},{{dist_in}}[-1,1]), \
if({{dir}} == {1}, if(isnull({{dist_in}}[-1,0]),{{dist_in}},{{dist_in}}[-1,0]), \
if({{dir}} == {2}, if(isnull({{dist_in}}[-1,-1]),{{dist_in}},{{dist_in}}[-1,-1]), \
if({{dir}} == {3}, if(isnull({{dist_in}}[0,-1]),{{dist_in}},{{dist_in}}[0,-1]), \
if({{dir}} == {4}, if(isnull({{dist_in}}[1,-1]),{{dist_in}},{{dist_in}}[1,-1]), \
if({{dir}} == {5}, if(isnull({{dist_in}}[1,0]),{{dist_in}},{{dist_in}}[1,0]), \
if({{dir}} == {6}, if(isnull({{dist_in}}[1,1]),{{dist_in}},{{dist_in}}[1,1]), \
if(isnull({{dist_in}}[0,1]),{{dist_in}},{{dist_in}}[0,1]))))))))
{{dist_sum_out}}={{dist_sum_in}}+{{dist_in}}""".format(*dirs)

        kwargs['dist_in'] = '{}_dist_odd'.format(tmpname)
        kwargs['dist_out'] = '{}_dist_even'.format(tmpname)
        kwargs['dist_sum_in'] = '{}_dist_sum_odd'.format(tmpname)
        kwargs['dist_sum_out'] = '{}_dist_sum_even'.format(tmpname)

        # Start processing
        current_region = gscript.region()

        gscript.run_command('r.mapcalc', overwrite=True, quiet=True,
                            expression="""{dist_in}=\
if({dir} == {NE} || {dir} == {NW} || {dir} == {SW}\
|| {dir} == {SE}, sqrt({ewres}^2+{nsres}^2), \
if({dir} == {N} || {dir} == {S},{nsres},{ewres}))
{dist_sum_in}=0""".format(NE=dirs[0], NW=dirs[2], SW=dirs[4], SE=dirs[6],
                                N=dirs[1], S=dirs[5],
                                nsres=current_region['nsres'],
                                ewres=current_region['ewres'],
                                dir=direction,
                                dist_in=kwargs['dist_in'],
                                dist_sum_in=kwargs['dist_sum_in']))

    for x in range(max(steps)+1):
        mc_expression = expression_template.format(**kwargs)

        if x in steps:
            idx = steps.index(x)
            # Compile expression for r.mapcalc
            result_expression = slope_measure_dict[slope_measure]
            # Results are computed for output from previous step
            if slope_measure != 'difference':
                result_kwargs = {'elev_in': kwargs['elev_in'],
                                 'elev': elevation,
                                 'dist': kwargs['dist_sum_in'],
                                 'abs': abs,
                                 'gradient': outputs[idx]}
            else:
                result_kwargs = {'elev_in': kwargs['elev_in'],
                                 'elev': elevation,
                                 'abs': abs,
                                 'gradient': outputs[idx]}

            result_expression = result_expression.format(**result_kwargs)


            if x == max(steps):
                mc_expression = result_expression.lstrip('\n')
            else:
                mc_expression += result_expression

        gscript.run_command('r.mapcalc', overwrite=True, quiet=True,
                            expression=mc_expression)

        if x in steps:
            gscript.raster.raster_history(outputs[idx])

        # Set variables for next iteration
        # Use even and odd numbers for iterative re-naming
        if x % 2 == 0:
            # Even
            kwargs = kwargs_even
        else:
            # Odd
            kwargs = kwargs_odd

        gscript.percent(x, max(steps), 1)
Example #24
def main():
    global usermask, mapset, tmp_rmaps, tmp_vmaps

    input = options['input']
    output = options['output']
    tension = options['tension']
    smooth = options['smooth']
    method = options['method']
    edge = int(options['edge'])
    segmax = int(options['segmax'])
    npmin = int(options['npmin'])
    lambda_ = float(options['lambda'])
    memory = options['memory']
    quiet = True  # FIXME
    mapset = grass.gisenv()['MAPSET']
    unique = str(os.getpid())  # Shouldn't we use temp name?
    prefix = 'r_fillnulls_%s_' % unique
    failed_list = list()  # a list of failed holes. Caused by issues with v.surf.rst. Connected with #1813

    # check if input file exists
    if not grass.find_file(input)['file']:
        grass.fatal(_("Raster map <%s> not found") % input)

    # save original region
    reg_org = grass.region()

    # check if a MASK is already present
    # and remove it to not interfere with NULL lookup part
    # as we don't fill MASKed parts!
    if grass.find_file('MASK', mapset=mapset)['file']:
        usermask = "usermask_mask." + unique
        grass.message(_("A user raster mask (MASK) is present. Saving it..."))
        grass.run_command('g.rename', quiet=quiet, raster=('MASK', usermask))

    # check if method is rst to use v.surf.rst
    if method == 'rst':
        # idea: filter all NULLS and grow that area(s) by 3 pixel, then
        # interpolate from these surrounding 3 pixel edge
        filling = prefix + 'filled'

        grass.use_temp_region()
        grass.run_command('g.region', align=input, quiet=quiet)
        region = grass.region()
        ns_res = region['nsres']
        ew_res = region['ewres']

        grass.message(_("Using RST interpolation..."))
        grass.message(_("Locating and isolating NULL areas..."))

        # creating binary (0/1) map
        if usermask:
            grass.message(_("Skipping masked raster parts"))
            grass.mapcalc("$tmp1 = if(isnull(\"$input\") && !($mask == 0 || isnull($mask)),1,null())",
                          tmp1=prefix + 'nulls', input=input, mask=usermask)
        else:
            grass.mapcalc("$tmp1 = if(isnull(\"$input\"),1,null())",
                          tmp1=prefix + 'nulls', input=input)
        tmp_rmaps.append(prefix + 'nulls')

        # restoring user's mask, if present
        # to ignore MASKed original values
        if usermask:
            grass.message(_("Restoring user mask (MASK)..."))
            try:
                grass.run_command('g.rename', quiet=quiet, raster=(usermask, 'MASK'))
            except CalledModuleError:
                grass.warning(_("Failed to restore user MASK!"))
            usermask = None

        # grow identified holes by X pixels
        grass.message(_("Growing NULL areas"))
        tmp_rmaps.append(prefix + 'grown')
        try:
            grass.run_command('r.grow', input=prefix + 'nulls',
                              radius=edge + 0.01, old=1, new=1,
                              out=prefix + 'grown', quiet=quiet)
        except CalledModuleError:
            grass.fatal(_("abandoned. Removing temporary map, restoring "
                          "user mask if needed:"))

        # assign unique IDs to each hole or hole system (holes closer than edge distance)
        grass.message(_("Assigning IDs to NULL areas"))
        tmp_rmaps.append(prefix + 'clumped')
        try:
            grass.run_command(
                'r.clump',
                input=prefix +
                'grown',
                output=prefix +
                'clumped',
                quiet=quiet)
        except CalledModuleError:
            grass.fatal(_("abandoned. Removing temporary map, restoring "
                          "user mask if needed:"))

        # get a list of unique hole cat's
        grass.mapcalc("$out = if(isnull($inp), null(), $clumped)",
                      out=prefix + 'holes', inp=prefix + 'nulls', clumped=prefix + 'clumped')
        tmp_rmaps.append(prefix + 'holes')

        # use new IDs to identify holes
        try:
            grass.run_command('r.to.vect', flags='v',
                              input=prefix + 'holes', output=prefix + 'holes',
                              type='area', quiet=quiet)
        except:
            grass.fatal(_("abandoned. Removing temporary maps, restoring "
                          "user mask if needed:"))
        tmp_vmaps.append(prefix + 'holes')

        # get a list of unique hole cat's
        cats_file_name = grass.tempfile(False)
        grass.run_command(
            'v.db.select',
            flags='c',
            map=prefix + 'holes',
            columns='cat',
            file=cats_file_name,
            quiet=quiet)
        cat_list = list()
        cats_file = open(cats_file_name)
        for line in cats_file:
            cat_list.append(line.rstrip('\n'))
        cats_file.close()
        os.remove(cats_file_name)

        if len(cat_list) < 1:
            grass.fatal(_("Input map has no holes. Check region settings."))

        # GTC Hole is NULL area in a raster map
        grass.message(_("Processing %d map holes") % len(cat_list))
        first = True
        hole_n = 1
        for cat in cat_list:
            holename = prefix + 'hole_' + cat
            # GTC Hole is a NULL area in a raster map
            grass.message(_("Filling hole %s of %s") % (hole_n, len(cat_list)))
            hole_n = hole_n + 1
            # cut out only CAT hole for processing
            try:
                grass.run_command('v.extract', input=prefix + 'holes',
                                  output=holename + '_pol',
                                  cats=cat, quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))
            tmp_vmaps.append(holename + '_pol')

            # zoom to specific hole with a buffer of two cells around the hole to
            # remove rest of data
            try:
                grass.run_command('g.region',
                                  vector=holename + '_pol', align=input,
                                  w='w-%d' % (edge * 2 * ew_res),
                                  e='e+%d' % (edge * 2 * ew_res),
                                  n='n+%d' % (edge * 2 * ns_res),
                                  s='s-%d' % (edge * 2 * ns_res),
                                  quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))

            # remove temporary map to not overfill disk
            try:
                grass.run_command('g.remove', flags='fb', type='vector',
                                  name=holename + '_pol', quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))
            tmp_vmaps.remove(holename + '_pol')

            # copy only data around hole
            grass.mapcalc("$out = if($inp == $catn, $inp, null())",
                          out=holename, inp=prefix + 'holes', catn=cat)
            tmp_rmaps.append(holename)

            # If here loop is split into two, next part of loop can be run in parallel
            # (except final result patching)
            # Downside - on large maps such approach causes large disk usage

            # grow hole border to get its edge area
            tmp_rmaps.append(holename + '_grown')
            try:
                grass.run_command('r.grow', input=holename, radius=edge + 0.01,
                                  old=-1, out=holename + '_grown', quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary map, restoring "
                              "user mask if needed:"))

            # no idea why r.grow old=-1 doesn't replace existing values with NULL
            grass.mapcalc("$out = if($inp == -1, null(), \"$dem\")",
                          out=holename + '_edges', inp=holename + '_grown', dem=input)
            tmp_rmaps.append(holename + '_edges')

            # convert to points for interpolation
            tmp_vmaps.append(holename)
            try:
                grass.run_command('r.to.vect',
                                  input=holename + '_edges', output=holename,
                                  type='point', flags='z', quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))

            # count number of points to control segmax parameter for interpolation:
            pointsnumber = grass.vector_info_topo(map=holename)['points']
            grass.verbose(_("Interpolating %d points") % pointsnumber)

            if pointsnumber < 2:
                grass.verbose(_("No points to interpolate"))
                failed_list.append(holename)
                continue

            # Avoid v.surf.rst warnings
            if pointsnumber < segmax:
                use_npmin = pointsnumber
                use_segmax = pointsnumber * 2
            else:
                use_npmin = npmin
                use_segmax = segmax

            # launch v.surf.rst
            tmp_rmaps.append(holename + '_dem')
            try:
                grass.run_command('v.surf.rst', quiet=quiet,
                                  input=holename, elev=holename + '_dem',
                                  tension=tension, smooth=smooth,
                                  segmax=use_segmax, npmin=use_npmin)
            except CalledModuleError:
                # GTC Hole is NULL area in a raster map
                grass.fatal(_("Failed to fill hole %s") % cat)

            # v.surf.rst sometimes fails with exit code 0
            # related bug #1813
            if not grass.find_file(holename + '_dem')['file']:
                try:
                    tmp_rmaps.remove(holename)
                    tmp_rmaps.remove(holename + '_grown')
                    tmp_rmaps.remove(holename + '_edges')
                    tmp_rmaps.remove(holename + '_dem')
                    tmp_vmaps.remove(holename)
                except:
                    pass
                grass.warning(
                    _("Filling has failed silently. Leaving temporary maps "
                      "with prefix <%s> for debugging.") %
                    holename)
                failed_list.append(holename)
                continue

            # append hole result to interpolated version later used to patch into original DEM
            if first:
                tmp_rmaps.append(filling)
                grass.run_command('g.region', align=input, raster=holename + '_dem', quiet=quiet)
                grass.mapcalc("$out = if(isnull($inp), null(), $dem)",
                              out=filling, inp=holename, dem=holename + '_dem')
                first = False
            else:
                tmp_rmaps.append(filling + '_tmp')
                grass.run_command(
                    'g.region', align=input, raster=(
                        filling, holename + '_dem'), quiet=quiet)
                grass.mapcalc(
                    "$out = if(isnull($inp), if(isnull($fill), null(), $fill), $dem)",
                    out=filling + '_tmp',
                    inp=holename,
                    dem=holename + '_dem',
                    fill=filling)
                try:
                    grass.run_command('g.rename',
                                      raster=(filling + '_tmp', filling),
                                      overwrite=True, quiet=quiet)
                except CalledModuleError:
                    grass.fatal(
                        _("abandoned. Removing temporary maps, restoring user "
                          "mask if needed:"))
                # this map has been removed. No need for later cleanup.
                tmp_rmaps.remove(filling + '_tmp')

            # remove temporary maps to not overfill disk
            try:
                tmp_rmaps.remove(holename)
                tmp_rmaps.remove(holename + '_grown')
                tmp_rmaps.remove(holename + '_edges')
                tmp_rmaps.remove(holename + '_dem')
            except:
                pass
            try:
                grass.run_command('g.remove', quiet=quiet,
                                  flags='fb', type='raster',
                                  name=(holename,
                                        holename + '_grown',
                                        holename + '_edges',
                                        holename + '_dem'))
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))
            try:
                tmp_vmaps.remove(holename)
            except:
                pass
            try:
                grass.run_command('g.remove', quiet=quiet, flags='fb',
                                  type='vector', name=holename)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring user mask if needed:"))

    # check if method is different from rst to use r.resamp.bspline
    if method != 'rst':
        grass.message(_("Using %s bspline interpolation") % method)

        # clone current region
        grass.use_temp_region()
        grass.run_command('g.region', align=input)

        reg = grass.region()
        # launch r.resamp.bspline
        tmp_rmaps.append(prefix + 'filled')
        # If there are no NULL cells, the r.resamp.bspline call
        # will end with an error although for our needs it's fine
        # Only problem - this state must be read from stderr
        new_env = dict(os.environ)
        new_env['LC_ALL'] = 'C'
        if usermask:
            try:
                p = grass.core.start_command(
                    'r.resamp.bspline',
                    input=input,
                    mask=usermask,
                    output=prefix + 'filled',
                    method=method,
                    ew_step=3 * reg['ewres'],
                    ns_step=3 * reg['nsres'],
                    lambda_=lambda_,
                    memory=memory,
                    flags='n',
                    stderr=subprocess.PIPE,
                    env=new_env)
                stdout, stderr = p.communicate()
                if "No NULL cells found" in stderr:
                    grass.run_command('g.copy', raster='%s,%sfilled' % (input, prefix), overwrite=True)
                    p.returncode = 0
                    grass.warning(_("Input map <%s> has no holes. Copying to output without modification.") % (input,))
            except CalledModuleError as e:
                grass.fatal(_("Failure during bspline interpolation. Error message: %s") % stderr)
        else:
            try:
                p = grass.core.start_command(
                    'r.resamp.bspline',
                    input=input,
                    output=prefix + 'filled',
                    method=method,
                    ew_step=3 * reg['ewres'],
                    ns_step=3 * reg['nsres'],
                    lambda_=lambda_,
                    memory=memory,
                    flags='n',
                    stderr=subprocess.PIPE,
                    env=new_env)
                stdout, stderr = p.communicate()
                if "No NULL cells found" in stderr:
                    grass.run_command('g.copy', raster='%s,%sfilled' % (input, prefix), overwrite=True)
                    p.returncode = 0
                    grass.warning(_("Input map <%s> has no holes. Copying to output without modification.") % (input,))
            except CalledModuleError as e:
                grass.fatal(_("Failure during bspline interpolation. Error message: %s") % stderr)

    # restoring user's mask, if present:
    if usermask:
        grass.message(_("Restoring user mask (MASK)..."))
        try:
            grass.run_command('g.rename', quiet=quiet, raster=(usermask, 'MASK'))
        except CalledModuleError:
            grass.warning(_("Failed to restore user MASK!"))
        usermask = None

    # set region to original extents, align to input
    grass.run_command('g.region', n=reg_org['n'], s=reg_org['s'],
                      e=reg_org['e'], w=reg_org['w'], align=input)

    # patch orig and fill map
    grass.message(_("Patching fill data into NULL areas..."))
    # we can use --o here as g.parser already checks on startup
    grass.run_command('r.patch', input=(input, prefix + 'filled'),
                      output=output, overwrite=True)

    # restore the real region
    grass.del_temp_region()

    grass.message(_("Filled raster map is: %s") % output)

    # write cmd history:
    grass.raster_history(output)

    if len(failed_list) > 0:
        grass.warning(
            _("Following holes where not filled. Temporary maps with are left "
              "in place to allow examination of unfilled holes"))
        outlist = failed_list[0]
        for hole in failed_list[1:]:
            outlist = ', ' + outlist
        grass.message(outlist)

    grass.message(_("Done."))
Example #25
import re

import numpy as np

import grass.script as grass
from grass.script import array as garray

from mpl_toolkits.basemap import Basemap
#import matplotlib.mpl as mpl
#import matplotlib.scale as scale
from matplotlib.colors import Normalize

# run in global database 30as

imshow = True

m = Basemap(width=7000000,height=6500000,
            resolution='l',projection='laea',\
            lat_ts=52,lat_0=52,lon_0=-100.)

grass.run_command('g.region', rast='wb_000000')
nx = grass.region()['cols']
ny = grass.region()['rows']

# Now the actual data
WB_000000 = garray.array()
WB_000000.read("wb_000000", null=np.nan)

WB = garray.array()
WB.read("wb_021000", null=np.nan)

WBdiff = np.flipud(WB - WB_000000) * 1000 # mm/yr
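# garray rows are ordered north to south; np.flipud reorders them,
# presumably to match the plot orientation expected further below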


# Colorbar is bipolar:
# http://stackoverflow.com/questions/7404116/defining-the-midpoint-of-a-colormap-in-matplotlib
Exemplo n.º 26
0
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
        "ned": {
                'product': 'National Elevation Dataset (NED)',
                'dataset': {
                        'ned1sec': (1. / 3600, 30, 100),
                        'ned13sec': (1. / 3600 / 3, 10, 30),
                        'ned19sec': (1. / 3600 / 9, 3, 10)
                        },
                'subset': {},
                'extent': [
                        '1 x 1 degree',
                        '15 x 15 minute'
                         ],
                'format': 'IMG',
                'extension': 'img',
                'zip': True,
                'srs': 'wgs84',
                'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
                'interpolation': 'bilinear',
                'url_split': '/'
                },
        "nlcd": {
                'product': 'National Land Cover Database (NLCD)',
                'dataset': {
                        'National Land Cover Database (NLCD) - 2001': (1. / 3600, 30, 100),
                        'National Land Cover Database (NLCD) - 2006': (1. / 3600, 30, 100),
                        'National Land Cover Database (NLCD) - 2011': (1. / 3600, 30, 100)
                        },
                'subset': {
                        'Percent Developed Imperviousness',
                        'Percent Tree Canopy',
                        'Land Cover'
                        },
                'extent': ['3 x 3 degree'],
                'format': 'GeoTIFF',
                'extension': 'tif',
                'zip': True,
                'srs': 'wgs84',
                'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
                'interpolation': 'nearest',
                'url_split': '/'
                },
        "naip": {
                'product': 'USDA National Agriculture Imagery Program (NAIP)',
                'dataset': {
                        'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)},
                'subset': {},
                'extent': [
                        '3.75 x 3.75 minute',
                         ],
                'format': 'JPEG2000',
                'extension': 'jp2',
                'zip': False,
                'srs': 'wgs84',
                'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
                'interpolation': 'nearest',
                'url_split': '/'
                }
            }
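    # each dataset tuple above is (resolution in degrees, in meters, in feet);
    # the matching entry is picked further below from the location's units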

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extension = nav_string['extension']
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']

    # Returns current units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(_("The default resampling method for product {product} is {res}").format(product=gui_product,
                        res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    min_coords = gscript.read_command('m.proj', coordinates=(gregion['w'], gregion['s']),
                                      proj_out=product_proj4, separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj', coordinates=(gregion['e'], gregion['n']),
                                      proj_out=product_proj4, separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))
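    # str_bbox is "w,s,e,n" in the product SRS,
    # e.g. (illustrative) "-78.77,35.68,-78.60,35.80"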

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = urllib.quote_plus(gui_prod_str)
    prod_format = urllib.quote_plus(product_format)
    prod_extent = urllib.quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
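    # resulting query (illustrative):
    # .../api/products?datasets=<product>&bbox=<w,s,e,n>&prodFormats=<format>[&prodExtents=<extent>]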
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try:
        TNM_API_GET = urllib2.urlopen(TNM_API_URL, timeout=12)
    except urllib2.URLError:
        gscript.fatal(_("USGS TNM API query has timed out. Check network configuration. Please try again."))
    except:
        gscript.fatal(_("Unexpected error while querying the USGS TNM API. Check network configuration and try again."))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)

    except:
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    size_diff_tolerance = 5
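    # allowed difference (in bytes) between a local file's size and the size
    # reported by the API before the file is treated as incomplete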
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size - TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(_("TNM API ERROR or Zero tiles available for given input parameters."))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    if exist_tile_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    if cleanup_list:
        cleanup_msg = _("\n{0} existing incomplete file(s) detected and removed. Run module again.").format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # format JSON size from bytes into appropriate units for combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if len_total_size <= 6:
            # guard: sizes under 1 MB would otherwise leave total_size_str unset
            total_size_float = total_size * 1e-3
            total_size_str = str("{0:.2f}".format(total_size_float) + " KB")
        elif len_total_size < 10:
            total_size_float = total_size * 1e-6
            total_size_str = str("{0:.2f}".format(total_size_float) + " MB")
        else:
            total_size_float = total_size * 1e-9
            total_size_str = str("{0:.2f}".format(total_size_float) + " GB")
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
                     "USGS file(s) to download:",
                     "-------------------------",
                     "Total download size:\t{size}",
                     "Tile count:\t{count}",
                     "USGS SRS:\t{srs}",
                     "USGS tile titles:\n{tile}",
                     "-------------------------",
                     )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(_("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urllib2.urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
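                # steps only drives the gscript.percent() progress display below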
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                    download_count, TNM_count)
            gscript.info(file_complete)
        except urllib2.URLError:
            gscript.fatal(_("USGS download request has timed out. Network or formatting error."))
        except StandardError:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                            download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        for z in local_zip_path_list:
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.endswith(product_extension):
                            extracted_tile = os.path.join(work_dir, str(f))
                            if os.path.exists(extracted_tile):
                                os.remove(extracted_tile)
                            read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    cleanup_list.append(extracted_tile)
            except:
                cleanup_list.append(extracted_tile)
                gscript.fatal(_("Unable to locate or extract IMG file from ZIP archive."))

    # operations for extracted or complete files available locally
    for t in local_tile_path_list:
        # create variables for use in GRASS GIS import process
        LT_file_name = os.path.basename(t)
        LT_layer_name = os.path.splitext(LT_file_name)[0]
        in_info = ("Importing and reprojecting {0}...").format(LT_file_name)
        gscript.info(in_info)
        # import to GRASS GIS
        try:
            gscript.run_command('r.import', input=t, output=LT_layer_name,
                                resolution='value', resolution_value=product_resolution,
                                extent="region", resample=product_interpolation)
        except CalledModuleError:
            in_error = ("Unable to import '{0}'").format(LT_file_name)
            gscript.warning(in_error)
        else:
            patch_names.append(LT_layer_name)
        # do not remove by default with NAIP, there are no zip files
        if gui_product != 'naip' or not gui_k_flag:
            cleanup_list.append(t)

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [name + '.' + i for name in patch_names]
                        gscript.run_command('r.patch', input=patch_names_i,
                                            output=gui_output_layer + '.' + i)
                else:
                    gscript.run_command('r.patch', input=patch_names,
                                        output=gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added").format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if 'k' flag
                if not gui_k_flag:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [name + '.' + i for name in patch_names]
                            gscript.run_command('g.remove', type='raster',
                                                name=patch_names_i, flags='f')
                    else:
                        gscript.run_command('g.remove', type='raster',
                                            name=patch_names, flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
        elif len(patch_names) == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename', raster=(patch_names[0] + '.' + i, gui_output_layer + '.' + i))
            else:
                gscript.run_command('g.rename', raster=(patch_names[0], gui_output_layer))
        temp_down_count = "\n{0} of {1} tile/s succesfully imported and patched.".format(completed_tiles_count,
                                                                                         tiles_needed_count)
        gscript.info(temp_down_count)
    else:
        gscript.fatal("Error downloading files. Please retry.")

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = ("<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors', map=gui_output_layer, color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite', red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2', blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.del_temp_region()
Exemplo n.º 27
0
def main(rinput, background, output, method):
    try:
        from PIL import Image
    except ImportError:
        gscript.fatal("Cannot import PIL."
                      " Please install the Python pillow package.")

    if "@" in rinput:
        rinput = rinput.split("@")[0]
    suffix = "_" + os.path.basename(gscript.tempfile(False))
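    # unique suffix derived from a temporary file basename, so tmpname below
    # will not collide with an existing raster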
    tmpname = rinput + suffix
    gscript.run_command("g.copy", raster=[rinput, tmpname])
    TMPRAST.append(tmpname)
    gscript.run_command("r.colors", map=tmpname, color="grey")

    reg = gscript.region()
    width = reg["cols"]
    height = reg["rows"]

    fg_out = os.path.join(TMPDIR, "foreground.png")
    bg_out = os.path.join(TMPDIR, "background.png")
    intensity_tmp = os.path.join(TMPDIR, "intensity.png")
    gscript.run_command(
        "d.mon",
        start="cairo",
        output=fg_out,
        width=width,
        height=height,
        bgcolor="black",
    )
    gscript.run_command("d.rast", map=rinput)
    gscript.run_command("d.mon", stop="cairo")

    # background
    gscript.run_command("d.mon",
                        start="cairo",
                        output=bg_out,
                        width=width,
                        height=height)
    gscript.run_command("d.rast", map=background)
    gscript.run_command("d.mon", stop="cairo")

    # greyscale
    gscript.run_command("d.mon",
                        start="cairo",
                        output=intensity_tmp,
                        width=width,
                        height=height)
    gscript.run_command("d.rast", map=tmpname)
    gscript.run_command("d.mon", stop="cairo")

    # put together with transparency
    foreground = Image.open(fg_out)
    background = Image.open(bg_out)
    intensity = Image.open(intensity_tmp)

    foreground = foreground.convert("RGBA")
    data_f = foreground.getdata()
    data_i = intensity.getdata()
    newData = []
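    # build the composited pixels: where the greyscale intensity is 0 the
    # foreground becomes fully transparent, otherwise its alpha is derived
    # from the intensity via scale()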
    for i in range(len(data_f)):
        intens = data_i[i][0]
        if intens == 0:
            newData.append((data_f[i][0], data_f[i][1], data_f[i][2], 0))
        else:
            newData.append((
                data_f[i][0],
                data_f[i][1],
                data_f[i][2],
                scale(0, 255, intens, method),
            ))
    foreground.putdata(newData)
    background.paste(foreground, (0, 0), foreground)
    background.save(output)
Exemplo n.º 28
0
    def Run(self, input, column, output, package, psill, nugget, range, kappa,
            logger, overwrite, model, block, output_var, command, **kwargs):
        """Wrapper for all functions above."""

        logger.message(
            _("Processing %d cells. Computing time increases "
              "exponentially with resolution.") % grass.region()["cells"])
        logger.message(_("Importing data..."))

        if self.InputData is None:
            self.InputData = self.ImportMap(input, column)
        # and from here over, InputData refers to the global variable
        logger.message(_("Data successfully imported."))

        GridPredicted = self.CreateGrid(self.InputData)

        logger.message(_("Fitting variogram..."))

        if block is not "":
            self.predictor = "x+y"
        else:
            self.predictor = "1"
        if self.Variogram is None:
            self.Variogram = self.FitVariogram(
                robjects.Formula(column + "~" + self.predictor),
                self.InputData,
                model=model,
                psill=psill,
                nugget=nugget,
                range=range,
                kappa=kappa,
            )
        logger.message(_("Variogram fitting complete."))

        logger.message(_("Kriging..."))
        KrigingResult = self.DoKriging(
            robjects.Formula(column + "~" + self.predictor),
            self.InputData,
            GridPredicted,
            self.Variogram["variogrammodel"],
            block,
        )  # using global ones
        logger.message(_("Kriging complete."))

        self.ExportMap(
            map=KrigingResult,
            column="var1.pred",
            name=output,
            overwrite=overwrite,
            command=command,
            variograms=self.Variogram,
        )
        if output_var is not "":
            self.ExportMap(
                map=KrigingResult,
                column="var1.var",
                name=output_var,
                overwrite=overwrite,
                command=command,
                variograms=self.Variogram,
            )
Exemplo n.º 29
0
def main():

    # Input for all methods
    global roads
    roads = options['map']
    global layer
    layer = options['roads_layer']
    node_layer = options['node_layer']
    cost_column = options['cost_column']
    start_points = options['start_points']
    time_steps = options['time_steps'].split(',')
    isochrones = options['isochrones']
    global method
    method = options['method']

    if method == 'r.cost':
        offroad_speed = float(options['offroad_speed'])
        if offroad_speed == 0:
            grass.message(_('Offroad speed has to be > 0. Set to 0.00001.'))
            offroad_speed = 0.00001

        memory = int(options['memory'])
        # Output
        if options['timemap']:
            timemap = options['timemap']
        else:
            timemap = None

        global tmp_cost_map
        global tmp_time_map
        global tmp_region_map
        global tmp_cost_column

        tmp_cost_map = 'cost_map_tmp_%d' % os.getpid()
        tmp_time_map = 'time_map_tmp_%d' % os.getpid()
        tmp_region_map = 'region_map_tmp_%d' % os.getpid()

        # get current resolution
        region = grass.region()
        resolution = math.sqrt(float(region['nsres']) * float(region['ewres']))
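        # use the geometric mean of the N-S and E-W resolutions as a
        # single representative cell size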

        if grass.vector_db(roads)[int(layer)]['driver'] == 'dbf':
            # add cost column to road vector
            tmp_cost_column = 'tmp%d' % os.getpid()
            def_cost_column = tmp_cost_column + ' DOUBLE PRECISION'
            grass.run_command('v.db.addcolumn',
                              map=roads,
                              layer=layer,
                              column=def_cost_column,
                              quiet=True)

            # calculate cost (in minutes) depending on speed
            # (resolution/(speed (in km/h) * 1000 / 60))
            query_value = "%s / (%s * 1000 / 60)" % (resolution, cost_column)
            grass.run_command('v.db.update',
                              map=roads,
                              column=tmp_cost_column,
                              qcolumn=query_value)
        else:
            tmp_cost_column = "%s / (%s * 1000 / 60)" % (resolution,
                                                         cost_column)

        # transform to raster
        grass.run_command('v.to.rast',
                          input=roads,
                          output=tmp_cost_map,
                          use='attr',
                          attrcolumn=tmp_cost_column,
                          type='line',
                          memory=memory)

        # replace null values with cost for off-road areas
        # (resolution/(off-road speed * 1000 / 60))
        null_value = resolution / (offroad_speed * 1000 / 60)
        grass.run_command('r.null', map=tmp_cost_map, null=null_value)

        # limit the cumulated cost surface calculation to the max time distance
        # requested
        max_cost = time_steps[-1]

        # calculate time distance from starting points
        grass.run_command('r.cost',
                          input=tmp_cost_map,
                          start_points=start_points,
                          output=tmp_time_map,
                          max_cost=max_cost,
                          memory=memory)

        if timemap:
            grass.run_command('g.copy', raster=(tmp_time_map, timemap))
            grass.run_command('r.colors',
                              map=timemap,
                              color='grey',
                              flags='ne')

        # recode time distance to time steps
        recode_rules = '0:%s:%s\n' % (time_steps[0], time_steps[0])
        for count in range(1, len(time_steps)):
            recode_rules += time_steps[count - 1] + ':'
            recode_rules += time_steps[count] + ':'
            recode_rules += time_steps[count] + '\n'
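        # e.g. for time_steps = ['10', '20', '30'] the rules read:
        #   0:10:10, 10:20:20, 20:30:30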

        grass.write_command('r.recode',
                            input=tmp_time_map,
                            output=tmp_region_map,
                            rules='-',
                            stdin=recode_rules)

        # transform to vector areas
        grass.run_command('r.to.vect',
                          input=tmp_region_map,
                          output=isochrones,
                          type='area',
                          column='traveltime',
                          flags='s')

        # give the polygons a default color table
        grass.run_command('v.colors',
                          map=isochrones,
                          use='attr',
                          column='traveltime',
                          color='grey')

    elif method == 'v.net.iso':
        global max_distance
        if (options['max_distance']):
            max_distance = float(options['max_distance'])
        else:
            max_distance = None
        global output_cats
        output_cats = []
        for i in range(1, len(time_steps) + 2):
            output_cats.append(i)
        startpoints = grass.read_command('v.distance',
                                         from_=start_points,
                                         to=roads,
                                         to_type='point',
                                         to_layer=node_layer,
                                         upload='cat',
                                         flags='p',
                                         quiet=True).split('\n')[1:-1]

        global isoraw
        isoraw = 'isoraw_temp_%d' % os.getpid()
        global isos_final
        isos_final = 'isos_final_%d' % os.getpid()

        if flags['i']:
            for point in startpoints:
                startpoint_cat = point.split('|')[0]
                startnode_cat = point.split('|')[1]
                grass.run_command('v.net.iso',
                                  input_=roads,
                                  output=isoraw,
                                  center_cats=startnode_cat,
                                  costs=time_steps,
                                  arc_column=cost_column,
                                  overwrite=True)
                isocalc(isoraw)
                outname = isochrones + '_' + startpoint_cat
                grass.run_command('g.rename', vect=isos_final + ',' + outname)
                # give the polygons a default color table
                grass.run_command('v.colors',
                                  map=outname,
                                  use='cat',
                                  color='grey')

        else:
            startnodes = []
            for point in startpoints:
                startnodes.append(point.split('|')[1])
            grass.run_command('v.net.iso',
                              input_=roads,
                              output=isoraw,
                              center_cats=startnodes,
                              costs=time_steps,
                              arc_column=cost_column,
                              overwrite=True)
            isocalc(isoraw)
            grass.run_command('g.rename', vect=isos_final + ',' + isochrones)
            # give the polygons a default color table
            grass.run_command('v.colors',
                              map=isochrones,
                              use='cat',
                              color='grey')

    else:
        grass.fatal(_("You need to chose at least one of the methods"))
Exemplo n.º 30
0
def main():
    global far_edge_value
    global d_max

    options, flags = gscript.parser()
    high = options['high']
    low = options['low']
    output = options['output']
    far_edge = float(options['far_edge'])
    inter_points = int(options['inter_points'])
    use_average_differences = flags['a']

    if (high is None or high == ""):
        gscript.error(_("[r.mblend] ERROR: high is a mandatory parameter."))
        exit()

    if (low is None or low == ""):
        gscript.error(_("[r.mblend] ERROR: low is a mandatory parameter."))
        exit()

    if (output is None or output == ""):
        gscript.error(_("[r.mblend] ERROR: output is a mandatory parameter."))
        exit()

    if (far_edge < 0 or far_edge > 100):
        gscript.error(
            _("[r.mblend] ERROR: far_edge must be a percentage",
              " between 0 and 100."))
        exit()

    if (inter_points < 0):
        gscript.error(
            _("[r.mblend] ERROR: inter_points must be a positive",
              " integer."))
        exit()

    # Set the region to the two input rasters
    gscript.run_command('g.region', raster=high + "," + low)
    # Determine cell side
    region = gscript.region()
    if region['nsres'] > region['ewres']:
        cell_side = region['nsres']
    else:
        cell_side = region['ewres']

    compute_d_max(region)

    # Make cell size compatible
    low_res_inter = getTemporaryIdentifier()
    gscript.message(
        _("[r.mblend] Resampling low resolution raster to higher" +
          " resolution"))
    gscript.run_command('r.resamp.interp',
                        input=low,
                        output=low_res_inter,
                        method='nearest')

    # Obtain extent to interpolate
    low_extent_rast = getTemporaryIdentifier()
    high_extent_rast = getTemporaryIdentifier()
    low_extent = getTemporaryIdentifier()
    high_extent = getTemporaryIdentifier()
    interpol_area = getTemporaryIdentifier()
    gscript.message(_("[r.mblend] Multiplying low resolution by zero"))
    gscript.mapcalc(low_extent_rast + ' = ' + low + ' * 0')
    gscript.message(_("[r.mblend] Multiplying high resolution by zero"))
    gscript.mapcalc(high_extent_rast + ' = ' + high + ' * 0')
    gscript.message(_("[r.mblend] Computing extent of low resolution"))
    gscript.run_command('r.to.vect',
                        input=low_extent_rast,
                        output=low_extent,
                        type='area')
    gscript.message(_("[r.mblend] Computing extent of high resolution"))
    gscript.run_command('r.to.vect',
                        input=high_extent_rast,
                        output=high_extent,
                        type='area')
    gscript.message(_("[r.mblend] Computing area to interpolate"))
    gscript.run_command('v.overlay',
                        ainput=low_extent,
                        binput=high_extent,
                        output=interpol_area,
                        operator='not')

    # Compute difference between the two rasters and vectorise to points
    interpol_area_buff = getTemporaryIdentifier()
    diff = getTemporaryIdentifier()
    diff_points_edge = getTemporaryIdentifier()
    gscript.mapcalc(diff + ' = ' + high + ' - ' + low_res_inter)
    gscript.message(_("[r.mblend] Computing buffer around interpolation area"))
    gscript.run_command('v.buffer',
                        input=interpol_area,
                        output=interpol_area_buff,
                        type='area',
                        distance=cell_side)
    gscript.message(
        _("[r.mblend] Vectorising differences between input" + " rasters"))
    gscript.run_command('r.mask', vector=interpol_area_buff)
    gscript.run_command('r.to.vect',
                        input=diff,
                        output=diff_points_edge,
                        type='point')
    gscript.run_command('r.mask', flags='r')

    # Compute average of the differences if flag -a was passed
    if use_average_differences:
        p = gscript.pipe_command('r.univar', map=diff)
        for line in p.stdout:
            vector = line.split(": ")
            if vector[0] == "mean":
                print("Found it: " + vector[1])
                far_edge_value = vector[1]
        p.wait()

    # Get points in low resolution farther away from high resolution raster
    dist_high = getTemporaryIdentifier()
    weights = getTemporaryIdentifier()
    interpol_area_points = getTemporaryIdentifier()
    pre_interpol_area_points = getTemporaryIdentifier()
    weight_points = getTemporaryIdentifier()
    interpol_area_in_buff = getTemporaryIdentifier()
    weight_points_all_edges = getTemporaryIdentifier()
    weight_points_edge = getTemporaryIdentifier()
    # 1. Distance to High resolution raster
    gscript.message(
        _("[r.mblend] Computing distance to high resolution" + " raster"))
    gscript.run_command('r.grow.distance', input=high, distance=dist_high)
    # 2. Rescale to the interval [0,10000]: these are the weights
    gscript.message(_("[r.mblend] Rescaling distance to [0,10000] interval"))
    gscript.run_command('r.rescale',
                        input=dist_high,
                        output=weights,
                        to='0,' + str(WEIGHT_MAX))
    # 3. Extract points from interpolation area border
    gscript.message(
        _("[r.mblend] Extract points from interpolation area " + "boundary"))
    gscript.run_command('v.to.points',
                        input=interpol_area,
                        output=pre_interpol_area_points,
                        type='boundary',
                        dmax=d_max,
                        layer='-1')
    gscript.message(_("[r.mblend] Copying features to layer 1"))
    gscript.run_command('v.category',
                        input=pre_interpol_area_points,
                        output=interpol_area_points,
                        option='chlayer',
                        layer='2,1')
    gscript.message(_("[r.mblend] Linking attribute table to layer 1"))
    gscript.run_command('v.db.connect',
                        map=interpol_area_points,
                        table=interpol_area_points,
                        layer='1',
                        flags='o')
    # 4. Query distances to interpolation area points
    gscript.message(_("[r.mblend] Querying distances raster"))
    gscript.run_command('v.what.rast',
                        map=interpol_area_points,
                        raster=weights,
                        column=COL_VALUE)
    # 5. Select those with higher weights
    cut_off = str(far_edge / 100 * WEIGHT_MAX)
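    # e.g. with far_edge=95 and WEIGHT_MAX=10000 (as the rescale above
    # suggests), the cut-off is 9500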
    gscript.message(
        _("[r.mblend] Selecting far edge points (using cut-off" +
          " percentage)"))
    gscript.run_command('v.extract',
                        input=interpol_area_points,
                        output=weight_points_edge,
                        where=COL_VALUE + '>' + cut_off)

    # Merge the two point edges and set low res edge to zero
    points_edges = getTemporaryIdentifier()
    gscript.message(_("[r.mblend] Dropping extra column from far edge"))
    gscript.run_command('v.db.dropcolumn',
                        map=weight_points_edge,
                        layer='1',
                        columns='along')
    gscript.message(_("[r.mblend] Setting far edge weights to zero"))
    gscript.run_command('v.db.update',
                        map=weight_points_edge,
                        column=COL_VALUE,
                        value=far_edge_value)
    gscript.message(_("[r.mblend] Patching the two edges"))
    gscript.run_command('v.patch',
                        input=diff_points_edge + ',' + weight_points_edge,
                        output=points_edges,
                        flags='e')

    # Interpolate smoothing raster
    smoothing = getTemporaryIdentifier()
    interpol_area_rst = getTemporaryIdentifier()
    # Consign region to interpolation area
    gscript.run_command('g.region', vector=interpol_area_buff)
    gscript.message(
        _("[r.mblend] Interpolating smoothing surface. This" +
          " might take a while..."))
    gscript.run_command('v.surf.idw',
                        input=points_edges,
                        column=COL_VALUE,
                        output=smoothing,
                        power=2,
                        npoints=inter_points)
    # Reset region to full extent
    gscript.run_command('g.region', raster=high + "," + low)

    # Apply stitching
    smooth_low_res = getTemporaryIdentifier()
    # Sum to low res
    gscript.message(_("[r.mblend] Applying smoothing surface"))
    gscript.mapcalc(smooth_low_res + ' = ' + low_res_inter + ' + ' + smoothing)
    # Add both rasters
    try:
        gscript.message(_("[r.mblend] Joining result into a single raster"))
        gscript.run_command('r.patch',
                            input=high + ',' + smooth_low_res,
                            output=output)
    except Exception as ex:
        gscript.error(_("[r.mblend] ERROR: Failed to create smoothed raster."))
        exit()
Exemplo n.º 31
0
import grass.script as grass
import numpy as np
import random
import os
#from __future__ import print_function
from grass.script import array as garray


grass.run_command("g.region",vect="source_shp,target_shp")
influensprocess=5000
c= grass.region()
n=float(c['n'])
s=float(c['s'])
e=float(c['e'])
w=float(c['w'])


n=n+influensprocess
s=s-influensprocess
e=e+influensprocess
w=w-influensprocess



grass.run_command("g.region",n=n,e=e,s=s,w=w)





Exemplo n.º 32
0
def main():
    soildepth = "temp.soildepth"
    tempsdepth = "temp.soildepth2"
    if os.getenv('GIS_OPT_soildepth') is None:
        soildepth2 = "temp.soildepth3"
    else:
        soildepth2 = os.getenv('GIS_OPT_soildepth')
    elev = os.getenv('GIS_OPT_elev')
    bedrock = os.getenv('GIS_OPT_bedrock')
    smoothingtype = os.getenv('GIS_OPT_smoothingtype')
    smoothingsize = os.getenv('GIS_OPT_smoothingsize')
    slopebreaks = os.getenv('GIS_OPT_slopebreaks').split(";")
    curvebreaks = os.getenv('GIS_OPT_curvebreaks').split(";")
    smin = os.getenv('GIS_OPT_min')
    smax = os.getenv('GIS_OPT_max')
    slope = "temp_slope_deletable"
    pc = "temp_pc_deletable"
    tc = "temp_tc_deletable"
    mc = "temp_mc_deletable"
    temprate = "temp_rate_deletable"
    #let's grab the current resolution
    res = grass.region()['nsres']
    # make color rules for soil depth maps
    sdcolors = tempfile.NamedTemporaryFile()
    sdcolors.write(
        '100% 0:249:47\n20% 78:151:211\n6% 194:84:171\n0% 227:174:217')
    sdcolors.flush()
    grass.message('STEP 1, Calculating curvatures\n')
    grass.run_command('r.slope.aspect',
                      quiet="True",
                      overwrite="True",
                      elevation=elev,
                      slope=slope,
                      pcurv=pc,
                      tcurv=tc)
    # create mean curvature map (in a manner compatible with older versions of grass6), and then grab some stats from it for the later rescale operation
    grass.mapcalc("${mc}=(${tc}+${pc})/2", quiet="True", mc=mc, tc=tc, pc=pc)
    mcdict = grass.parse_command('r.univar', flags="g", map=mc)
    # figure out if user-supplied curvature breakpoints exceed the actual limits of curvature in the map, and adjust if necessary
    # (compare as floats; r.univar returns strings)
    if float(mcdict['min']) < float(curvebreaks[0].split(',')[0]):
        y1 = mcdict['min'] + ',' + curvebreaks[0].split(',')[1]
    else:
        y1 = curvebreaks[0]
    if float(mcdict['max']) > float(curvebreaks[1].split(',')[0]):
        y2 = mcdict['max'] + ',' + curvebreaks[1].split(',')[1]
    else:
        y2 = curvebreaks[1]

    grass.message(
        'STEP 2, Calculating "depth potential" across the landscape\n')
    #nested rescale of first slope (to percentage of maximum soil depth potential), and then curvature (to percentage offset from slope function), and then combining the two measures to make final estimation of soil depth potential. Final output depth potential map will be scaled between 0 and 1, which maps to lowest depth potential to highest depth potential.
    grass.mapcalc(
        "${temprate}=eval(x=graph( ${slope}, 0,1 , ${x1}, ${x2}, 90,0), y=graph(${mc}, ${y1}, 0,0, ${y2}), z=if(y < 0, x+(x*y), x+((1-x)*y)), if(z < 0, 0, if(z > 1, 1, z)))",
        quiet="True",
        temprate=temprate,
        slope=slope,
        x1=slopebreaks[0],
        x2=slopebreaks[1],
        mc=mc,
        y1=y1,
        y2=y2)

    grass.message(
        'STEP 3, Calculating actual soil depths across the landscape (based on user input min and max soil depth values)\n'
    )
    # create dictionary to record max and min rate so we can rescale it according the user supplied max and min desired soil depths
    ratedict = grass.parse_command('r.univar', flags="g", map=temprate)
    #creating and running a linear regression to scale the calculated landform soil depth potential into real soil depths using user specified min and max soil depth values
    grass.mapcalc(
        '${soildepth}=graph(${temprate}, ${rmin},${smin}, ${rmax},${smax})',
        quiet="True",
        soildepth=soildepth,
        temprate=temprate,
        rmin=ratedict['min'],
        rmax=ratedict['max'],
        smin=smin,
        smax=smax)
    unsmodict = grass.parse_command('r.univar', flags="g", map=soildepth)
    grass.run_command('r.neighbors',
                      quiet="True",
                      input=soildepth,
                      output=tempsdepth,
                      size=smoothingsize,
                      method=smoothingtype)
    # fix the shrinking edge caused by the neighborhood operation (and r.slope.aspect above) by filling in the null areas. We do this with 0.98 * smax, since the null cells are all the cells of the actual drainage divide with slope basically = 0, and very mildly convex curvatures. This basically blends them in nicely with the neighboring cells.
    grass.mapcalc(
        '${soildepth_real}=if(isnull(${input_sdepth}) && isnull(${elev}), null(), if(isnull(${input_sdepth}), 0.98*${smax},${input_sdepth}))',
        quiet='True',
        input_sdepth=tempsdepth,
        soildepth_real=soildepth2,
        elev=elev,
        smax=smax)
    #grab some stats if asked to
    if os.getenv('GIS_FLAG_s') == '1':
        depthdict = grass.parse_command('r.univar',
                                        flags="ge",
                                        map=soildepth2,
                                        percentile=90)

    grass.message('STEP 4, calculating bedrock elevation map\n')
    grass.mapcalc(
        "${bedrock}=eval(x=(${elev} - ${soildepth}), if(isnull(x), ${elev}, x))",
        quiet="True",
        bedrock=bedrock,
        elev=elev,
        soildepth=soildepth)
    grass.message('Cleaning up...')
    grass.run_command('g.remove',
                      quiet="true",
                      rast=[pc, tc, mc, slope, soildepth, tempsdepth])
    if os.getenv('GIS_FLAG_k') == '1':
        grass.run_command('g.rename',
                          quiet="true",
                          rast="%s,%s_depth_potential" % (temprate, bedrock))
    else:
        grass.run_command('g.remove', quiet="True", rast=temprate)
    if os.getenv('GIS_OPT_soildepth') is None:
        grass.run_command('g.remove', quiet="true", rast=soildepth2)
    else:
        grass.run_command('r.colors',
                          quiet="True",
                          map=soildepth2,
                          rules=sdcolors.name)
    grass.message('\nDONE!\n')
    if os.getenv('GIS_FLAG_s') == '1':
        grass.message("min, max, and mean before smoothing: " +
                      unsmodict['min'] + ", " + unsmodict['max'] + ", " +
                      unsmodict['mean'])
        for key in depthdict.keys():
            grass.message('%s=%s' % (key, depthdict[key]))
        grass.message('Total volume of soil is %s cubic meters' %
                      (float(depthdict['sum']) * res * res))
    return
Exemplo n.º 33
0
def main():
    # check dependencies
    check_progs()

    # check for unsupported locations
    in_proj = grass.parse_command('g.proj', flags='g')
    if in_proj['unit'].lower() == 'degree':
        grass.fatal(_("Latitude-longitude locations are not supported"))
    if in_proj['name'].lower() == 'xy_location_unprojected':
        grass.fatal(_("xy-locations are not supported"))

    r_elevation = options['map'].split('@')[0]
    mapname = options['map'].replace("@"," ")
    mapname = mapname.split()
    mapname[0] = mapname[0].replace(".","_")
    coordinates = options['coordinates']
    directory = options['dir']
    # Check if directory exists
    if not os.path.isdir(directory):
        os.makedirs(directory)
    autothreshold = flags['a']
    nomap = flags['c']
    prefix = options['prefix']+'_'+mapname[0]
    r_accumulation = prefix+'_accumulation'
    r_drainage = prefix+'_drainage'
    r_stream = prefix+'_stream'
    r_slope = prefix+'_slope'
    r_aspect = prefix+'_aspect'
    r_basin = prefix+'_basin'
    r_strahler = prefix+'_strahler'
    r_shreve = prefix+'_shreve'
    r_horton = prefix+'_horton'
    r_hack = prefix+'_hack'
    r_distance = prefix+'_dist2out'
    r_hillslope_distance = prefix+'_hillslope_distance'
    r_height_average = prefix+'_height_average'
    r_aspect_mod = prefix+'_aspect_mod'
    r_dtm_basin = prefix+'_dtm_basin'
    r_mainchannel = prefix+'_mainchannel'
    r_stream_e = prefix+'_stream_e'
    r_drainage_e = prefix+'_drainage_e'
    r_mask = prefix+'_mask'
    r_ord_1 = prefix+'_ord_1'
    r_average_hillslope = prefix+'_average_hillslope'
    r_mainchannel_dim = prefix+'_mainchannel_dim'
    r_outlet = prefix+'_r_outlet'
    v_outlet = prefix+'_outlet'
    v_outlet_snap = prefix+'_outlet_snap'
    v_basin = prefix+'_basin'
    v_mainchannel = prefix+'_mainchannel'
    v_mainchannel_dim = prefix+'_mainchannel_dim'
    v_network = prefix+'_network'
    v_ord_1 = prefix+'_ord_1'
    global tmp


    # Save current region
    grass.read_command('g.region', flags = 'p', save = 'original')

    # Watershed SFD
    grass.run_command('r.watershed', elevation = r_elevation,
                                     accumulation = r_accumulation,
                                     drainage = r_drainage,
                                     convergence = 5,
                                     flags = 'am')

    # Managing flag
    if autothreshold:
        resolution = grass.region()['nsres']
        th = 1000000 / (resolution**2)
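        # i.e. the number of cells covering 1 km^2 at the current resolution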
        grass.message("threshold : %s" % th)
    else:
        th = options['threshold']

    # Stream extraction
    grass.run_command('r.stream.extract', elevation = r_elevation,
                                          accumulation = r_accumulation,
                                          threshold = th,
                                          d8cut = 1000000000,
                                          mexp = 0,
                                          stream_rast = r_stream_e,
                                          direction = r_drainage_e)

    try:
        # Delineation of basin
        # Create outlet
        grass.write_command('v.in.ascii', output = v_outlet,
                                      input = "-",
                                      sep = ",",
                                      stdin = "%s,9999" % (coordinates))

        # Snap outlet to stream network
        # TODO: does snap depend on the raster resolution? hardcoded 30 below
        grass.run_command('r.stream.snap', input = v_outlet,
                                           output = v_outlet_snap,
                                           stream_rast = r_stream_e,
                                           radius = 30)

        grass.run_command('v.to.rast', input = v_outlet_snap,
                                   output = r_outlet,
                                   use = 'cat',
                                   type = 'point',
                                   layer = 1,
                                   value = 1)


        grass.run_command('r.stream.basins', direction = r_drainage_e,
                                             basins = r_basin,
                                             points = v_outlet_snap)

        grass.message("Delineation of basin done")

        # Mask and cropping
        elevation_name = r_elevation = r_elevation.split('@')[0]

        grass.mapcalc("$r_mask = $r_basin / $r_basin",
                       r_mask = r_mask,
                       r_basin = r_basin)

        grass.mapcalc("tmp = $r_accumulation / $r_mask",
                       r_accumulation = r_accumulation,
                       r_mask = r_mask)

        grass.run_command('g.remove', flags='f', type='raster', name= r_accumulation, quiet = True)

        grass.run_command('g.rename', raster = ('tmp',r_accumulation))

        grass.mapcalc("tmp = $r_drainage / $r_mask",
                       r_drainage = r_drainage,
                       r_mask = r_mask)

        grass.run_command('g.remove', flags='f', type='raster', name= r_drainage, quiet = True)

        grass.run_command('g.rename', raster = ('tmp', r_drainage))

        grass.mapcalc("$r_elevation_crop = $r_elevation * $r_mask",
                       r_mask = r_mask,
                       r_elevation = r_elevation,
                       r_elevation_crop = 'r_elevation_crop')

        grass.mapcalc("tmp = $r_drainage_e * $r_mask",
                       r_mask = r_mask,
                       r_drainage_e = r_drainage_e)

        grass.run_command('g.remove', flags='f', type='raster', name= r_drainage_e, quiet = True)

        grass.run_command('g.rename', raster = ('tmp',r_drainage_e))

        grass.mapcalc("tmp = $r_stream_e * $r_mask",
                       r_mask = r_mask,
                       r_stream_e = r_stream_e)

        grass.run_command('g.remove', flags='f', type='raster', name= r_stream_e, quiet = True)
        #grass.run_command('g.rename', raster = (r_stream_e,'streams'))

        grass.run_command('g.rename', raster = ('tmp',r_stream_e))

        grass.run_command('r.thin', input = r_stream_e,
                                    output = r_stream_e+'_thin')

        grass.run_command('r.to.vect', input = r_stream_e+'_thin',
                                       output = v_network,
                                       type = 'line')

        # Creation of slope and aspect maps
        grass.run_command('r.slope.aspect', elevation = 'r_elevation_crop',
                                            slope = r_slope,
                                            aspect = r_aspect)

        # Basin mask (vector)
        # Raster to vector
        grass.run_command('r.to.vect', input = r_basin,
                                       output = v_basin,
                                       type = 'area',
                                       flags = 'sv')

        # Add two columns to the table: area and perimeter
        grass.run_command('v.db.addcolumn', map = v_basin,
                                         columns = 'area double precision')

        grass.run_command('v.db.addcolumn', map = v_basin,
                                         columns = 'perimeter double precision')

        # Populate perimeter column
        grass.run_command('v.to.db', map = v_basin,
                                 type = 'line,boundary',
                                 layer = 1,
                                 qlayer = 1,
                                 option = 'perimeter',
                                 units = 'kilometers',
                                 columns = 'perimeter',
                                 overwrite = True)

        # Read perimeter
        tmp = grass.read_command('v.to.db', map = v_basin,
                                 type = 'line,boundary',
                                 layer = 1,
                                 qlayer = 1,
                                 option = 'perimeter',
                                 units = 'kilometers',
                                 qcolumn = 'perimeter',
                                 flags = 'p')
        perimeter_basin = float(tmp.split('\n')[1].split('|')[1])

        # Populate area column
        grass.run_command('v.to.db', map = v_basin,
                                 type = 'line,boundary',
                                 layer = 1,
                                 qlayer = 1,
                                 option = 'area',
                                 units = 'kilometers',
                                 columns = 'area',
                                 overwrite = True)

        # Read area
        tmp = grass.read_command('v.to.db', map = v_basin,
                                 type = 'line,boundary',
                                 layer = 1,
                                 qlayer = 1,
                                 option = 'area',
                                 units = 'kilometers',
                                 qcolumn = 'area',
                                 flags = 'p')
        area_basin = float(tmp.split('\n')[1].split('|')[1])
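        # Both v.to.db reads above parse the -p (print) output: a header line
        # followed by one "cat|value" row per category, so with a single
        # basin the value sits on line 1, field 1.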

        # Creation of order maps: strahler, horton, hack, shreeve
        grass.message("Creating %s" % r_hack)

        grass.run_command('r.stream.order', stream_rast = r_stream_e,
                                            direction = r_drainage_e,
                                            strahler = r_strahler,
                                            shreve = r_shreve,
                                            horton = r_horton,
                                            hack = r_hack)

        # Distance to outlet
        grass.run_command('r.stream.distance', stream_rast = r_outlet,
                                               direction = r_drainage_e,
                                               flags = 'o',
                                               distance = r_distance)


        # hypsographic curve

        grass.message("------------------------------")

        grass.run_command('r.hypso', map = 'r_elevation_crop',
                                  image = os.path.join(directory,prefix), flags = 'ab')

        grass.message("------------------------------")

        # Width Function

        grass.message("------------------------------")

        grass.run_command('r.width.funct', map = r_distance,
                                  image = os.path.join(directory,prefix))

        grass.message("------------------------------")

        # Creation of map of hillslope distance to river network

        grass.run_command("r.stream.distance", stream_rast = r_stream_e,
                                           direction = r_drainage,
                                           elevation = 'r_elevation_crop',
                                           distance = r_hillslope_distance)


        # Mean elevation
        grass.run_command("r.stats.zonal", base = r_basin,
                                    cover = "r_elevation_crop",
                                    method = "average",
                                    output = r_height_average)


        grass.message("r.stats.zonal done")
        mean_elev = float(grass.read_command('r.info', flags = 'r',
                                                       map = r_height_average).split('\n')[0].split('=')[1])
        grass.message("r.info done")


        # In GRASS, aspect is measured in degrees counterclockwise from east:
        # 90 is north, 180 is west, 270 is south and 360 is east. The value 0
        # marks undefined aspect in flat areas (slope = 0). The expression
        # below converts it to a compass bearing, i.e. degrees from north
        # increasing clockwise.
        grass.mapcalc("$r_aspect_mod = if($r_aspect == 0, 0, if($r_aspect > 90, 450 - $r_aspect, 90 - $r_aspect))",
                  r_aspect = r_aspect,
                  r_aspect_mod = r_aspect_mod)
        grass.message("r.mapcalc done")

        # Centroid and mean slope
        baricenter_slope_baricenter = grass.read_command("r.volume", input = r_slope,
                                                                     clump = r_basin)

        grass.message("r.volume done")

        baricenter_slope_baricenter = baricenter_slope_baricenter.split()
        mean_slope = baricenter_slope_baricenter[30]

        # Rectangle containing basin
        basin_east = baricenter_slope_baricenter[33]
        basin_north = baricenter_slope_baricenter[34]
        info_region_basin = grass.read_command("g.region",
                                            vect = options['prefix']+'_'+mapname[0]+'_basin',
                                            flags = 'm')

        grass.message("g.region done")
        dict_region_basin = dict(x.split('=', 1) for x in info_region_basin.split('\n') if '=' in x)
        basin_resolution = float(dict_region_basin['nsres'])
#        x_massimo = float(dict_region_basin['n']) + (basin_resolution * 10)
#        x_minimo = float(dict_region_basin['w']) - (basin_resolution * 10)
#        y_massimo = float(dict_region_basin['e']) + (basin_resolution * 10)
#        y_minimo = float(dict_region_basin['s']) - (basin_resolution * 10)
        nw = dict_region_basin['w'], dict_region_basin['n']
        se = dict_region_basin['e'], dict_region_basin['s']
        grass.message("Rectangle containing basin done")

        east1,north1 = coordinates.split(',')
        east = float(east1)
        north = float(north1)

        # Directing vector
        delta_x = abs(float(basin_east) - east)
        delta_y = abs(float(basin_north) - north)
        L_orienting_vect = math.sqrt((delta_x**2)+(delta_y**2)) / 1000
        grass.message("Directing vector done")

        # Prevalent orientation: angle of the centroid-outlet vector, in
        # degrees (atan2 avoids a division by zero when delta_x == 0)
        prevalent_orientation = math.degrees(math.atan2(delta_y, delta_x))
        grass.message("Prevalent orientation done")

        # Compactness coefficient
        C_comp = perimeter_basin / (2 * math.sqrt(area_basin / math.pi))
        grass.message("Compactness coefficient done")

        # Circularity ratio
        R_c = (4 * math.pi * area_basin) / (perimeter_basin ** 2)
        grass.message("Circularity ratio done")

        # Mainchannel
        grass.mapcalc("$r_mainchannel = if($r_hack==1,1,null())",
                  r_hack = r_hack,
                  r_mainchannel = r_mainchannel)

        grass.run_command("r.thin", input = r_mainchannel,
                                output = r_mainchannel+'_thin')
        grass.run_command('r.to.vect', input = r_mainchannel+'_thin',
                                   output = v_mainchannel,
                                   type = 'line',
                                   verbose = True)


        # Get coordinates of the outlet (belonging to stream network)

        grass.run_command('v.db.addtable', map = v_outlet_snap)

        grass.run_command('v.db.addcolumn', map = v_outlet_snap,
                                            columns="x double precision,y double precision")

        grass.run_command('v.to.db', map = v_outlet_snap,
                                     option = "coor",
                                     col = "x,y",
                                     overwrite = True)

        namefile = os.path.join(directory, prefix + '_outlet_coors.txt')

        grass.run_command('v.out.ascii', input = v_outlet_snap,
                                         output = namefile,
                                         cats = 1,
                                         format = "point")

        with open(namefile) as f:
            east_o, north_o, cat = f.readline().split('|')

        param_mainchannel = grass.read_command('v.what', map = v_mainchannel,
                                                     coordinates = '%s,%s' % (east_o,north_o),
                                                     distance = 5)
        tmp = param_mainchannel.split('\n')[7]
        mainchannel = float(tmp.split()[1]) / 1000   # km

        # Topological Diameter
        grass.mapcalc("$r_mainchannel_dim = -($r_mainchannel - $r_shreve) + 1",
                  r_mainchannel_dim = r_mainchannel_dim,
                  r_shreve = r_shreve,
                  r_mainchannel = r_mainchannel)
        grass.run_command('r.thin', input = r_mainchannel_dim,
                                output = r_mainchannel_dim + '_thin')
        grass.run_command('r.to.vect', input = r_mainchannel_dim + '_thin',
                                   output = v_mainchannel_dim,
                                   type = 'line',
                                   flags = 'v',
                                   verbose = True)
        try:
            D_topo1 = grass.read_command('v.info', map = v_mainchannel_dim,
                                               layer = 1,
                                               flags = 't')
            D_topo = float(D_topo1.split('\n')[2].split('=')[1])
        except (IndexError, ValueError):
            D_topo = 1
            grass.message("WARNING: could not read the Topological Diameter, defaulting to 1")

        # Mean slope of mainchannel
        grass.message("doing v.to.points")
        grass.run_command('v.to.points',
                                     input = v_mainchannel_dim,
                                     output = v_mainchannel_dim+'_point',
                                     type = 'line')
        vertex = grass.read_command('v.out.ascii', verbose = True,
                                               input = v_mainchannel_dim+'_point').strip().split('\n')
        nodi = zeros((len(vertex),4),float)
        pendenze = []

        for i in range(len(vertex)):
            x, y = float(vertex[i].split('|')[0]), float(vertex[i].split('|')[1])
            vertice1 = grass.read_command('r.what', verbose = True,
                                               map = 'r_elevation_crop',
                                               coordinates = '%s,%s' % (x,y))
            vertice = vertice1.replace('\n','').replace('||','|').split('|')
            nodi[i,0],nodi[i,1], nodi[i,2] = float(vertice[0]), float(vertice[1]), float(vertice[2])

        for i in range(0,len(vertex)-1,2):
            dist = math.sqrt((nodi[i,0] - nodi[i+1,0])**2 + (nodi[i,1] - nodi[i+1,1])**2)
            deltaz = math.fabs(nodi[i,2] - nodi[i+1,2])
            # Skip coincident vertices to prevent division by zero (dist = 0)
            if dist > 0:
                pendenze.append(deltaz / dist)
        # Mean slope as a percentage; 0 if no valid vertex pairs were found
        mainchannel_slope = sum(pendenze) / len(pendenze) * 100 if pendenze else 0

        # Elongation Ratio
        R_al = (2 * math.sqrt(area_basin / math.pi)) / mainchannel

        # Shape factor
        S_f = area_basin / mainchannel

        # Characteristic altitudes
        height_basin_average = grass.read_command('r.what', map = r_height_average,
                                                        cache = 500,
                                                        coordinates = '%s,%s' % (east_o, north_o))
        height_basin_average = height_basin_average.replace('\n','')
        height_basin_average = float(height_basin_average.split('|')[-1])
        minmax_height_basin = grass.read_command('r.info', flags = 'r',
                                                       map = 'r_elevation_crop')
        minmax_height_basin = minmax_height_basin.strip().split('\n')
        min_height_basin, max_height_basin = float(minmax_height_basin[0].split('=')[-1]), float(minmax_height_basin[1].split('=')[-1])
        H1 = max_height_basin
        H2 = min_height_basin
        HM = H1 - H2

        # Concentration time (Giandotti, 1934)
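        # t_c [hr] = (4*sqrt(A) + 1.5*L) / (0.8*sqrt(HM)), with basin area A
        # in km^2, mainchannel length L in km and relief HM in m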
        t_c = ((4 * math.sqrt(area_basin)) + (1.5 * mainchannel)) / (0.8 * math.sqrt(HM))

        # Mean hillslope length
        grass.run_command("r.stats.zonal", cover = r_stream_e,
                                   base = r_mask,
                                   method = "average",
                                   output = r_average_hillslope)
        mean_hillslope_length = float(grass.read_command('r.info', flags = 'r',
                                                         map = r_average_hillslope).split('\n')[0].split('=')[1])

        # Magnitude
        grass.mapcalc("$r_ord_1 = if($r_strahler==1,1,null())",
                  r_ord_1 = r_ord_1,
                  r_strahler = r_strahler)
        grass.run_command('r.thin', input = r_ord_1,
                                output = r_ord_1+'_thin',
                                iterations = 200)
        grass.run_command('r.to.vect', input = r_ord_1+'_thin',
                                   output = v_ord_1,
                                   type = 'line',
                                   flags = 'v')
        magnitudo = float(grass.read_command('v.info', map = v_ord_1,
                                                   layer = 1,
                                                   flags = 't').split('\n')[2].split('=')[1])

        # First order stream frequency
        FSF = magnitudo / area_basin

        # Statistics

        stream_stats = grass.read_command('r.stream.stats', stream_rast = r_strahler,
                                                        direction = r_drainage_e,
                                                        elevation = 'r_elevation_crop')


        print(" ------------------------------ ")
        print("Output of r.stream.stats: ")
        print(stream_stats)

        stream_stats_summary = stream_stats.split('\n')[4].split('|')
        stream_stats_mom = stream_stats.split('\n')[8].split('|')
        Max_order, Num_streams, Len_streams, Stream_freq = stream_stats_summary[0], stream_stats_summary[1], stream_stats_summary[2], stream_stats_summary[5]
        Bif_ratio, Len_ratio, Area_ratio, Slope_ratio = stream_stats_mom[0], stream_stats_mom[1], stream_stats_mom[2], stream_stats_mom[3]
        drainage_density = float(Len_streams) / float(area_basin)

        # Cleaning up
        grass.run_command('g.remove', flags='f', type='raster', name= 'r_elevation_crop', quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_height_average, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_aspect_mod, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_mainchannel, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_stream_e, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_drainage_e, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_mask, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_ord_1, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_average_hillslope, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_mainchannel_dim, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_outlet, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= r_basin, quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= prefix+'_mainchannel_thin', quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= prefix+'_mainchannel_dim_thin', quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= prefix+'_ord_1_thin', quiet = True)
        grass.run_command('g.remove', flags='f', type='raster', name= prefix+'_stream_e_thin', quiet = True)
        grass.run_command('g.remove', flags='f', type='vector', name= v_mainchannel_dim+'_point', quiet = True)
        grass.run_command('g.remove', flags='f', type='vector', name= v_mainchannel_dim, quiet = True)
        grass.run_command('g.remove', flags='f', type='vector', name= v_ord_1, quiet = True)

        if nomap:
            grass.run_command('g.remove', flags='f', type='vector', name= v_outlet, quiet = True)
            grass.run_command('g.remove', flags='f', type='vector', name= v_basin, quiet = True)
            grass.run_command('g.remove', flags='f', type='vector', name= v_mainchannel, quiet = True)
            grass.run_command('g.remove', flags='f', type='raster', name= r_accumulation, quiet = True)
            grass.run_command('g.remove', flags='f', type='raster', name= r_drainage, quiet = True)
            grass.run_command('g.remove', flags='f', type='raster', name= r_aspect, quiet = True)
            grass.run_command('g.remove', flags='f', type='raster', name= r_strahler, quiet = True)
            grass.run_command('g.remove', flags='f', type='raster', name= r_shreve, quiet = True)
            grass.run_command('g.remove', flags='f', type='raster', name= r_horton, quiet = True)
            grass.run_command('g.remove', flags='f', type='raster', name= r_hack, quiet = True)
            grass.run_command('g.remove', flags='f', type='raster', name= r_distance, quiet = True)
            grass.run_command('g.remove', flags='f', type='raster', name= r_hillslope_distance, quiet = True)
            grass.run_command('g.remove', flags='f', type='raster', name= r_slope, quiet = True)

        ####################################################

        parametri_bacino = {}
        parametri_bacino["mean_slope"] = float(mean_slope)
        parametri_bacino["mean_elev"] = float(mean_elev)
        parametri_bacino["basin_east"] = float(basin_east)
        parametri_bacino["basin_north"] = float(basin_north)
        parametri_bacino["basin_resolution"] = float(basin_resolution)
        parametri_bacino["nw"] = nw
        parametri_bacino["se"] = se
        parametri_bacino["area_basin"] = float(area_basin)
        parametri_bacino["perimeter_basin"] = float(perimeter_basin)
        parametri_bacino["L_orienting_vect"] = float(L_orienting_vect)
        parametri_bacino["prevalent_orientation"] = float(prevalent_orientation)
        parametri_bacino["C_comp"] = float(C_comp)
        parametri_bacino["R_c"] = float(R_c)
        parametri_bacino["mainchannel"] = float(mainchannel)
        parametri_bacino["D_topo"] = float(D_topo)
        parametri_bacino["mainchannel_slope"] = float(mainchannel_slope)
        parametri_bacino["R_al"] = float(R_al)
        parametri_bacino["S_f"] = float(S_f)
        parametri_bacino["H1"] = float(H1)
        parametri_bacino["H2"] = float(H2)
        parametri_bacino["HM"] = float(HM)
        parametri_bacino["t_c"] = float(t_c)
        parametri_bacino["mean_hillslope_length"] = float(mean_hillslope_length)
        parametri_bacino["magnitudo"] = float(magnitudo)
        parametri_bacino["Max_order"] = float(Max_order)
        parametri_bacino["Num_streams"] = float(Num_streams)
        parametri_bacino["Len_streams"] = float(Len_streams)
        parametri_bacino["Stream_freq"] = float(Stream_freq)
        parametri_bacino["Bif_ratio"] = float(Bif_ratio)
        parametri_bacino["Len_ratio"] = float(Len_ratio)
        parametri_bacino["Area_ratio"] = float(Area_ratio)
        parametri_bacino["Slope_ratio"] = float(Slope_ratio)
        parametri_bacino["drainage_density"] = float(drainage_density)
        parametri_bacino["FSF"] = float(FSF)


        # create .csv file
        csvfile = os.path.join(directory, prefix + '_parameters.csv')
        with open(csvfile, 'w') as f:
            writer = csv.writer(f)
            writer.writerow(['Morphometric parameters of basin:'])
            writer.writerow([' '])
            writer.writerow(['Easting Centroid of basin'] + [basin_east])
            writer.writerow(['Northing Centroid of basin'] + [basin_north])
            writer.writerow(['Rectangle containing basin N-W'] + [nw])
            writer.writerow(['Rectangle containing basin S-E'] + [se])
            writer.writerow(['Area of basin [km^2]'] + [area_basin])
            writer.writerow(['Perimeter of basin [km]'] + [perimeter_basin])
            writer.writerow(['Max Elevation [m a.s.l.]'] + [H1])
            writer.writerow(['Min Elevation [m a.s.l.]'] + [H2])
            writer.writerow(['Elevation Difference [m]'] + [HM])
            writer.writerow(['Mean Elevation'] + [mean_elev])
            writer.writerow(['Mean Slope'] + [mean_slope])
            writer.writerow(['Length of Directing Vector [km]'] + [L_orienting_vect])
            writer.writerow(['Prevalent Orientation [degree from north, counterclockwise]'] + [prevalent_orientation])
            writer.writerow(['Compactness Coefficient'] + [C_comp])
            writer.writerow(['Circularity Ratio'] + [R_c])
            writer.writerow(['Topological Diameter'] + [D_topo])
            writer.writerow(['Elongation Ratio'] + [R_al])
            writer.writerow(['Shape Factor'] + [S_f])
            writer.writerow(['Concentration Time (Giandotti, 1934) [hr]'] + [t_c])
            writer.writerow(['Length of Mainchannel [km]'] + [mainchannel])
            writer.writerow(['Mean slope of mainchannel [percent]'] + [mainchannel_slope])
            writer.writerow(['Mean hillslope length [m]'] + [mean_hillslope_length])
            writer.writerow(['Magnitudo'] + [magnitudo])
            writer.writerow(['Max order (Strahler)'] + [Max_order])
            writer.writerow(['Number of streams'] + [Num_streams])
            writer.writerow(['Total Stream Length [km]'] + [Len_streams])
            writer.writerow(['First order stream frequency'] + [FSF])
            writer.writerow(['Drainage Density [km/km^2]'] + [drainage_density])
            writer.writerow(['Bifurcation Ratio (Horton)'] + [Bif_ratio])
            writer.writerow(['Length Ratio (Horton)'] + [Len_ratio])
            writer.writerow(['Area ratio (Horton)'] + [Area_ratio])
            writer.writerow(['Slope ratio (Horton)'] + [Slope_ratio])

        # Create summary (transposed)
        csvfileT = os.path.join(directory, prefix + '_parametersT.csv') # transposed
        with open(csvfileT, 'w') as f:
            writer = csv.writer(f)
            writer.writerow(['x'] +
                            ['y'] +
                            ['Easting_Centroid_basin'] +
                            ['Northing_Centroid_basin'] +
                            ['Rectangle_containing_basin_N_W'] +
                            ['Rectangle_containing_basin_S_E'] +
                            ['Area_of_basin_km2'] +
                            ['Perimeter_of_basin_km'] +
                            ['Max_Elevation'] +
                            ['Min_Elevation'] +
                            ['Elevation_Difference'] +
                            ['Mean_Elevation'] +
                            ['Mean_Slope'] +
                            ['Length_of_Directing_Vector_km'] +
                            ['Prevalent_Orientation_deg_from_north_ccw'] +
                            ['Compactness_Coefficient'] +
                            ['Circularity_Ratio'] +
                            ['Topological_Diameter'] +
                            ['Elongation_Ratio'] +
                            ['Shape_Factor'] +
                            ['Concentration_Time_hr'] +
                            ['Length_of_Mainchannel_km'] +
                            ['Mean_slope_of_mainchannel_percent'] +
                            ['Mean_hillslope_length_m'] +
                            ['Magnitudo'] +
                            ['Max_order_Strahler'] +
                            ['Number_of_streams'] +
                            ['Total_Stream_Length_km'] +
                            ['First_order_stream_frequency'] +
                            ['Drainage_Density_km_over_km2'] +
                            ['Bifurcation_Ratio_Horton'] +
                            ['Length_Ratio_Horton'] +
                            ['Area_ratio_Horton'] +
                            ['Slope_ratio_Horton'])
            writer.writerow([east_o]
                          + [north_o]
                          + [basin_east]
                          + [basin_north]
                          + [nw]
                          + [se]
                          + [area_basin]
                          + [perimeter_basin]
                          + [H1]
                          + [H2]
                          + [HM]
                          + [mean_elev]
                          + [mean_slope]
                          + [L_orienting_vect]
                          + [prevalent_orientation]
                          + [C_comp]
                          + [R_c]
                          + [D_topo]
                          + [R_al]
                          + [S_f]
                          + [t_c]
                          + [mainchannel]
                          + [mainchannel_slope]
                          + [mean_hillslope_length]
                          + [magnitudo]
                          + [Max_order]
                          + [Num_streams]
                          + [Len_streams]
                          + [FSF]
                          + [drainage_density]
                          + [Bif_ratio]
                          + [Len_ratio]
                          + [Area_ratio]
                          + [Slope_ratio])


        # Import table "rbasin_summary", joins it to "outlet_snap", then drops it
        grass.message("db.in.ogr: importing CSV table <%s>..." % csvfileT)
        grass.run_command("db.in.ogr", input = csvfileT,
                          output = "rbasin_summary")

        grass.run_command("v.db.join", map = v_outlet_snap,
                          otable = "rbasin_summary",
                          column = "y",
                          ocolumn = "y")
        grass.run_command("db.droptable", table = "rbasin_summary", flags = 'f')

        grass.message("\n")
        grass.message("----------------------------------")
        grass.message("Morphometric parameters of basin :")
        grass.message("----------------------------------\n")
        grass.message("Easting Centroid of basin : %s " % basin_east)
        grass.message("Northing Centroid of Basin : %s " % basin_north)
        grass.message("Rectangle containing basin N-W : %s , %s " % nw)
        grass.message("Rectangle containing basin S-E : %s , %s " % se)
        grass.message("Area of basin [km^2] : %s " % area_basin)
        grass.message("Perimeter of basin [km] : %s " % perimeter_basin)
        grass.message("Max Elevation [m s.l.m.] : %s " % H1)
        grass.message("Min Elevation [m s.l.m.]: %s " % H2)
        grass.message("Elevation Difference [m]: %s " % HM)
        grass.message("Mean Elevation [m s.l.m.]: %s " % mean_elev)
        grass.message("Mean Slope : %s " % mean_slope)
        grass.message("Length of Directing Vector [km] : %s " % L_orienting_vect)
        grass.message("Prevalent Orientation [degree from north, counterclockwise] : %s " % prevalent_orientation)
        grass.message("Compactness Coefficient : %s " % C_comp)
        grass.message("Circularity Ratio : %s " % R_c)
        grass.message("Topological Diameter : %s " % D_topo)
        grass.message("Elongation Ratio : %s " % R_al)
        grass.message("Shape Factor : %s " % S_f)
        grass.message("Concentration Time (Giandotti, 1934) [hr] : %s " % t_c)
        grass.message("Length of Mainchannel [km] : %s " % mainchannel)
        grass.message("Mean slope of mainchannel [percent] : %f " % mainchannel_slope)
        grass.message("Mean hillslope length [m] : %s " % mean_hillslope_length)
        grass.message("Magnitudo : %s " % magnitudo)
        grass.message("Max order (Strahler) : %s " % Max_order)
        grass.message("Number of streams : %s " % Num_streams)
        grass.message("Total Stream Length [km] : %s " % Len_streams)
        grass.message("First order stream frequency : %s " % FSF)
        grass.message("Drainage Density [km/km^2] : %s " % drainage_density)
        grass.message("Bifurcation Ratio (Horton) : %s " % Bif_ratio)
        grass.message("Length Ratio (Horton) : %s " % Len_ratio)
        grass.message("Area ratio (Horton) : %s " % Area_ratio)
        grass.message("Slope ratio (Horton): %s " % Slope_ratio)
        grass.message("------------------------------")
        grass.message("\n")
        grass.message("Done!")

    except Exception:
        grass.message("\n")
        grass.message("------------------------------")
        grass.message("\n")
        grass.message("An ERROR occurred running r.basin")
        grass.message("Please check the error messages above or try another pair of outlet coordinates")


    # Set region to original
    grass.read_command('g.region', flags = 'p', region = 'original')
    grass.run_command('g.remove', flags = 'f', type = 'region', name = 'original')
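
# A small helper (hypothetical, not part of the module above) that factors out
# the repeated `v.to.db ... flags='p'` reads; it assumes a single-category
# basin map and the header + "cat|value" print format parsed above.
def read_vtodb_value(map_name, option, units='kilometers'):
    out = grass.read_command('v.to.db', map=map_name,
                             type='line,boundary', layer=1,
                             option=option, units=units, flags='p')
    # Skip the header line, then take the value field of the first row
    return float(out.split('\n')[1].split('|')[1])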
Example #34
def main():
    stats = grass.read_command(
        "r.stats", input=options["map"], sep="space", nv="*", nsteps="255", flags="inc"
    ).split("\n")[:-1]
    res = grass.region()["nsres"]
    zn = np.zeros((len(stats), 6), float)
    kl = np.zeros((len(stats), 2), float)
    prc = np.zeros((9, 2), float)

    for i in range(len(stats)):
        if i == 0:
            zn[i, 0], zn[i, 1] = list(map(float, stats[i].split(" ")))
            zn[i, 2] = zn[i, 1]
        else:
            zn[i, 0], zn[i, 1] = list(map(float, stats[i].split(" ")))
            zn[i, 2] = zn[i, 1] + zn[i - 1, 2]

    totcell = sum(zn[:, 1])
    print("Tot. cells", totcell)

    for i in range(len(stats)):
        zn[i, 3] = 1 - (zn[i, 2] / sum(zn[:, 1]))
        zn[i, 4] = zn[i, 3] * (((res**2) / 1000000) * sum(zn[:, 1]))
        zn[i, 5] = (zn[i, 0] - min(zn[:, 0])) / (max(zn[:, 0]) - min(zn[:, 0]))
        kl[i, 0] = zn[i, 0]
        kl[i, 1] = 1 - (zn[i, 2] / totcell)

    # quantiles
    prc[0, 0], prc[0, 1] = findint(kl, 0.025), 0.025
    prc[1, 0], prc[1, 1] = findint(kl, 0.05), 0.05
    prc[2, 0], prc[2, 1] = findint(kl, 0.1), 0.1
    prc[3, 0], prc[3, 1] = findint(kl, 0.25), 0.25
    prc[4, 0], prc[4, 1] = findint(kl, 0.5), 0.5
    prc[5, 0], prc[5, 1] = findint(kl, 0.75), 0.75
    prc[6, 0], prc[6, 1] = findint(kl, 0.9), 0.9
    prc[7, 0], prc[7, 1] = findint(kl, 0.95), 0.95
    prc[8, 0], prc[8, 1] = findint(kl, 0.975), 0.975

    # Managing flag & plot
    if flags["a"]:
        plotImage(
            zn[:, 3],
            zn[:, 5],
            options["image"] + "_Hypsometric.png",
            "-",
            "A(i) / A",
            "Z(i) / Zmax",
            "Hypsometric Curve",
        )
    if flags["b"]:
        plotImage(
            zn[:, 4],
            zn[:, 0],
            options["image"] + "_Hypsographic.png",
            "-",
            "A [km^2]",
            "Z [m.slm]",
            "Hypsographic Curve",
        )

    print("===========================")
    print("Hypsometric | quantiles")
    print("===========================")
    print("%.0f" % findint(kl, 0.025), "|", 0.025)
    print("%.0f" % findint(kl, 0.05), "|", 0.05)
    print("%.0f" % findint(kl, 0.1), "|", 0.1)
    print("%.0f" % findint(kl, 0.25), "|", 0.25)
    print("%.0f" % findint(kl, 0.5), "|", 0.5)
    print("%.0f" % findint(kl, 0.75), "|", 0.75)
    print("%.0f" % findint(kl, 0.9), "|", 0.9)
    print("%.0f" % findint(kl, 0.975), "|", 0.975)
    print("\n")
    print("Done!")
Example #35
    print ""
  print "ENVIRONMENT:"
  gisenv = g.gisenv()
  for row in gisenv.items():
    for item in row:
      print item,
    print ""
  print "---------------------------"
except:
  sys.exit("Must be run inside GRASS GIS; tested only with GRASS 7.0")
  
# Create vector map around boundaries and classify them

# Computational region
print "Obtaining geographical region from GRASS."
reg = g.region()
n = reg['n']
s = reg['s']
w = reg['w']
e = reg['e']
dns = n-s
dew = e-w

dx = args.resolution
dy = args.resolution * np.cos( np.radians(30) )

nsres = dx
ewres = dy

print "Classifying map boundaries..."
Example #36
# Imports assumed by this excerpt (not shown in the original snippet)
import subprocess
import numpy as np
import grass.script as gscript
from grass.script import array as garray


def main():
    region = gscript.region()
    res = (region["ewres"], region["nsres"])
    delta = (1, 1)
    size = (250, 250)
    for i in range(len(res)):
        # Both resolutions must lie within +-1 of the expected 250 m cells
        if not size[i] - delta[i] < res[i] < size[i] + delta[i]:
            raise ValueError(
                f"Resolution mismatch, should be {size[i]}+-{delta[i]}")
    # Create one vector feature to improve speed for calculations
    print("Preparing data...", flush=True)
    gscript.run_command(
        "v.buffer",
        input="motorways@PERMANENT",
        output=f"motorway",
        distance=0.001,
        overwrite=True,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    distances = {
        250: None,
        500: None,
        1000: None,
        2500: None,
        5000: None,
    }
    for i in distances.keys():
        print(f"Starting calculation for {i}m distance", flush=True)
        print("Creating buffer", flush=True)
        gscript.run_command(
            "v.buffer",
            input="motorway@PERMANENT",
            output=f"motorway_buf_{i}",
            distance=i,
            overwrite=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        gscript.run_command(
            "g.region",
            vector=f"motorway_buf_{i}",
            overwrite=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        print("Rasterizing buffer", flush=True)
        gscript.run_command(
            "v.to.rast",
            input=f"motorway_buf_{i}@PERMANENT",
            output=f"motorway_rast_{i}",
            use="val",
            overwrite=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        print("Counting population", flush=True)
        gscript.run_command(
            "r.stats.zonal",
            base=f"motorway_rast_{i}@PERMANENT",
            cover="GHS_POP_E2015_GLOBE_R2019A_54009_250_V1_0_18_3@PERMANENT",
            method="sum",
            output=f"pop_motorway_{i}",
            overwrite=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        a = garray.array(f"pop_motorway_{i}@PERMANENT")
        distances[i] = np.amax(a)
    print("Populations:")
    for i, pop in distances.items():
        if pop is None: continue
        print(f"{i}m\t {pop}")
Example #37
def main():
    """
    Builds a grid for the MODFLOW component of the USGS hydrologic model,
    GSFLOW.
    """

    options, flags = gscript.parser()
    basin = options['basin']
    pp = options['pour_point']
    raster_input = options['raster_input']
    dx = options['dx']
    dy = options['dy']
    grid = options['output']
    mask = options['mask_output']
    bc_cell = options['bc_cell']
    # basin='basins_tmp_onebasin'; pp='pp_tmp'; raster_input='DEM'; raster_output='DEM_coarse'; dx=dy='500'; grid='grid_tmp'; mask='mask_tmp'
    """
    # Fatal if raster input and output are not both set
    _lena0 = (len(raster_input) == 0)
    _lenb0 = (len(raster_output) == 0)
    if _lena0 + _lenb0 == 1:
        gscript.fatal("You must set both raster input and output, or neither.")
    """

    # Fatal if bc_cell set but mask and grid are false
    if bc_cell != '':
        if (mask == '') or (pp == ''):
            gscript.fatal(
                'Mask and pour point must be set to define b.c. cell')

    # Create grid -- overlaps DEM, three cells of padding
    gscript.use_temp_region()
    reg = gscript.region()
    reg_grid_edges_sn = np.linspace(reg['s'], reg['n'], reg['rows'])
    reg_grid_edges_we = np.linspace(reg['w'], reg['e'], reg['cols'])
    g.region(vector=basin, ewres=dx, nsres=dy)
    regnew = gscript.region()
    # Use a grid ratio -- don't match exactly the desired MODFLOW resolution
    grid_ratio_ns = np.round(regnew['nsres'] / reg['nsres'])
    grid_ratio_ew = np.round(regnew['ewres'] / reg['ewres'])
    # Get S, W, and then move the unit number of grid cells over to get N and E
    # and include 3 cells of padding around the whole watershed
    _s_dist = np.abs(reg_grid_edges_sn - (regnew['s'] - 3. * regnew['nsres']))
    _s_idx = np.where(_s_dist == np.min(_s_dist))[0][0]
    _s = float(reg_grid_edges_sn[_s_idx])
    _n_grid = np.arange(_s, reg['n'] + 3 * grid_ratio_ns * reg['nsres'],
                        grid_ratio_ns * reg['nsres'])
    _n_dist = np.abs(_n_grid - (regnew['n'] + 3. * regnew['nsres']))
    _n_idx = np.where(_n_dist == np.min(_n_dist))[0][0]
    _n = float(_n_grid[_n_idx])
    _w_dist = np.abs(reg_grid_edges_we - (regnew['w'] - 3. * regnew['ewres']))
    _w_idx = np.where(_w_dist == np.min(_w_dist))[0][0]
    _w = float(reg_grid_edges_we[_w_idx])
    _e_grid = np.arange(_w, reg['e'] + 3 * grid_ratio_ew * reg['ewres'],
                        grid_ratio_ew * reg['ewres'])
    _e_dist = np.abs(_e_grid - (regnew['e'] + 3. * regnew['ewres']))
    _e_idx = np.where(_e_dist == np.min(_e_dist))[0][0]
    _e = float(_e_grid[_e_idx])
    # Finally make the region
    g.region(w=str(_w),
             e=str(_e),
             s=str(_s),
             n=str(_n),
             nsres=str(grid_ratio_ns * reg['nsres']),
             ewres=str(grid_ratio_ew * reg['ewres']))
    # And then make the grid
    v.mkgrid(map=grid, overwrite=gscript.overwrite())

    # Cell numbers (row, column, continuous ID)
    v.db_addcolumn(map=grid, columns='id int', quiet=True)
    colNames = np.array(gscript.vector_db_select(grid, layer=1)['columns'])
    colValues = np.array(
        list(gscript.vector_db_select(grid, layer=1)['values'].values()))
    cats = colValues[:, colNames == 'cat'].astype(int).squeeze()
    rows = colValues[:, colNames == 'row'].astype(int).squeeze()
    cols = colValues[:, colNames == 'col'].astype(int).squeeze()
    nrows = np.max(rows)
    ncols = np.max(cols)
    cats = np.ravel([cats])
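    # Continuous cell ID in row-major order: (row, col) -> (row - 1) * ncols + col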
    _id = np.ravel([ncols * (rows - 1) + cols])
    _id_cat = []
    for i in range(len(_id)):
        _id_cat.append((_id[i], cats[i]))
    gridTopo = VectorTopo(grid)
    gridTopo.open('rw')
    cur = gridTopo.table.conn.cursor()
    cur.executemany("update " + grid + " set id=? where cat=?", _id_cat)
    gridTopo.table.conn.commit()
    gridTopo.close()

    # Cell area
    v.db_addcolumn(map=grid, columns='area_m2 double precision', quiet=True)
    v.to_db(map=grid,
            option='area',
            units='meters',
            columns='area_m2',
            quiet=True)

    # Basin mask
    if len(mask) > 0:
        # Fine resolution region:
        g.region(n=reg['n'],
                 s=reg['s'],
                 w=reg['w'],
                 e=reg['e'],
                 nsres=reg['nsres'],
                 ewres=reg['ewres'])
        # Rasterize basin
        v.to_rast(input=basin,
                  output=mask,
                  use='val',
                  value=1,
                  overwrite=gscript.overwrite(),
                  quiet=True)
        # Coarse resolution region:
        g.region(w=str(_w),
                 e=str(_e),
                 s=str(_s),
                 n=str(_n),
                 nsres=str(grid_ratio_ns * reg['nsres']),
                 ewres=str(grid_ratio_ew * reg['ewres']))
        r.resamp_stats(input=mask,
                       output=mask,
                       method='sum',
                       overwrite=True,
                       quiet=True)
        r.mapcalc('tmp' + ' = ' + mask + ' > 0', overwrite=True, quiet=True)
        g.rename(raster=('tmp', mask), overwrite=True, quiet=True)
        r.null(map=mask, null=0, quiet=True)
        # Add mask location (1 vs 0) in the MODFLOW grid
        v.db_addcolumn(map=grid,
                       columns='basinmask double precision',
                       quiet=True)
        v.what_rast(map=grid, type='centroid', raster=mask, column='basinmask')
    """
    # Resampled raster
    if len(raster_output) > 0:
        r.resamp_stats(input=raster_input, output=raster_output, method='average', overwrite=gscript.overwrite(), quiet=True)
    """

    # Pour point
    if len(pp) > 0:
        v.db_addcolumn(map=pp,
                       columns=('row integer', 'col integer'),
                       quiet=True)
        v.build(map=pp, quiet=True)
        v.what_vect(map=pp,
                    query_map=grid,
                    column='row',
                    query_column='row',
                    quiet=True)
        v.what_vect(map=pp,
                    query_map=grid,
                    column='col',
                    query_column='col',
                    quiet=True)

    # Next point downstream of the pour point
    # Requires pp (always) and mask (sometimes)
    # Dependency set above w/ gscript.fatal
    if len(bc_cell) > 0:
        ########## NEED TO USE TRUE TEMPORARY FILE ##########
        # May not work with dx != dy!
        v.to_rast(input=pp, output='tmp', use='val', value=1, overwrite=True)
        r.buffer(input='tmp',
                 output='tmp',
                 distances=float(dx) * 1.5,
                 overwrite=True)
        r.mapcalc('tmp2 = if(tmp==2,1,null()) * ' + raster_input,
                  overwrite=True)
        g.rename(raster=('tmp2', 'tmp'), overwrite=True, quiet=True)
        #r.mapcalc('tmp = if(isnull('+raster_input+',0,(tmp == 2)))', overwrite=True)
        #g.region(rast='tmp')
        #r.null(map=raster_input,
        r.drain(input=raster_input,
                start_points=pp,
                output='tmp2',
                overwrite=True)
        r.mapcalc('tmp3 = tmp2 * tmp', overwrite=True, quiet=True)
        g.rename(raster=('tmp3', 'tmp'), overwrite=True, quiet=True)
        #r.null(map='tmp', setnull=0) # Not necessary: center point removed above
        r.to_vect(input='tmp',
                  output=bc_cell,
                  type='point',
                  column='z',
                  overwrite=gscript.overwrite(),
                  quiet=True)
        v.db_addcolumn(map=bc_cell,
                       columns=('row integer', 'col integer',
                                'x double precision', 'y double precision'),
                       quiet=True)
        v.build(map=bc_cell, quiet=True)
        v.what_vect(map=bc_cell, query_map=grid, column='row', \
                    query_column='row', quiet=True)
        v.what_vect(map=bc_cell, query_map=grid, column='col', \
                    query_column='col', quiet=True)
        v.to_db(map=bc_cell, option='coor', columns=('x,y'))

        # Find out if this is diagonal: finite difference works only N-S, W-E
        colNames = np.array(gscript.vector_db_select(pp, layer=1)['columns'])
        colValues = np.array(
            list(gscript.vector_db_select(pp, layer=1)['values'].values()))
        pp_row = int(colValues[:, colNames == 'row'].astype(int).squeeze())
        pp_col = int(colValues[:, colNames == 'col'].astype(int).squeeze())
        colNames = np.array(
            gscript.vector_db_select(bc_cell, layer=1)['columns'])
        colValues = np.array(
            list(gscript.vector_db_select(bc_cell, layer=1)['values'].values()))
        bc_row = int(colValues[:, colNames == 'row'].astype(int).squeeze())
        bc_col = int(colValues[:, colNames == 'col'].astype(int).squeeze())
        # Also get x and y while we are at it: may be needed later
        bc_x = float(colValues[:, colNames == 'x'].astype(float).squeeze())
        bc_y = float(colValues[:, colNames == 'y'].astype(float).squeeze())
        if (bc_row != pp_row) and (bc_col != pp_col):
            # If not diagonal, two possible locations that are adjacent
            # to the pour point
            _col1, _row1 = str(bc_col), str(pp_row)
            _col2, _row2 = str(pp_col), str(bc_row)
            # Check if either of these is covered by the basin mask
            _ismask_1 = gscript.vector_db_select(grid,
                                                 layer=1,
                                                 where='(row == ' + _row1 +
                                                 ') AND (col ==' + _col1 + ')',
                                                 columns='basinmask')
            _ismask_1 = int(list(_ismask_1['values'].values())[0][0])
            _ismask_2 = gscript.vector_db_select(grid,
                                                 layer=1,
                                                 where='(row == ' + _row2 +
                                                 ') AND (col ==' + _col2 + ')',
                                                 columns='basinmask')
            _ismask_2 = int(list(_ismask_2['values'].values())[0][0])
            # If both covered by mask, error
            if _ismask_1 and _ismask_2:
                gscript.fatal(
                    'All possible b.c. cells covered by basin mask.\n\
                             Contact the developer: awickert (at) umn(.)edu')
            # Otherwise, keep the candidate cells that are not covered by the
            # basin mask and set those ...
            # ... wait, do we want the point that touches as few interior
            # cells as possible?
            # Maybe just try setting both and seeing what happens for now!
            else:
                # Get dx and dy
                dx = gscript.region()['ewres']
                dy = gscript.region()['nsres']
                # Build tool to handle multiple b.c. cells?
                bcvect = vector.Vector(bc_cell)
                bcvect.open('rw')
                _cat_i = 2
                if not _ismask_1:
                    # _x should always be bc_x, but writing generalized code
                    _x = bc_x + dx * (int(_col1) - bc_col)  # col 1 at w edge
                    _y = bc_y - dy * (int(_row1) - bc_row)  # row 1 at n edge
                    point0 = Point(_x, _y)
                    bcvect.write(
                        point0,
                        cat=_cat_i,
                        attrs=(None, None, _row1, _col1, _x, _y),
                    )
                    bcvect.table.conn.commit()
                    _cat_i += 1
                if not _ismask_2:
                    # _y should always be bc_y, but writing generalized code
                    _x = bc_x + dx * (int(_col2) - bc_col)  # col 1 at w edge
                    _y = bc_y - dy * (int(_row2) - bc_row)  # row 1 at n edge
                    point0 = Point(_x, _y)
                    bcvect.write(
                        point0,
                        cat=_cat_i,
                        attrs=(None, None, _row2, _col2, _x, _y),
                    )
                    bcvect.table.conn.commit()
                # Build database table and vector geometry
                bcvect.build()
                bcvect.close()

    g.region(n=reg['n'],
             s=reg['s'],
             w=reg['w'],
             e=reg['e'],
             nsres=reg['nsres'],
             ewres=reg['ewres'])
Example #38
    def __init__(self, **optionsandflags):
        '''Process all arguments and prepare processing'''
        # add all options and flags as attributes (only nonempty ones)
        self.options = interpret_options(optionsandflags)
        self.__dict__.update(self.options)

        # save region for convenience
        self.region = grass.region()
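        # Conversion factors between km^2 and cell counts at metre resolutions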
        self.region['kmtocell'] = 10**6 / (self.region['ewres'] * self.region['nsres'])
        self.region['celltokm'] = self.region['ewres'] * self.region['nsres'] * 1e-6

        # check if DEM to processed or if all inputs set
        if not self.is_set('accumulation', 'drainage', 'streams'):
            grass.fatal('Either of these not set: accumulation, drainage, streams.')

        # lothresh default
        if 'lothresh' not in self.options:
            self.lothresh = self.upthresh * 0.05

        # what to do with upthresh
        if self.is_set('upthreshcolumn'):
            gm('Will look for upper thresholds in the %s column.' %
               self.upthreshcolumn)
            # get thresholds from column in station vect
            try:
                threshs = grass.vector_db_select(
                          self.stations, columns=self.upthreshcolumn)['values']
                self.upthresh = OrderedDict([(k, float(v[0]))
                                             for k, v in sorted(threshs.items())])
            except (KeyError, ValueError):
                grass.fatal("Can't read the upper threshold from the column %s"
                            % self.upthreshcolumn)

        # streamthresh
        if 'streamthresh' in self.options:
            # convert to cells
            self.streamthresh = self.region['kmtocell'] * self.streamthresh
            # check if reasonable
            fract = float(self.streamthresh) / self.region['cells']
            if fract > 0.5 or fract < 0.01:
                gwarn('streamthresh is %s percent of the region size!' % (fract*100))
        else:
            self.streamthresh = int(self.region['cells'] * 0.02)

        # if no r.watershed flags given
        if 'rwatershedflags' not in self.options:
            self.rwatershedflags = 's'
        if 'rwatershedmemory' in self.options:
            self.rwatershedflags += 'm'
        else:
            # default value/not used
            self.rwatershedmemory = 300

        # check input for stats print
        if self.s:
            for o in ['streams', 'stations', 'catchmentprefix']:
                if not self.is_set(o):
                    grass.fatal('%s needs to be set!' % o)
            # get all catchments
            rst = grass.list_strings('rast', self.catchmentprefix+'*')
            rolist = [(int(r.split('@')[0].replace(self.catchmentprefix, '')), r)
                      for r in sorted(rst) if '__' not in r]
            self.catchment_rasters = OrderedDict(rolist)
            gm('Found these catchments %s' % self.catchment_rasters)
            # calculate station topology
            self.snap_stations()
            self.get_stations_topology()

        # initialise subbasinsdone
        self.subbasinsdone = {}

        return
Example #39
def main():

    pan = options['pan']
    msxlst = options['msx'].split(',')
    outputsuffix = options['suffix']
    custom_ratio = options['ratio']
    center = options['center']
    center2 = options['center2']
    modulation = options['modulation']
    modulation2 = options['modulation2']

    if options['trim']:
        trimming_factor = float(options['trim'])
    else:
        trimming_factor = False

    histogram_match = flags['l']
    second_pass = flags['2']
    color_match = flags['c']

#    # Check & warn user about "ns == ew" resolution of current region ======
#    region = grass.region()
#    nsr = region['nsres']
#    ewr = region['ewres']
#
#    if nsr != ewr:
#        msg = ('>>> Region's North:South ({ns}) and East:West ({ew}) '
#               'resolutions do not match!')
#        msg = msg.format(ns=nsr, ew=ewr)
#        g.message(msg, flags='w')

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?
    region = grass.region()  # and region settings

    # List images and their properties

    imglst = [pan]
    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in imglst:  # Retrieving Image Info
        images[img] = Info(img, mapset)
        images[img].read()

    panres = images[pan].nsres  # Panchromatic resolution

    grass.use_temp_region()  # to safely modify the region
    run('g.region', res=panres)  # Respect extent, change resolution
    g.message("|! Region's resolution matched to Pan's ({p})".format(p=panres))

    # Loop Algorithm over Multi-Spectral images

    for msx in msxlst:
        g.message("\nProcessing image: {m}".format(m=msx))

        # Tracking command history -- why don't all r.* modules do this?
        cmd_history = []

        #
        # 1. Compute Ratio
        #

        g.message("\n|1 Determining ratio of low to high resolution")

        # Custom Ratio? Skip standard computation method.
        if custom_ratio:
            ratio = float(custom_ratio)
            g.message('Using custom ratio, overriding standard method!',
                      flags='w')

        # Multi-Spectral resolution(s), multiple
        else:
            # Image resolutions
            g.message("   > Retrieving image resolutions")

            msxres = images[msx].nsres

            # check
            if panres == msxres:
                msg = ("The Panchromatic's image resolution ({pr}) "
                       "equals to the Multi-Spectral's one ({mr}). "
                       "Something is probably not right! "
                       "Please check your input images.")
                msg = msg.format(pr=panres, mr=msxres)
                grass.fatal(_(msg))

            # compute ratio
            ratio = msxres / panres
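            # e.g. a 30 m MSX band over a 10 m Pan gives ratio = 3; this
            # ratio drives the size of the high-pass filter kernel below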
            msg_ratio = ('   >> Resolution ratio '
                         'low ({m:.{dec}f}) to high ({p:.{dec}f}): {r:.1f}')
            msg_ratio = msg_ratio.format(m=msxres, p=panres, r=ratio, dec=3)
            g.message(msg_ratio)

        # 2nd Pass requested, yet Ratio < 5.5
        if second_pass and ratio < 5.5:
            g.message("   >>> Resolution ratio < 5.5, skipping 2nd pass.\n"
                      "   >>> If you insist, force it via the <ratio> option!",
                      flags='i')
            second_pass = False

        #
        # 2. High Pass Filtering
        #

        g.message('\n|2 High Pass Filtering the Panchromatic Image')

        tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
        tmp = 'tmp.' + grass.basename(tmpfile)  # use its basename
        tmp_pan_hpf = '{tmp}_pan_hpf'.format(tmp=tmp)  # HPF image
        tmp_msx_blnr = '{tmp}_msx_blnr'.format(tmp=tmp)  # Upsampled MSx
        tmp_msx_hpf = '{tmp}_msx_hpf'.format(tmp=tmp)  # Fused image
        tmp_hpf_matrix = grass.tempfile()  # ASCII filter

        # Construct and apply Filter
        hpf = get_high_pass_filter(ratio, center)
        hpf_ascii(center, hpf, tmp_hpf_matrix, second_pass)
        run('r.mfilter', input=pan, filter=tmp_hpf_matrix,
            output=tmp_pan_hpf,
            title='High Pass Filtered Panchromatic image',
            overwrite=True)

        # 2nd pass
        if second_pass and ratio > 5.5:
            # Temporary files
            tmp_pan_hpf_2 = '{tmp}_pan_hpf_2'.format(tmp=tmp)  # 2nd Pass HPF image
            tmp_hpf_matrix_2 = grass.tempfile()  # 2nd Pass ASCII filter
            # Construct and apply 2nd Filter
            hpf_2 = get_high_pass_filter(ratio, center2)
            hpf_ascii(center2, hpf_2, tmp_hpf_matrix_2, second_pass)
            run('r.mfilter',
                input=pan,
                filter=tmp_hpf_matrix_2,
                output=tmp_pan_hpf_2,
                title='2-High-Pass Filtered Panchromatic Image',
                overwrite=True)

        #
        # 3. Upsampling low resolution image
        #

        g.message("\n|3 Upsampling (bilinearly) low resolution image")

        run('r.resamp.interp',
            method='bilinear', input=msx, output=tmp_msx_blnr, overwrite=True)

        #
        # 4. Weighting the High Pass Filtered image(s)
        #

        g.message("\n|4 Weighting the High-Pass-Filtered image (HPFi)")

        # Compute (1st Pass) Weighting
        msg_w = "   > Weighting = StdDev(MSx) / StdDev(HPFi) * " \
            "Modulating Factor"
        g.message(msg_w)

        # StdDev of Multi-Spectral Image(s)
        msx_avg = avg(msx)
        msx_sd = stddev(msx)
        g.message("   >> StdDev of <{m}>: {sd:.3f}".format(m=msx, sd=msx_sd))

        # StdDev of HPF Image
        hpf_sd = stddev(tmp_pan_hpf)
        g.message("   >> StdDev of HPFi: {sd:.3f}".format(sd=hpf_sd))

        # Modulating factor
        modulator = get_modulator_factor(modulation, ratio)
        g.message("   >> Modulating Factor: {m:.2f}".format(m=modulator))

        # weighting HPFi
        weighting = hpf_weight(msx_sd, hpf_sd, modulator, 1)

        #
        # 5. Adding weighted HPF image to upsampled Multi-Spectral band
        #

        g.message("\n|5 Adding weighted HPFi to upsampled image")
        fusion = '{hpf} = {msx} + {pan} * {wgt}'
        fusion = fusion.format(hpf=tmp_msx_hpf, msx=tmp_msx_blnr,
                               pan=tmp_pan_hpf, wgt=weighting)
        grass.mapcalc(fusion)

        # command history
        hst = 'Weighting applied: {msd:.3f} / {hsd:.3f} * {mod:.3f}'
        cmd_history.append(hst.format(msd=msx_sd, hsd=hpf_sd, mod=modulator))

        if second_pass and ratio > 5.5:

            #
            # 4+ 2nd Pass Weighting the High Pass Filtered image
            #

            g.message("\n|4+ 2nd Pass Weighting the HPFi")

            # StdDev of HPF Image #2
            hpf_2_sd = stddev(tmp_pan_hpf_2)
            g.message("   >> StdDev of 2nd HPFi: {h:.3f}".format(h=hpf_2_sd))

            # Modulating factor #2
            modulator_2 = get_modulator_factor2(modulation2)
            msg = '   >> 2nd Pass Modulating Factor: {m:.2f}'
            g.message(msg.format(m=modulator_2))

            # 2nd Pass weighting
            weighting_2 = hpf_weight(msx_sd, hpf_2_sd, modulator_2, 2)

            #
            # 5+ Adding weighted HPF image to upsampled Multi-Spectral band
            #

            g.message("\n|5+ Adding small-kernel-based weighted 2nd HPFi "
                      "back to fused image")

            add_back = '{final} = {msx_hpf} + {pan_hpf} * {wgt}'
            add_back = add_back.format(final=tmp_msx_hpf, msx_hpf=tmp_msx_hpf,
                                       pan_hpf=tmp_pan_hpf_2, wgt=weighting_2)
            grass.mapcalc(add_back)

            # 2nd Pass history entry
            hst = "2nd Pass Weighting: {m:.3f} / {h:.3f} * {mod:.3f}"
            cmd_history.append(hst.format(m=msx_sd, h=hpf_2_sd, mod=modulator_2))

        if color_match:
            g.message("\n|* Matching output to input color table")
            run('r.colors', map=tmp_msx_hpf, raster=msx)

        #
        # 6. Stretching linearly the HPF-Sharpened image(s) to match the Mean
        #     and Standard Deviation of the input Multi-Spectral image(s)
        #

        if histogram_match:

            # adapt output StdDev and Mean to the input(ted) ones
            g.message("\n|+ Matching histogram of Pansharpened image "
                      "to %s" % (msx), flags='v')

            # Collect stats for linear histogram matching
            msx_hpf_avg = avg(tmp_msx_hpf)
            msx_hpf_sd = stddev(tmp_msx_hpf)

            # expression for mapcalc
            lhm = '{out} = ({hpf} - {hpfavg}) / {hpfsd} * {msxsd} + {msxavg}'
            lhm = lhm.format(out=tmp_msx_hpf, hpf=tmp_msx_hpf,
                             hpfavg=msx_hpf_avg, hpfsd=msx_hpf_sd,
                             msxsd=msx_sd, msxavg=msx_avg)

            # compute
            grass.mapcalc(lhm, quiet=True, overwrite=True)

            # update history string
            cmd_history.append("Linear Histogram Matching: %s" % lhm)

        #
        # Optional. Trim to remove black border effect (rectangular only)
        #

        if trimming_factor:

            tf = trimming_factor

            # communicate
            msg = '\n|* Trimming output image border pixels by '
            msg += '{factor} times the low resolution\n'.format(factor=tf)
            nsew = '   > Input extent: n: {n}, s: {s}, e: {e}, w: {w}'
            nsew = nsew.format(n=region['n'], s=region['s'],
                               e=region['e'], w=region['w'])
            msg += nsew

            g.message(msg)

            # re-set borders (grass.region() returns a dict, so use keys)
            region['n'] -= tf * images[msx].nsres
            region['s'] += tf * images[msx].nsres
            region['e'] -= tf * images[msx].ewres
            region['w'] += tf * images[msx].ewres

            # communicate and act
            msg = '   > Output extent: n: {n}, s: {s}, e: {e}, w: {w}'
            msg = msg.format(n=region['n'], s=region['s'],
                             e=region['e'], w=region['w'])
            g.message(msg)

            # modify only the extent
            run('g.region',
                n=region['n'], s=region['s'], e=region['e'], w=region['w'])
            trim = "{out} = {input}".format(out=tmp_msx_hpf, input=tmp_msx_hpf)
            grass.mapcalc(trim)

        #
        # End of Algorithm

        # history entry
        run("r.support", map=tmp_msx_hpf, history="\n".join(cmd_history))

        # add suffix to basename & rename end product
        msx_name = "{base}.{suffix}"
        msx_name = msx_name.format(base=msx.split('@')[0], suffix=outputsuffix)
        run("g.rename", raster=(tmp_msx_hpf, msx_name))

        # remove temporary files
        cleanup()

    # visualising-related information
    grass.del_temp_region()  # restoring previous region settings
    g.message("\n|! Original Region restored")
    g.message("\n>>> Hint, rebalancing colors (via i.colors.enhance) "
              "may improve appearance of RGB composites!",
              flags='i')
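
# The example above relies on a few helpers that are not shown (run, avg,
# stddev, plus the HPF-specific get_high_pass_filter, hpf_ascii, hpf_weight
# and get_modulator_factor*). A minimal sketch of the three generic ones,
# assuming "import grass.script as grass" as in the rest of the snippet:

def run(cmd, **kwargs):
    """Quiet pass-through wrapper around grass.run_command."""
    grass.run_command(cmd, quiet=True, **kwargs)


def avg(img):
    """Mean of a raster, read from r.univar -g output."""
    return float(grass.parse_command('r.univar', map=img, flags='g')['mean'])


def stddev(img):
    """Standard deviation of a raster, read from r.univar -g output."""
    return float(grass.parse_command('r.univar', map=img, flags='g')['stddev'])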
Exemplo n.º 40
0
def main():
    stats = grass.read_command('r.stats',
                               input=options['map'],
                               sep='space',
                               nv='*',
                               nsteps='255',
                               flags='inc').split('\n')[:-1]
    res = grass.region()['nsres']
    zn = np.zeros((len(stats), 6), float)
    kl = np.zeros((len(stats), 2), float)
    prc = np.zeros((9, 2), float)

    for i in range(len(stats)):
        if i == 0:
            zn[i, 0], zn[i, 1] = list(map(float, stats[i].split(' ')))
            zn[i, 2] = zn[i, 1]
        else:
            zn[i, 0], zn[i, 1] = list(map(float, stats[i].split(' ')))
            zn[i, 2] = zn[i, 1] + zn[i - 1, 2]

    totcell = sum(zn[:, 1])
    print("Tot. cells", totcell)

    for i in range(len(stats)):
        zn[i, 3] = 1 - (zn[i, 2] / totcell)
        zn[i, 4] = zn[i, 3] * (((res**2) / 1000000) * totcell)
        zn[i, 5] = (zn[i, 0] - min(zn[:, 0])) / (max(zn[:, 0]) - min(zn[:, 0]))
        kl[i, 0] = zn[i, 0]
        kl[i, 1] = 1 - (zn[i, 2] / totcell)

    # quantiles
    prc[0, 0], prc[0, 1] = findint(kl, 0.025), 0.025
    prc[1, 0], prc[1, 1] = findint(kl, 0.05), 0.05
    prc[2, 0], prc[2, 1] = findint(kl, 0.1), 0.1
    prc[3, 0], prc[3, 1] = findint(kl, 0.25), 0.25
    prc[4, 0], prc[4, 1] = findint(kl, 0.5), 0.5
    prc[5, 0], prc[5, 1] = findint(kl, 0.75), 0.75
    prc[6, 0], prc[6, 1] = findint(kl, 0.9), 0.9
    prc[7, 0], prc[7, 1] = findint(kl, 0.95), 0.95
    prc[8, 0], prc[8, 1] = findint(kl, 0.975), 0.975

    # Managing flag & plot
    if flags['a']:
        plotImage(zn[:, 3], zn[:, 5], options['image'] + '_Hypsometric.png',
                  '-', 'A(i) / A', 'Z(i) / Zmax', 'Hypsometric Curve')
    if flags['b']:
        plotImage(zn[:, 4], zn[:, 0], options['image'] + '_Hypsographic.png',
                  '-', 'A [km^2]', 'Z [m.slm]', 'Hypsographic Curve')

    print("===========================")
    print("Hypsometric | quantiles")
    print("===========================")
    print('%.0f' % findint(kl, 0.025), "|", 0.025)
    print('%.0f' % findint(kl, 0.05), "|", 0.05)
    print('%.0f' % findint(kl, 0.1), "|", 0.1)
    print('%.0f' % findint(kl, 0.25), "|", 0.25)
    print('%.0f' % findint(kl, 0.5), "|", 0.5)
    print('%.0f' % findint(kl, 0.75), "|", 0.75)
    print('%.0f' % findint(kl, 0.9), "|", 0.9)
    print('%.0f' % findint(kl, 0.95), "|", 0.95)
    print('%.0f' % findint(kl, 0.975), "|", 0.975)
    print('\n')
    print('Done!')
    # export the analysed raster as GeoTIFF (input map and output name are
    # assumptions here; adjust to the intended dataset)
    grass.run_command('r.out.gdal', input=options['map'],
                      output=options['image'] + '.tif',
                      format='GTiff', overwrite=False)
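
# findint() and plotImage() are helpers defined elsewhere in the original
# script. A minimal sketch of findint, assuming kl is laid out as above
# (column 0 = value, column 1 = cumulative fraction): return the value
# whose cumulative fraction lies closest to the requested quantile.
def findint(kl, level):
    idx = np.abs(kl[:, 1] - level).argmin()
    return kl[idx, 0]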


# Placeholder to make these into future functions
def outputByType(self):
  pass




# Start in the lowest cell along the left-hand side of the experiment
# May want to make this a user-defined region in the future.
# And may want to define boundaries as closed or open -- for this,
# just r.patch a wall around everything except the end of the flume on the RHS.

reg = gscript.region()
g.region(w=reg['w'] - 2*reg['ewres'], e=reg['e'] + 2*reg['ewres'],
         s=reg['s'] - 2*reg['nsres'], n=reg['n'] + 2*reg['nsres'],
         save='with_boundaries', overwrite=True)
# CUSTOM COMMANDS HERE TO CREATE WALL BASED ON X AND Y POSITIONS
# THIS SHOULD ALSO BE PRE-DEFINED WHEN THIS IS FINISHED
# Keep right boundary open
mcstr = "boundaries = (x < "+str(margin_left/1000.)+") + "+ \
                     "(y < "+str(margin_bottom/1000.)+") + "+ \
                     "(y > "+str(margin_top/1000.)+")"
r.mapcalc(mcstr, overwrite=True)
r.mapcalc("boundaries = boundaries > 0", overwrite=True) # Logical 0/1
r.null(map='boundaries', setnull=0)
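
# The garray reads below expect rasters named 'x' and 'y' that hold each
# cell's coordinates; if they do not exist yet, a sketch to create them
# (x() and y() are r.mapcalc's coordinate functions):
r.mapcalc("x = x()", overwrite=True)
r.mapcalc("y = y()", overwrite=True)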

_x = garray.array()
_x.read('x')
_y = garray.array()
_y.read('y')
Exemplo n.º 42
0
def main():
    #temporary map names
    global tm, t
    tm = {}
    t = True
    #read user inputs
    dtm = options['elevation']
    lake = options['lake']
    rho = float(options['s_rho'])
    por = float(options["s_por"])
    vol = float(options['s_vol'])
    width = float(options['s_width'])
    thick = float(options['s_thick'])
    east = float(options['i_east'])
    north = float(options['i_north'])
    idepth = float(options['i_depth'])
    vel = float(options['i_vel'])
    slope = float(options['i_slope'])
    azi = float(options['i_azimut'])
    wbc = float(options['wbc'])
    shadow = options['shadow']
    outI = options["inund"]
    outH = options["wave"]
    quiet = flags["g"]

    #print "outH=%s" %(options)

    #check if output map exist
    mapset = grass.gisenv()['MAPSET']
    if not grass.overwrite():
        if grass.find_file(outI, element='cell', mapset=mapset)['file']:
            grass.fatal(_("Raster map <%s> already exists.") % outI)
        if grass.find_file(outH, element='cell', mapset=mapset)['file']:
            grass.fatal(_("Raster map <%s> already exists.") % outH)

    # initialize costants & env variables
    rho_w = 1000
    g = 9.80665
    region = grass.region()

    # prepare temporary map raster names
    processid = "%.7f" % time.time()
    tm["distance"] = "distance_" + processid
    tm["impact"] = "impact_" + processid
    tm["field"] = "field_" + processid
    tm["azimut"] = "azimut_" + processid
    tm["gamma"] = "gamma_" + processid
    tm["H"] = "H_" + processid
    tm["T"] = "T_" + processid
    tm["L"] = "L_" + processid
    #tm["dzw"] = "dzw_"+ processid
    tm["zones"] = "zones_" + processid
    tm["zoneSUM"] = "zoneSUM_" + processid
    tm["runup"] = "runup_" + processid
    tm["ruline"] = "ruline_" + processid
    tm["slope"] = "slope_" + processid
    #tm["ru_mask"] = "ru_mask_"+ processid
    #tm["ru_grow"] = "ru_grow_"+ processid
    tm["wave_elev"] = "wave_elev_" + processid
    tm["elev"] = "elev_" + processid
    #tm["inond"] = "inond_"+ processid

    #check temporary map names are not existing maps
    for key, value in tm.items():
        if grass.find_file(value, element='cell', mapset=mapset)['file']:
            grass.fatal(_("Temporary raster map <%s> already exists.") % value)

    #step 0 - Calculate water height at impact coordinates
    #============================================================================================
    if not flags["g"]:
        grass.message("step 0")
    i_hw = grass.read_command("r.what",
                              flags="f",
                              input=lake,
                              east_north=[(east, north)]).split("|")[-2]
    if i_hw == "*":
        grass.fatal(
            _("Coordinates <%s,%s> are not on the lake.") % (east, north))
    else:
        #i_hw = float(i_hw)
        i_hw = idepth

    #step 1 - Estimate impulse wave product parameter P (Heller, 2007; Heller and Hager, 2009)
    #==========================================================================================
    if not flags["g"]:
        grass.message("step 1")
    M = (rho * vol) / (rho_w * width * math.pow(i_hw, 2))
    S = thick / i_hw
    F = vel / math.sqrt(g * i_hw)
    P = F * math.sqrt(S) * math.pow(M, 0.25) * math.sqrt(
        math.cos(6.0 / 7.0 * math.radians(slope)))
    if not flags["g"]:
        grass.message("M=%s - S=%s - F=%s - P=%s" % (M, S, F, P))

    #step 2 - Estimate maximum wave informations (Heller, 2007)
    #  [Hm = height, Tm=period, a=amplitude, c=celerity, Lm=length, rm=distance from impact]
    #============================================================================================
    if not flags["g"]:
        grass.message("step 2")
    Hm = (5.0 / 9.0) * math.pow(P, 4.0 / 5.0) * i_hw
    Tm = 9 * math.sqrt(P) * math.sqrt(i_hw / g)
    a = 4.0 / 5.0 * Hm
    c = math.sqrt(g * (i_hw + a))
    Lm = Tm * c
    rm = (11.0 / 2.0) * math.sqrt(P) * i_hw
    #grass.message( "%s * %s * %s" %((5.0/9.0),math.pow(P,4.0/5.0),i_hw) )
    #grass.message("Hm=%s - Tm=%s - a=%s - c=%s - Lm=%s - rm=%s" %(Hm,Tm,a,c,Lm,rm))

    #step 3 - Estimate near field [0] (x<rm) and far field (x>rm) [1]
    #============================================================================================
    #calculate impact map
    if not flags["g"]:
        grass.message("step 3")
    i_col = math.floor((east - region["w"]) / region["ewres"]) + 1
    i_row = math.floor((region["n"] - north) / region["nsres"]) + 1
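    # col() and row() in r.mapcalc are 1-based, hence the floor(...) + 1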
    grass.mapcalc(" $impact = if( col()==$icol && row()==$irow, 1, null() )",
                  impact=tm["impact"],
                  icol=i_col,
                  irow=i_row,
                  quiet=quiet)
    #grass.message("col=%s - row=%s east=%s north=%s" %(i_col,i_row,east,north))
    #calculate distance map
    grass.run_command("r.grow.distance",
                      input=tm["impact"],
                      distance=tm["distance"],
                      quiet=quiet)
    #calculate far field (=1) and near field (=0)
    grass.mapcalc(" $field = if( $distance>$rm,1,0)",
                  field=tm["field"],
                  distance=tm["distance"],
                  rm=rm,
                  quiet=quiet)

    #step 4 - Calculate wave height in the far field
    #============================================================================================
    #calculate azimut map
    if not flags["g"]:
        grass.message("step 4")
    grass.mapcalc(""" $azimut = \
                        if( x()-$east==0 && y()-$north==0,null(), \
                          if( x()-$east==0 && y()-$north>0, 90, \
                            if( x()-$east==0 && y()-$north<0, 270, \
                              if( x()-$east>0 && y()-$north>0, atan(      (x()-$east) / (y()-$north) ), \
                                if( x()-$east<0 && y()-$north>0, atan(    (x()-$east) / (y()-$north) ) +360, \
                                  if( x()-$east>0 && y()-$north<0, atan(  (x()-$east) / (y()-$north) ) +180, \
                                    if( x()-$east<0 && y()-$north<0, atan((x()-$east) / (y()-$north) ) +180,-1 ) \
                                  ) \
                                ) \
                              ) \
                            ) \
                          ) \
                        ) """,
                  azimut=tm["azimut"],
                  east=east,
                  north=north,
                  quiet=quiet)
    #calculate direction map
    grass.mapcalc(" $gamma = $azimut - $impact_azimut",
                  gamma=tm["gamma"],
                  azimut=tm["azimut"],
                  impact_azimut=azi,
                  quiet=quiet)

    #calculate wave propagation in the far field (H=height, T=period, L=length), in the near field values are Hm,Tm,Lm
    grass.mapcalc(
        " $H = if($field==1,(3.0/2.0) * pow($P,(4.0/5.0)) * pow(cos(2.0/3.0*$gamma),2) * pow(($distance/$ihw),-2.0/3.0) * $ihw ,$Hm)",
        H=tm["H"],
        P=P,
        gamma=tm["gamma"],
        distance=tm["distance"],
        ihw=i_hw,
        Hm=Hm,
        field=tm["field"],
        quiet=quiet)

    #accounting for solid mass type
    if flags["s"]:
        rf = (a / (0.26 * F)) / a
        print "rf=%s" % rf
        grass.mapcalc(" $H = if($lake>0, $H * $rf, $H)",
                      H=tm["H"],
                      field=tm["field"],
                      rf=rf,
                      lake=lake,
                      overwrite=True,
                      quiet=quiet)

    #accounting for shoaling effect
    grass.mapcalc(
        " $H = if($lake>0 && $field==1, $H * pow(($ihw/$lake),(1.0/4.0)), $H)",
        H=tm["H"],
        field=tm["field"],
        ihw=i_hw,
        lake=lake,
        overwrite=True,
        quiet=quiet)

    grass.mapcalc(
        " $T = if($lake>0 && $field==1, 15 * pow($H/$ihw,1.0/4.0) * sqrt($ihw/$g), $Tm)",
        T=tm["T"],
        H=tm["H"],
        ihw=i_hw,
        g=g,
        Tm=Tm,
        lake=lake,
        field=tm["field"],
        quiet=quiet)

    grass.mapcalc(" $L = if($lake>0 && $field==1, $T * $c, $Lm)",
                  L=tm["L"],
                  T=tm["T"],
                  c=c,
                  Lm=Lm,
                  lake=lake,
                  field=tm["field"],
                  quiet=quiet)

    if outH is not "":
        grass.run_command("g.copy",
                          rast="%s,%s" % (tm["H"], outH),
                          overwrite=True)

    #step 5 - Calculate run-up
    #============================================================================================
    if not flags["g"]:
        grass.message("step 5")

    #calculate runup zones:
    #   - 1 = wave propagation zone
    #   - 100 = runup propagation zone
    #   - 10000 = coast terrain
    #   - 0 = wave generation (near field)
    #   N.B.: wbc = H/h, wave break condition
    #wbc = 0.8 --> following the Coastal Engineering Manual
    #wbc = 0.521 --> following the equations' limits
    grass.mapcalc(
        "$zones = if(isnull($lake),10000,if($field==0,0,if($H/$lake>$wbc,100,1)))",
        zones=tm["zones"],
        lake=lake,
        H=tm["H"],
        field=tm["field"],
        wbc=wbc,
        quiet=quiet)

    #calculate runup line (2=runup line, 1=coast line affected by runup, 3=coast line not affected by runup)
    grass.run_command("r.neighbors",
                      input=tm["zones"],
                      output=tm["zoneSUM"],
                      method="sum",
                      size=3,
                      quiet=quiet)
    grass.mapcalc("""$ruline = if($ZO==1 && $ZOS>10000 && $ZOS<80000,1, \
                                    if($ZO==1 && $ZOS>100 && $ZOS<900,2, \
                                        if($ZO==100 && $ZOS>10000 && $ZOS<80000,3,0) \
                                      ) \
                                  )""",
                  ruline=tm["ruline"],
                  ZO=tm["zones"],
                  ZOS=tm["zoneSUM"],
                  quiet=quiet)

    #calculate runup maximum height at runup line
    grass.run_command("r.slope.aspect",
                      elevation=dtm,
                      slope=tm["slope"],
                      quiet=quiet)
    grass.mapcalc(
        """$runup = if($ruline==2, 1.25*pow($H/$h,5.0/4.0)*pow($H/$L,-3.0/20.0)*pow(90.0/$slope,1.0/5.0)*$h, \
                                    if($ruline==1, $H,0) \
                                )""",
        runup=tm["runup"],
        ruline=tm["ruline"],
        H=tm["H"],
        h=lake,
        L=tm["L"],
        slope=tm["slope"],
        overwrite=True,
        quiet=quiet)

    #calculate elevation surface (dtm + water level)
    grass.mapcalc("$elev = if(isnull($lake),$dtm,$lake+$dtm)",
                  elev=tm["elev"],
                  lake=lake,
                  dtm=dtm,
                  overwrite=True,
                  quiet=quiet)

    #calculate wave elevation in cm
    grass.mapcalc("$wave_elev = if($runup!=0, float($runup+$elev)*100,0.0)",
                  wave_elev=tm["wave_elev"],
                  runup=tm["runup"],
                  elev=tm["elev"],
                  overwrite=True,
                  quiet=quiet)

    tm["outA"] = outI + "A_" + processid
    tm["outB"] = outI + "B_" + processid
    grass.run_command("r.surf.idw",
                      input=tm["wave_elev"],
                      output=tm["outA"],
                      npoints=1,
                      quiet=quiet)

    #calculate inundation in m
    grass.mapcalc(
        "$inondB = if($zones==10000 && $elev<$inondA/100.0, ($inondA/100.0)-$elev,null())",
        inondB=tm["outB"],
        inondA=tm["outA"],
        elev=tm["elev"],
        zones=tm["zones"],
        overwrite=True,
        quiet=quiet)
    grass.run_command("r.neighbors",
                      input=tm["outB"],
                      output=outI,
                      selection=tm["outB"],
                      overwrite=True,
                      quiet=quiet)

    #mask output inundation
    if options['shadow'] != '':
        grass.mapcalc("$inond=if($shadow,$inond,null())",
                      inond=outI,
                      shadow=shadow,
                      overwrite=True,
                      quiet=quiet)
    if flags['a']:
        grass.mapcalc("$inond=if($gamma>-90 && $gamma<90,$inond,null())",
                      inond=outI,
                      gamma=tm["gamma"],
                      overwrite=True,
                      quiet=quiet)

    # GENERATE REPORTS
    report = ""
    if not flags["g"]:
        grass.message("step 6 - generating report")
    if flags['w']:
        report += "=====================================================\n"
        report += "=============    MAXIMUM WAVE HEIGHT    =============\n"
        report += "=====================================================\n"
        report += "Hm=%s\n" % Hm
        report += "Tm=%s\n" % Tm
        report += "Lm=%s\n" % Lm
        report += "a=%s\n" % a
        report += "c=%s\n" % c
        report += "rm=%s\n" % rm
    if flags['c']:
        report += "=====================================================\n"
        report += "==========    LIMITING CONDITIONS REPORT    =========\n"
        report += "=====================================================\n"
        #slide froude number
        report += "Slide Froude number limitation (0.86<= F <= 6.83):"
        if F >= 0.86 and F <= 6.83:
            report += " value=%s respected=%s\n" % (F, True)
        else:
            report += " value=%s respected=%s\n" % (F, False)
        #Relative slide thickness
        report += "Relative slide thickness (0.09<= S <= 1.64):"
        if S >= 0.09 and S <= 1.64:
            report += " value=%s respected=%s\n" % (S, True)
        else:
            report += " value=%s respected=%s\n" % (S, False)
        #Relative slide mass
        report += "Relative slide mass (0.11<= M <= 10.02):"
        if M >= 0.11 and M <= 10.02:
            report += " value=%s respected=%s\n" % (M, True)
        else:
            report += " value=%s respected=%s\n" % (M, False)
        #Relative slide density
        report += "Relative slide density (0.59<= D <= 1.72):"
        D = (rho / 1000)
        if D >= 0.59 and D <= 1.72:
            report += " value=%s respected=%s\n" % (D, True)
        else:
            report += " value=%s respected=%s\n" % (D, False)
        #Relative granulate density
        report += "Relative granulate density (0.96<= Dg <= 2.75):"
        Dg = rho / ((1 - 0.01 * por) * 1000)  # grain density relative to water
        if Dg >= 0.96 and Dg <= 2.75:
            report += " value=%s respected=%s\n" % (Dg, True)
        else:
            report += " value=%s respected=%s\n" % (Dg, False)
        #Relative slide volume
        report += "Relative slide volume (0.05<= V <= 5.94):"
        V = vol / (width * thick * thick)
        if V >= 0.05 and V <= 5.94:
            report += " value=%s respected=%s\n" % (V, True)
        else:
            report += " value=%s respected=%s\n" % (V, False)
        #Relative slide width
        report += "Relative slide width (0.74<= B <= 3.33):"
        B = width / thick
        if B >= 0.74 and B <= 3.33:
            report += " value=%s respected=%s\n" % (B, True)
        else:
            report += " value=%s respected=%s\n" % (B, False)
        #Bulk slide porosity
        report += "Bulk slide porosity (30.7<= por <= 43.3):"
        if por >= 30.7 and por <= 43.3:
            report += " value=%s respected=%s\n" % (por, True)
        else:
            report += " value=%s respected=%s\n" % (por, False)
        #Slide impact angle
        report += "Slide impact angle (30deg<= beta <= 90deg):"
        if slope >= 30 and slope <= 90:
            report += " value=%s respected=%s\n" % (slope, True)
        else:
            report += " value=%s respected=%s\n" % (slope, False)
        #Impulse product parameter
        report += "Impulse product parameter (0.17<= P <= 8.13):"
        if P >= 0.17 and P <= 8.13:
            report += " value=%s respected=%s\n" % (P, True)
        else:
            report += " value=%s respected=%s\n" % (P, False)
        """
        #temporary maps for reporting
        #tm["rrd"]="rrd_"+processid
        #tm["rrdB"]="rrdB_"+processid
        #tm["rws"]="rws_"+processid
        #tm["rwsB"]="rwsB_"+processid
        #Relative radial distance
        report += "Relative radial distance (5<= r/h <= 30):"
        if options['shadow'] is not '':
            grass.mapcalc("$rrd=if(!isnull($lake) && $shadow,$distance/$H,null())", rrd=tm["rrd"], lake=lake, shadow=shadow , distance=tm["distance"], H=tm["H"], quiet=quiet)
        elif flags['a']:
            grass.mapcalc("$rrd=if(!isnull($lake) && ($gamma>-90 || $gamma<90),$distance/$H,null())", rrd=tm["rrd"], lake=lake, gamma=tm["gamma"], distance=tm["distance"], H=tm["H"], quiet=quiet)
        grass.mapcalc("$rrdB=if($rrd<=30 && $rrd>=5,1,0)", rrdB=tm["rrdB"], rrd=tm["rrd"])
        ret = grass.read_command("r.univar",map=tm["rrdB"],flags="g")
        info = {}
        for l in ret.split("\n"):
            s = l.split("=")
            if len(s)==2:
                info[s[0]]=float(s[1])
        report += "value=%s respected=%s%%\n" %(info["mean"], info["sum"]*100 / (info["cells"]-info["null_cells"]))
        #Relative wave steepness
        report += "Relative wave steepness"
        if options['shadow'] is not '':
            grass.mapcalc("$rws=if($ruline==2 && $shadow,$H/$L,null())", rws=tm["rws"], ruline=tm["ruline"], shadow=shadow , H=tm["H"], L=tm["L"])
        elif flags['a']:
            grass.mapcalc("$rws=if($ruline==2 && ($gamma>-90 || $gamma<90),$H/$L,null())", rws=tm["rws"], ruline=tm["ruline"], gamma=tm["gamma"], H=tm["H"], L=tm["L"])
        grass.mapcalc("$rwsB=if($rws<=30 && $rws>=5,1,0)", rwsB=tm["rwsB"], rws=tm["rws"])
        ret = grass.read_command("r.univar",map=tm["rwsB"],flags="g")
        info = {}
        for l in ret.split("\n"):
            s = l.split("=")
            if len(s)==2:
                info[s[0]]=float(s[1])
        report += "value=%s respected=%s%%\n" %(info["mean"], info["sum"]*100 / (info["cells"]-info["null_cells"]))
        """
    if flags["c"] or flags["w"]:
        report += "=====================================================\n"
        print(report)
        """
        report = "Relative radial distance (5<= r/h <= 30):"
        B = tm["gamma"]/lake
        if slope>= 0.74  && slope<= 3.33:
            report += " value=%s respected=%s\n" %(B,True)
        else:
            report += " value=%s respected=%s\n" %(B,False)
        """

    if not flags["t"]:
        grass.run_command("g.remove",
                          rast=",".join([tm[m] for m in tm.keys()]),
                          quiet=True)
    t = False
def refine_region(factor=3):
    gs.run_command('g.region',
                   rows=gs.region()['rows'] * factor,
                   cols=gs.region()['cols'] * factor)
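
# Usage sketch: refine_region(2) doubles both rows and cols, i.e. halves
# the ns and ew resolution while leaving the region extent unchanged.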
Exemplo n.º 44
0
def main():
    global TMPLOC, SRCGISRC, GISDBASE, TMP_REG_NAME

    GDALdatasource = options['input']
    output = options['output']
    method = options['resample']
    memory = options['memory']
    bands = options['band']
    tgtres = options['resolution']
    title = options["title"]
    if options['resolution_value']:
        if tgtres != 'value':
            grass.fatal(
                _("To set custom resolution value, select 'value' in resolution option"
                  ))
        tgtres_value = float(options['resolution_value'])
        if tgtres_value <= 0:
            grass.fatal(_("Resolution value can't be smaller than 0"))
    elif tgtres == 'value':
        grass.fatal(
            _("Please provide the resolution for the imported dataset or change to 'estimated' resolution"
              ))

    grassenv = grass.gisenv()
    tgtloc = grassenv['LOCATION_NAME']
    tgtmapset = grassenv['MAPSET']
    GISDBASE = grassenv['GISDBASE']
    tgtgisrc = os.environ['GISRC']
    SRCGISRC = grass.tempfile()

    TMPLOC = 'temp_import_location_' + str(os.getpid())

    f = open(SRCGISRC, 'w')
    f.write('MAPSET: PERMANENT\n')
    f.write('GISDBASE: %s\n' % GISDBASE)
    f.write('LOCATION_NAME: %s\n' % TMPLOC)
    f.write('GUI: text\n')
    f.close()

    tgtsrs = grass.read_command('g.proj', flags='j', quiet=True)

    # create temp location from input without import
    grass.verbose(
        _("Creating temporary location for <%s>...") % GDALdatasource)
    parameters = dict(input=GDALdatasource,
                      output=output,
                      memory=memory,
                      flags='c',
                      title=title,
                      location=TMPLOC,
                      quiet=True)
    if bands:
        parameters['band'] = bands
    try:
        grass.run_command('r.in.gdal', **parameters)
    except CalledModuleError:
        grass.fatal(_("Unable to read GDAL dataset <%s>") % GDALdatasource)

    # switch to target location
    os.environ['GISRC'] = str(tgtgisrc)

    # try r.in.gdal directly first
    additional_flags = 'l' if flags['l'] else ''
    if flags['o']:
        additional_flags += 'o'
    if flags['o'] or grass.run_command('r.in.gdal',
                                       input=GDALdatasource,
                                       flags='j',
                                       errors='status',
                                       quiet=True) == 0:
        parameters = dict(input=GDALdatasource,
                          output=output,
                          memory=memory,
                          flags='k' + additional_flags)
        if bands:
            parameters['band'] = bands
        try:
            grass.run_command('r.in.gdal', **parameters)
            grass.verbose(
                _("Input <%s> successfully imported without reprojection") %
                GDALdatasource)
            return 0
        except CalledModuleError as e:
            grass.fatal(
                _("Unable to import GDAL dataset <%s>") % GDALdatasource)

    # make sure target is not xy
    if grass.parse_command('g.proj',
                           flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for current location <%s>"
              ) % tgtloc)

    # switch to temp location
    os.environ['GISRC'] = str(SRCGISRC)

    # make sure input is not xy
    if grass.parse_command('g.proj',
                           flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for input <%s>") %
            GDALdatasource)

    # import into temp location
    grass.verbose(
        _("Importing <%s> to temporary location...") % GDALdatasource)
    parameters = dict(input=GDALdatasource,
                      output=output,
                      memory=memory,
                      flags='k' + additional_flags)
    if bands:
        parameters['band'] = bands
    try:
        grass.run_command('r.in.gdal', **parameters)
    except CalledModuleError:
        grass.fatal(_("Unable to import GDAL dataset <%s>") % GDALdatasource)

    outfiles = grass.list_grouped('raster')['PERMANENT']

    # is output a group?
    group = False
    path = os.path.join(GISDBASE, TMPLOC, 'group', output)
    if os.path.exists(path):
        group = True
        path = os.path.join(GISDBASE, TMPLOC, 'group', output, 'POINTS')
        if os.path.exists(path):
            grass.fatal(_("Input contains GCPs, rectification is required"))

    # switch to target location
    os.environ['GISRC'] = str(tgtgisrc)

    region = grass.region()

    rflags = None
    if flags['n']:
        rflags = 'n'

    for outfile in outfiles:

        n = region['n']
        s = region['s']
        e = region['e']
        w = region['w']

        grass.use_temp_region()

        if options['extent'] == 'input':
            # r.proj -g
            try:
                tgtextents = grass.read_command('r.proj',
                                                location=TMPLOC,
                                                mapset='PERMANENT',
                                                input=outfile,
                                                flags='g',
                                                memory=memory,
                                                quiet=True)
            except CalledModuleError:
                grass.fatal(_("Unable to get reprojected map extent"))
            try:
                srcregion = grass.parse_key_val(tgtextents,
                                                val_type=float,
                                                vsep=' ')
                n = srcregion['n']
                s = srcregion['s']
                e = srcregion['e']
                w = srcregion['w']
            except ValueError:  # import into latlong, expect 53:39:06.894826N
                srcregion = grass.parse_key_val(tgtextents, vsep=' ')
                n = grass.float_or_dms(srcregion['n'][:-1]) * \
                    (-1 if srcregion['n'][-1] == 'S' else 1)
                s = grass.float_or_dms(srcregion['s'][:-1]) * \
                    (-1 if srcregion['s'][-1] == 'S' else 1)
                e = grass.float_or_dms(srcregion['e'][:-1]) * \
                    (-1 if srcregion['e'][-1] == 'W' else 1)
                w = grass.float_or_dms(srcregion['w'][:-1]) * \
                    (-1 if srcregion['w'][-1] == 'W' else 1)

            grass.run_command('g.region', n=n, s=s, e=e, w=w)

        # v.in.region in tgt
        vreg = TMP_REG_NAME = 'vreg_tmp_' + str(os.getpid())
        grass.run_command('v.in.region', output=vreg, quiet=True)

        grass.del_temp_region()

        # reproject to src
        # switch to temp location
        os.environ['GISRC'] = str(SRCGISRC)
        try:
            grass.run_command('v.proj',
                              input=vreg,
                              output=vreg,
                              location=tgtloc,
                              mapset=tgtmapset,
                              quiet=True)
        except CalledModuleError:
            grass.fatal(_("Unable to reproject to source location"))

        # set region from region vector
        grass.run_command('g.region', raster=outfile)
        grass.run_command('g.region', vector=vreg)
        # align to first band
        grass.run_command('g.region', align=outfile)
        # get number of cells
        cells = grass.region()['cells']

        estres = math.sqrt((n - s) * (e - w) / cells)
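        # i.e. estres = sqrt(extent_area / cells): the side of the square
        # cell that would tile the reprojected extent (n-s by e-w) with the
        # same number of cells as the band aligned above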
        # remove from source location for multi bands import
        grass.run_command('g.remove',
                          type='vector',
                          name=vreg,
                          flags='f',
                          quiet=True)

        os.environ['GISRC'] = str(tgtgisrc)
        grass.run_command('g.remove',
                          type='vector',
                          name=vreg,
                          flags='f',
                          quiet=True)

        grass.message(
            _("Estimated target resolution for input band <{out}>: {res}").
            format(out=outfile, res=estres))
        if flags['e']:
            continue

        if options['extent'] == 'input':
            grass.use_temp_region()
            grass.run_command('g.region', n=n, s=s, e=e, w=w)

        res = None
        if tgtres == 'estimated':
            res = estres
        elif tgtres == 'value':
            res = tgtres_value
            grass.message(
                _("Using given resolution for input band <{out}>: {res}").
                format(out=outfile, res=res))
            # align to requested resolution
            grass.run_command('g.region', res=res, flags='a')
        else:
            curr_reg = grass.region()
            grass.message(
                _("Using current region resolution for input band "
                  "<{out}>: nsres={ns}, ewres={ew}").format(
                      out=outfile, ns=curr_reg['nsres'], ew=curr_reg['ewres']))

        # r.proj
        grass.message(_("Reprojecting <%s>...") % outfile)
        try:
            grass.run_command('r.proj',
                              location=TMPLOC,
                              mapset='PERMANENT',
                              input=outfile,
                              method=method,
                              resolution=res,
                              memory=memory,
                              flags=rflags,
                              quiet=True)
        except CalledModuleError:
            grass.fatal(_("Unable to to reproject raster <%s>") % outfile)

        if grass.raster_info(outfile)['min'] is None:
            grass.fatal(_("The reprojected raster <%s> is empty") % outfile)

        if options['extent'] == 'input':
            grass.del_temp_region()

    if flags['e']:
        return 0

    if group:
        grass.run_command('i.group', group=output, input=','.join(outfiles))

    # TODO: write metadata with r.support

    return 0
Exemplo n.º 45
0
    def _get_psmerc_region_resolution(self):
        """Get region resolution (average ns and ew) of psmerc mapset"""
        reg = gs.region(env=self._tgt_env)
        return (reg["nsres"] + reg["ewres"]) / 2
Exemplo n.º 46
0
def main():
    stats = grass.read_command("r.stats",
                               input=options["map"],
                               sep="space",
                               nv="*",
                               nsteps="255",
                               flags="Anc").split("\n")[:-1]

    # res = cellsize
    res = grass.region()["nsres"]

    zn = np.zeros((len(stats), 4), float)
    kl = np.zeros((len(stats), 2), float)
    prc = np.zeros((9, 2), float)

    for i in range(len(stats)):
        zn[i, 0], zn[i, 1] = map(float, stats[i].split(" "))
        if i == 0:
            zn[i, 2] = zn[i, 1] * res
        else:
            zn[i, 2] = zn[i, 1] + zn[i - 1, 2]
            zn[i, 3] = zn[i, 1] * (res**2)

    totcell = sum(zn[:, 1])
    print("Tot. cells %s" % (totcell))
    totarea = totcell * (res**2)
    print("Tot. area %s" % (totarea))
    maxdist = max(zn[:, 0])
    print("Max distance %s" % (maxdist))

    for i in range(len(stats)):
        kl[i, 0] = zn[i, 0]
        kl[i, 1] = zn[i, 2] / totcell

    # quantiles
    prc[0, 0], prc[0, 1] = findint(kl, 0.05), 0.05
    prc[1, 0], prc[1, 1] = findint(kl, 0.15), 0.15
    prc[2, 0], prc[2, 1] = findint(kl, 0.3), 0.3
    prc[3, 0], prc[3, 1] = findint(kl, 0.4), 0.4
    prc[4, 0], prc[4, 1] = findint(kl, 0.5), 0.5
    prc[5, 0], prc[5, 1] = findint(kl, 0.6), 0.6
    prc[6, 0], prc[6, 1] = findint(kl, 0.7), 0.7
    prc[7, 0], prc[7, 1] = findint(kl, 0.85), 0.85
    prc[8, 0], prc[8, 1] = findint(kl, 0.95), 0.95

    # plot
    plotImage(
        zn[:, 0],
        zn[:, 3],
        options["image"] + "_width_function.png",
        "-",
        "x",
        "W(x)",
        "Width Function",
    )

    print("===========================")
    print("Width Function | quantiles")
    print("===========================")
    print("%.0f | %s" % (findint(kl, 0.05), 0.05))
    print("%.0f | %s" % (findint(kl, 0.15), 0.15))
    print("%.0f | %s" % (findint(kl, 0.3), 0.3))
    print("%.0f | %s" % (findint(kl, 0.4), 0.4))
    print("%.0f | %s" % (findint(kl, 0.5), 0.5))
    print("%.0f | %s" % (findint(kl, 0.6), 0.6))
    print("%.0f | %s" % (findint(kl, 0.7), 0.7))
    print("%.0f | %s" % (findint(kl, 0.85), 0.85))
    print("%.0f | %s" % (findint(kl, 0.95), 0.95))
    print("\n")
    print("Done!")
Exemplo n.º 47
0
    def _initializeParameters(self, options, flags):
        self._debug("_initialize_parameters", "started")
        
        # initialization of module parameters (options, flags)
        self.flags = flags 
        if self.flags['t']:
            self.transparent = 'TRUE'
        else:
            self.transparent = 'FALSE'   
        
        self.o_mapserver_url = options['mapserver'].strip() + "?" 
        self.o_layers = options['layers'].strip()
        self.o_styles = options['styles'].strip()
        self.o_output = options['output']
        self.o_method = options['method']
        
        self.o_bgcolor = options['bgcolor'].strip()
        if self.o_bgcolor != "" and not flags["d"]:
            grass.warning(_("Parameter bgcolor ignored, use -d flag"))
        
        self.o_urlparams = options['urlparams'].strip()
        if self.o_urlparams != "" and not flags["d"]:
            grass.warning(_("Parameter urlparams ignored, use -d flag"))
        
        self.o_wms_version = options['wms_version']        
        if self.o_wms_version == "1.3.0":
            self.projection_name = "CRS"
        else:
            self.projection_name = "SRS" 
        
        self.o_format = options['format']
        if self.o_format == "geotiff":
            self.mime_format = "image/geotiff"
        elif self.o_format == "tiff":
            self.mime_format = "image/tiff"
        elif self.o_format == "png":
            self.mime_format = "image/png"
        elif self.o_format == "jpeg":
            self.mime_format = "image/jpeg"
            if flags['t']:
                grass.warning(_("JPEG format does not support transparency"))
        elif self.o_format == "gif":
            self.mime_format = "image/gif"
        else:
            grass.fatal(_("Unsupported image format %s") % self.o_format)
        
        self.o_srs = int(options['srs'])
        if self.o_srs <= 0:
            grass.fatal(_("Invalid EPSG code %d") % self.o_srs)
        
        # read projection info
        self.proj_location = grass.read_command('g.proj', 
                                                flags ='jf').rstrip('\n')
        
        self.proj_srs = grass.read_command('g.proj', 
                                           flags = 'jf', 
                                           epsg = str(self.o_srs) ).rstrip('\n')
        
        if not self.proj_srs or not self.proj_location:
            grass.fatal(_("Unable to get projection info"))
        
        # set region 
        self.o_region = options['region']
        if self.o_region:
            if not grass.find_file(name = self.o_region, element = 'windows', mapset = '.')['name']:
                grass.fatal(_("Region <%s> not found") % self.o_region)
        
        if self.o_region:
            s = grass.read_command('g.region',
                                   quiet = True,
                                   flags = 'ug',
                                   region = self.o_region)
            self.region = grass.parse_key_val(s, val_type = float)
        else:
            self.region = grass.region()
        
        min_tile_size = 100
        self.o_maxcols = int(options['maxcols'])
        if self.o_maxcols <= min_tile_size:
            grass.fatal(_("Maxcols must be greater than 100"))
        
        self.o_maxrows = int(options['maxrows'])
        if self.o_maxrows <= min_tile_size:
            grass.fatal(_("Maxrows must be greater than 100"))
        
        # setting optimal tile size according to maxcols and maxrows constraint and region cols and rows      
        self.tile_cols = int(self.region['cols'] / ceil(self.region['cols'] / float(self.o_maxcols)))
        self.tile_rows = int(self.region['rows'] / ceil(self.region['rows'] / float(self.o_maxrows)))
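        # e.g. with region cols = 2500 and maxcols = 1024 this gives
        # ceil(2500 / 1024) = 3 tiles across, so tile_cols = 833 and every
        # tile stays under the maxcols limit (illustrative numbers)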
        
        # suffix for an existing mask (when overridden, the mask will be
        # saved into a raster named self.o_output + this suffix)
        self.original_mask_suffix = "_temp_MASK"
        
        # check names of temporary rasters which the module may create
        maps = []
        for suffix in ('.red', '.green', '.blue', '.alpha', self.original_mask_suffix ):
            rast = self.o_output + suffix
            if grass.find_file(rast, element = 'cell', mapset = '.')['file']:
                maps.append(rast)
        
        if len(maps) != 0:
            grass.fatal(_("Please change the output name, or rename these rasters: %s; "
                          "the module needs to create these temporary maps while running") % ",".join(maps))
        
        # default format for GDAL library
        self.gdal_drv_format = "GTiff"
        
        self._debug("_initialize_parameters", "finished")
Exemplo n.º 48
0
def main():

    pan = options['pan']
    msxlst = options['msx'].split(',')
    outputsuffix = options['suffix']
    custom_ratio = options['ratio']
    center = options['center']
    center2 = options['center2']
    modulation = options['modulation']
    modulation2 = options['modulation2']

    if options['trim']:
        trimming_factor = float(options['trim'])
    else:
        trimming_factor = False

    histogram_match = flags['l']
    second_pass = flags['2']
    color_match = flags['c']

    #    # Check & warn user about "ns == ew" resolution of current region ======
    #    region = grass.region()
    #    nsr = region['nsres']
    #    ewr = region['ewres']
    #
    #    if nsr != ewr:
    #        msg = ('>>> Region's North:South ({ns}) and East:West ({ew}) '
    #               'resolutions do not match!')
    #        msg = msg.format(ns=nsr, ew=ewr)
    #        g.message(msg, flags='w')

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?
    region = grass.region()  # and region settings

    # List images and their properties

    imglst = [pan]
    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in imglst:  # Retrieving Image Info
        images[img] = Info(img, mapset)
        images[img].read()

    panres = images[pan].nsres  # Panchromatic resolution

    grass.use_temp_region()  # to safely modify the region
    run('g.region', res=panres)  # Respect extent, change resolution
    g.message("|! Region's resolution matched to Pan's ({p})".format(p=panres))

    # Loop Algorithm over Multi-Spectral images

    for msx in msxlst:
        g.message("\nProcessing image: {m}".format(m=msx))

        # Tracking command history -- why don't all r.* modules do this?
        cmd_history = []

        #
        # 1. Compute Ratio
        #

        g.message("\n|1 Determining ratio of low to high resolution")

        # Custom Ratio? Skip standard computation method.
        if custom_ratio:
            ratio = float(custom_ratio)
            g.message('Using custom ratio, overriding standard method!',
                      flags='w')

        # Multi-Spectral resolution(s), multiple
        else:
            # Image resolutions
            g.message("   > Retrieving image resolutions")

            msxres = images[msx].nsres

            # check
            if panres == msxres:
                msg = ("The Panchromatic's image resolution ({pr}) "
                       "equals to the Multi-Spectral's one ({mr}). "
                       "Something is probably not right! "
                       "Please check your input images.")
                msg = msg.format(pr=panres, mr=msxres)
                grass.fatal(_(msg))

            # compute ratio
            ratio = msxres / panres
            msg_ratio = ('   >> Resolution ratio '
                         'low ({m:.{dec}f}) to high ({p:.{dec}f}): {r:.1f}')
            msg_ratio = msg_ratio.format(m=msxres, p=panres, r=ratio, dec=3)
            g.message(msg_ratio)

        # 2nd Pass requested, yet Ratio < 5.5
        if second_pass and ratio < 5.5:
            g.message(
                "   >>> Resolution ratio < 5.5, skipping 2nd pass.\n"
                "   >>> If you insist, force it via the <ratio> option!",
                flags='i')
            second_pass = False

        #
        # 2. High Pass Filtering
        #

        g.message('\n|2 High Pass Filtering the Panchromatic Image')

        tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
        tmp = 'tmp.' + grass.basename(tmpfile)  # use its basename
        tmp_pan_hpf = '{tmp}_pan_hpf'.format(tmp=tmp)  # HPF image
        tmp_msx_blnr = '{tmp}_msx_blnr'.format(tmp=tmp)  # Upsampled MSx
        tmp_msx_hpf = '{tmp}_msx_hpf'.format(tmp=tmp)  # Fused image
        tmp_hpf_matrix = grass.tempfile()  # ASCII filter

        # Construct and apply Filter
        hpf = get_high_pass_filter(ratio, center)
        hpf_ascii(center, hpf, tmp_hpf_matrix, second_pass)
        run('r.mfilter',
            input=pan,
            filter=tmp_hpf_matrix,
            output=tmp_pan_hpf,
            title='High Pass Filtered Panchromatic image',
            overwrite=True)

        # 2nd pass
        if second_pass and ratio > 5.5:
            # Temporary files
            tmp_pan_hpf_2 = '{tmp}_pan_hpf_2'.format(
                tmp=tmp)  # 2nd Pass HPF image
            tmp_hpf_matrix_2 = grass.tempfile()  # 2nd Pass ASCII filter
            # Construct and apply 2nd Filter
            hpf_2 = get_high_pass_filter(ratio, center2)
            hpf_ascii(center2, hpf_2, tmp_hpf_matrix_2, second_pass)
            run('r.mfilter',
                input=pan,
                filter=tmp_hpf_matrix_2,
                output=tmp_pan_hpf_2,
                title='2-High-Pass Filtered Panchromatic Image',
                overwrite=True)

        #
        # 3. Upsampling low resolution image
        #

        g.message("\n|3 Upsampling (bilinearly) low resolution image")

        run('r.resamp.interp',
            method='bilinear',
            input=msx,
            output=tmp_msx_blnr,
            overwrite=True)

        #
        # 4. Weighting the High Pass Filtered image(s)
        #

        g.message("\n|4 Weighting the High-Pass-Filtered image (HPFi)")

        # Compute (1st Pass) Weighting
        msg_w = "   > Weighting = StdDev(MSx) / StdDev(HPFi) * " \
            "Modulating Factor"
        g.message(msg_w)

        # StdDev of Multi-Spectral Image(s)
        msx_avg = avg(msx)
        msx_sd = stddev(msx)
        g.message("   >> StdDev of <{m}>: {sd:.3f}".format(m=msx, sd=msx_sd))

        # StdDev of HPF Image
        hpf_sd = stddev(tmp_pan_hpf)
        g.message("   >> StdDev of HPFi: {sd:.3f}".format(sd=hpf_sd))

        # Modulating factor
        modulator = get_modulator_factor(modulation, ratio)
        g.message("   >> Modulating Factor: {m:.2f}".format(m=modulator))

        # weighting HPFi
        weighting = hpf_weight(msx_sd, hpf_sd, modulator, 1)

        #
        # 5. Adding weighted HPF image to upsampled Multi-Spectral band
        #

        g.message("\n|5 Adding weighted HPFi to upsampled image")
        fusion = '{hpf} = {msx} + {pan} * {wgt}'
        fusion = fusion.format(hpf=tmp_msx_hpf,
                               msx=tmp_msx_blnr,
                               pan=tmp_pan_hpf,
                               wgt=weighting)
        grass.mapcalc(fusion)

        # command history
        hst = 'Weighting applied: {msd:.3f} / {hsd:.3f} * {mod:.3f}'
        cmd_history.append(hst.format(msd=msx_sd, hsd=hpf_sd, mod=modulator))

        if second_pass and ratio > 5.5:

            #
            # 4+ 2nd Pass Weighting the High Pass Filtered image
            #

            g.message("\n|4+ 2nd Pass Weighting the HPFi")

            # StdDev of HPF Image #2
            hpf_2_sd = stddev(tmp_pan_hpf_2)
            g.message("   >> StdDev of 2nd HPFi: {h:.3f}".format(h=hpf_2_sd))

            # Modulating factor #2
            modulator_2 = get_modulator_factor2(modulation2)
            msg = '   >> 2nd Pass Modulating Factor: {m:.2f}'
            g.message(msg.format(m=modulator_2))

            # 2nd Pass weighting
            weighting_2 = hpf_weight(msx_sd, hpf_2_sd, modulator_2, 2)

            #
            # 5+ Adding weighted HPF image to upsampled Multi-Spectral band
            #

            g.message("\n|5+ Adding small-kernel-based weighted 2nd HPFi "
                      "back to fused image")

            add_back = '{final} = {msx_hpf} + {pan_hpf} * {wgt}'
            add_back = add_back.format(final=tmp_msx_hpf,
                                       msx_hpf=tmp_msx_hpf,
                                       pan_hpf=tmp_pan_hpf_2,
                                       wgt=weighting_2)
            grass.mapcalc(add_back)

            # 2nd Pass history entry
            hst = "2nd Pass Weighting: {m:.3f} / {h:.3f} * {mod:.3f}"
            cmd_history.append(
                hst.format(m=msx_sd, h=hpf_2_sd, mod=modulator_2))

        if color_match:
            g.message("\n|* Matching output to input color table")
            run('r.colors', map=tmp_msx_hpf, raster=msx)

        #
        # 6. Stretching linearly the HPF-Sharpened image(s) to match the Mean
        #     and Standard Deviation of the input Multi-Spectral image(s)
        #

        if histogram_match:

            # adapt output StdDev and Mean to the input(ted) ones
            g.message("\n|+ Matching histogram of Pansharpened image "
                      "to %s" % (msx),
                      flags='v')

            # Collect stats for linear histogram matching
            msx_hpf_avg = avg(tmp_msx_hpf)
            msx_hpf_sd = stddev(tmp_msx_hpf)

            # expression for mapcalc
            lhm = '{out} = ({hpf} - {hpfavg}) / {hpfsd} * {msxsd} + {msxavg}'
            lhm = lhm.format(out=tmp_msx_hpf,
                             hpf=tmp_msx_hpf,
                             hpfavg=msx_hpf_avg,
                             hpfsd=msx_hpf_sd,
                             msxsd=msx_sd,
                             msxavg=msx_avg)

            # compute
            grass.mapcalc(lhm, quiet=True, overwrite=True)

            # update history string
            cmd_history.append("Linear Histogram Matching: %s" % lhm)

        #
        # Optional. Trim to remove black border effect (rectangular only)
        #

        if trimming_factor:

            tf = trimming_factor

            # communicate
            msg = '\n|* Trimming output image border pixels by '
            msg += '{factor} times the low resolution\n'.format(factor=tf)
            nsew = '   > Input extent: n: {n}, s: {s}, e: {e}, w: {w}'
            nsew = nsew.format(n=region['n'], s=region['s'],
                               e=region['e'], w=region['w'])
            msg += nsew

            g.message(msg)

            # re-set borders (grass.region() returns a dict, so use keys)
            region['n'] -= tf * images[msx].nsres
            region['s'] += tf * images[msx].nsres
            region['e'] -= tf * images[msx].ewres
            region['w'] += tf * images[msx].ewres

            # communicate and act
            msg = '   > Output extent: n: {n}, s: {s}, e: {e}, w: {w}'
            msg = msg.format(n=region['n'], s=region['s'],
                             e=region['e'], w=region['w'])
            g.message(msg)

            # modify only the extent
            run('g.region', n=region['n'], s=region['s'],
                e=region['e'], w=region['w'])
            trim = "{out} = {input}".format(out=tmp_msx_hpf, input=tmp_msx_hpf)
            grass.mapcalc(trim)

        #
        # End of Algorithm

        # history entry
        grass.raster_history(tmp_msx_hpf)

        # add suffix to basename & rename end product
        msx_name = "{base}{suffix}"
        msx_name = msx_name.format(base=msx.split('@')[0], suffix=outputsuffix)
        run("g.rename", raster=(tmp_msx_hpf, msx_name))

        # remove temporary files
        cleanup()

    # restore the original region and print visualisation hints
    grass.del_temp_region()  # restoring previous region settings
    g.message("\n|! Original Region restored")
    g.message(
        "\n>>> Hint, rebalancing colors (via i.colors.enhance) "
        "may improve appearance of RGB composites!",
        flags='i')
Exemplo n.º 49
def main():
    global temp_dist, temp_val

    input = options['input']
    radius = float(options['radius'])
    metric = options['metric']
    old = options['old']
    new = options['new']
    mapunits = flags['m']

    tmp = str(os.getpid())

    temp_dist = "r.grow.tmp.%s.dist" % tmp

    shrink = False
    if radius < 0.0:
        shrink = True
        radius = -radius

    if new == '' and not shrink:
        temp_val = "r.grow.tmp.%s.val" % tmp
        new = temp_val
    else:
        temp_val = None

    if old == '':
        old = input

    if not mapunits:
        kv = grass.region()
        scale = math.sqrt(float(kv['nsres']) * float(kv['ewres']))
        radius *= scale

    if metric == 'euclidean':
        # r.grow.distance reports squared distances for metric=squared,
        # so the radius is squared as well for the comparisons below
        metric = 'squared'
        radius = radius * radius

    # check if input file exists
    if not grass.find_file(input)['file']:
        grass.fatal(_("Raster map <%s> not found") % input)

    # Workaround for r.mapcalc bug #3475:
    # mapcalc will fail if output is a fully qualified map name
    out_name = options['output'].split('@')
    if len(out_name) == 2 and out_name[1] != grass.gisenv()['MAPSET']:
        grass.fatal(_("Output can be written only to the current mapset"))
    output = out_name[0]

    if shrink is False:
        try:
            grass.run_command('r.grow.distance',
                              input=input,
                              metric=metric,
                              distance=temp_dist,
                              value=temp_val)
        except CalledModuleError:
            grass.fatal(_("Growing failed. Removing temporary maps."))

        grass.mapcalc(
            "$output = if(!isnull($input),$old,if($dist < $radius,$new,null()))",
            output=output,
            input=input,
            radius=radius,
            old=old,
            new=new,
            dist=temp_dist)
    else:
        # shrink
        try:
            grass.run_command('r.grow.distance',
                              input=input,
                              metric=metric,
                              distance=temp_dist,
                              value=temp_val,
                              flags='n')
        except CalledModuleError:
            grass.fatal(_("Shrinking failed. Removing temporary maps."))

        grass.mapcalc("$output = if($dist < $radius,null(),$old)",
                      output=output,
                      radius=radius,
                      old=old,
                      dist=temp_dist)

    grass.run_command('r.colors', map=output, raster=input)

    # write cmd history:
    grass.raster_history(output)
Exemplo n.º 50
import grass.script as grass


grass.run_command("g.region", vect="source_shp,target_shp")
influensprocess = 5000
c = grass.region()
n = float(c["n"])
s = float(c["s"])
e = float(c["e"])
w = float(c["w"])


n = n + influensprocess
s = s - influensprocess
e = e + influensprocess
w = w - influensprocess


grass.run_command("g.region", n=n, e=e, s=s, w=w)
Exemplo n.º 51
def main():
    dem = options['demraster']
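    # NOTE: the hard-coded value on the next line overrides the 'demraster' option above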
    dem = 'dem_tinitaly_rocchetta'
    #SDR = options['output']
    weightmap = options['weightmap']


    # region settings
    gregion = grass.region()
    cell_s = float(gregion['nsres'])



    # r.slope.aspect
    # elevation = dem_tinitaly_rocchetta @ SDR
    # slope = slope
    rasterTemp = []
    vectTemp = []
    grass.run_command('r.slope.aspect', elevation=dem, slope="slope1", overwrite=True)
    rasterTemp.append('slope1')

    grass.run_command('r.watershed', flags='s', elevation=dem, accumulation='accD8', drainage='drainD8', overwrite=True)
    rasterTemp.append('accD8')
    rasterTemp.append('drainD8')

    grass.run_command('r.slope.aspect', elevation=dem, slope='slope', format='percent', overwrite=True)
    rasterTemp.append('slope')

    # tif_fdir8 coincides with drainD8

    # read drainage direction map
    tif_fdir8_ar = garray.array()
    tif_fdir8_ar.read('drainD8')
    # convert to float, otherwise overflow in later operations
    tif_fdir8_ar = tif_fdir8_ar.astype(float)
    # r.watershed: negative numbers indicate cells that may receive surface
    # runoff from outside of the current geographic region
    tif_fdir8_ar[(tif_fdir8_ar <= 0)] = 0
    ndv = numpy.min(tif_fdir8_ar)
    tif_fdir8_ar[tif_fdir8_ar == ndv] = numpy.nan

    # create constant array to transform into raster
    const_ar = tif_fdir8_ar * 0 + cell_s

    # zero matrix bigger than F_dir8, to avoid border indexing problems:
    # surround tif_fdir8_ar with a one-cell-wide border of zeros
    Fd8 = numpy.zeros(shape=((tif_fdir8_ar.shape[0]) + 1, (tif_fdir8_ar.shape[1]) + 1), dtype=numpy.float32)
    # populate the matrix
    Fd8[1:Fd8.shape[0], 1:Fd8.shape[1]] = Fd8[1:Fd8.shape[0], 1:Fd8.shape[1]] + tif_fdir8_ar
    # add a bottom row and a right column of zeros
    Fdir8 = numpy.zeros(shape=((Fd8.shape[0]) + 1, (Fd8.shape[1]) + 1), dtype=numpy.float32)
    Fdir8[:Fdir8.shape[0] - 1, :Fdir8.shape[1] - 1] = Fd8
    # ------------
    # read weight map and slope
    tif_wgt_ar = garray.array()
    # the original read a hard-coded 'weight' map here; per its TODO note,
    # the map should come from the 'weightmap' input option
    tif_wgt_ar.read(weightmap)
    tif_slope = garray.array()
    tif_slope.read('slope')

    tif_slope = tif_slope / 100.  # convert the r.slope.aspect percentage to the 0-1 range

    # imposing upper and lower limits to slope, no data here are -1
    tif_slope[(tif_slope >= 0) & (tif_slope < 0.005)] = 0.005
    tif_slope[(tif_slope > 1)] = 1
    tif_slope[(tif_slope < 0)] = -1

    # impose a value bigger than zero in the weight map
    tif_wgt_ar[tif_wgt_ar == 0] = 1e-10

    Ws_1 = 1 / (tif_wgt_ar * tif_slope)
    # convert to float, otherwise overflow in future operations
    Ws_1 = Ws_1.astype(float)
    # r.watershed: negative numbers indicate cells that may receive surface
    # runoff from outside of the current geographic region
    # tif_fdir8_ar[(tif_fdir8_ar <= 0)] = 0
    ndv = numpy.min(Ws_1)
    Ws_1[Ws_1 == ndv] = numpy.nan
    #
    # zero matrix bigger than weight, to avoid border indexing problems, and have same indexing as Fdir8
    Wg = numpy.zeros(shape=((tif_wgt_ar.shape[0]) + 1, (tif_wgt_ar.shape[1]) + 1), dtype=numpy.float32)
    # TODO: replace this variable with Ws_1, i.e. the denominator of Ddn
    Wg[1:Wg.shape[0], 1:Wg.shape[1]] = Wg[1:Fd8.shape[0],
                                       1:Wg.shape[1]] + Ws_1  # the weight applied to the flow length
    # add a bottom row and a right column of zeros
    Wgt = numpy.zeros(shape=((Wg.shape[0]) + 1, (Wg.shape[1]) + 1), dtype=numpy.float32)
    Wgt[:Wgt.shape[0] - 1, :Wgt.shape[1] - 1] = Wg
    #
    start = time.perf_counter()  # for computational time (time.clock() was removed in Python 3.8)
    # Creating a bigger matrix as large as weight(and all the matrices) to store the weighted flow length values
    W_Fl = numpy.zeros(shape=((Wgt.shape[0]), (Wgt.shape[1])), dtype=numpy.float32)
    W_Fl = W_Fl - 1  # to give -1 to NoData after the while loop calculation
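    # The loop below walks upstream from the NoData outlets: each pass finds
    # the cells draining into the current front, accumulates the weighted
    # distance, and advances the front by one cell.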
    #
    # Now run the search algorithm for the weighted flow length
    ND = numpy.where(numpy.isnan(Fdir8))  # coordinates of all the NoData values, starting from them to go forward and compute flow length
    #
    Y = ND[0]  # rows, NoData indexes
    X = ND[1]  # columns, NoData indexes; take care not to swap them
    #
    # initializing lists for outlet and moving cell coordinates, in function of their position
    YC1 = []
    YC2 = []
    YC3 = []
    YC4 = []
    YC5 = []
    YC6 = []
    YC7 = []
    YC8 = []
    XC1 = []
    XC2 = []
    XC3 = []
    XC4 = []
    XC5 = []
    XC6 = []
    XC7 = []
    XC8 = []
    #
    #   Flow directions in r.watershed
    #   4   3   2
    #   5   -   1
    #   6   7   8
    #
    #   Draining in Direction Matrix
    #   8   7   6
    #   1   -   5
    #   2   3   4
    #
    i1 = Fdir8[Y, X - 1]  # Searching for NoData with cells draining into them, 8 directions
    D1 = numpy.where(i1 == 1)  # l
    YC1.extend(Y[D1])  # coordinates satisfying the conditions
    XC1.extend(X[D1])
    W_Fl[YC1, XC1] = 0  # initialize flow length at cells draining to NoData
    #
    i2 = Fdir8[Y + 1, X - 1]  # Searching for NoData with cells draining into them, 8 directions
    D2 = numpy.where(i2 == 2)  # lrad2
    YC2.extend(Y[D2])  # coordinates satisfying the conditions
    XC2.extend(X[D2])
    W_Fl[YC2, XC2] = 0  # initialize flow length at cells draining to NoData
    #
    i3 = Fdir8[Y + 1, X]  # Searching for NoData with cells draining into them, 8 directions
    D3 = numpy.where(i3 == 3)  # l
    YC3.extend(Y[D3])  # coordinates satisfying the conditions
    XC3.extend(X[D3])
    W_Fl[YC3, XC3] = 0  # initialize flow length at cells draining to NoData
    #
    i4 = Fdir8[Y + 1, X + 1]  # Searching for NoData with cells draining into them, 8 directions
    D4 = numpy.where(i4 == 4)  # lrad2
    YC4.extend(Y[D4])  # coordinates satisfying the conditions
    XC4.extend(X[D4])
    W_Fl[YC4, XC4] = 0  # initialize flow length at cells draining to NoData
    #
    i5 = Fdir8[Y, X + 1]  # Searching for NoData with cells draining into them, 8 directions
    D5 = numpy.where(i5 == 5)  # l
    YC5.extend(Y[D5])  # coordinates satisfying the conditions
    XC5.extend(X[D5])
    W_Fl[YC5, XC5] = 0  # initialize flow length at cells draining to NoData
    #
    i6 = Fdir8[Y - 1, X + 1]  # Searching for NoData with cells draining into them, 8 directions
    D6 = numpy.where(i6 == 6)  # lrad2
    YC6.extend(Y[D6])  # coordinates satisfying the conditions
    XC6.extend(X[D6])
    W_Fl[YC6, XC6] = 0  # initialize flow length at cells draining to NoData
    #
    i7 = Fdir8[Y - 1, X]  # Searching for NoData with cells draining into them, 8 directions
    D7 = numpy.where(i7 == 7)  # l
    YC7.extend(Y[D7])  # coordinates satisfying the conditions
    XC7.extend(X[D7])
    W_Fl[YC7, XC7] = 0  # initialize flow length at cells draining to NoData
    #
    i8 = Fdir8[Y - 1, X - 1]  # Searching for NoData with cells draining into them, 8 directions
    D8 = numpy.where(i8 == 8)  # lrad2
    YC8.extend(Y[D8])  # coordinates satisfying the conditions
    XC8.extend(X[D8])
    W_Fl[YC8, XC8] = 0  # initialize flow length at cells draining to NoData
    #
    # start = time.perf_counter()  # to be removed later, only for checking
    count = 1  # "0" passage already done during the previous step
    while len(YC1) > 0 or len(YC2) > 0 or len(YC3) > 0 or len(YC4) > 0 or len(YC5) > 0 or len(YC6) > 0 or len(YC7) > 0 or len(YC8) > 0:
        # Converting into array to be able to do operations
        YYC1=numpy.asarray(YC1);XXC1=numpy.asarray(XC1)
        YYC2=numpy.asarray(YC2);XXC2=numpy.asarray(XC2)
        YYC3=numpy.asarray(YC3);XXC3=numpy.asarray(XC3)
        YYC4=numpy.asarray(YC4);XXC4=numpy.asarray(XC4)
        YYC5=numpy.asarray(YC5);XXC5=numpy.asarray(XC5)
        YYC6=numpy.asarray(YC6);XXC6=numpy.asarray(XC6)
        YYC7=numpy.asarray(YC7);XXC7=numpy.asarray(XC7)
        YYC8=numpy.asarray(YC8);XXC8=numpy.asarray(XC8)
        #
        # Now I can do operations and move towards the right cell.
        # Weighting the flow length: weights are half the sum of the pixel weights * travelled length.
        # The directions are chosen according to Flow_dir step by step, going from the outlet NoData cells to the ridges,
        # each time accounting for the distance (l or l*rad2) multiplied by half the weights of the 2 travelled cells.
        # Then, by variable substitution, we move one step further, adding the previous pixel value to the newly calculated one.
        #
        YYC1 = (YYC1);XXC1 = (XXC1 - 1)  # l
        YYC2 = (YYC2 + 1);XXC2 = (XXC2 - 1)  # lrad2
        YYC3 = (YYC3 + 1);XXC3 = (XXC3)  # l
        YYC4 = (YYC4 + 1);XXC4 = (XXC4 + 1)  # lrad2
        YYC5 = (YYC5);XXC5 = (XXC5 + 1)  # l
        YYC6 = (YYC6 - 1);XXC6 = (XXC6 + 1)  # lrad2
        YYC7 = (YYC7 - 1);XXC7 = (XXC7)  # l
        YYC8 = (YYC8 - 1);XXC8 = (XXC8 - 1)  # lrad2
        #
        if count == 1:  # first pass is zero, like TauDEM; check whether any NoData pixel receives flow, for all 8 directions
            if len(YYC1) > 0:
                W_Fl[YYC1, XXC1] = 0
            if len(YYC2) > 0:
                W_Fl[YYC2, XXC2] = 0
            if len(YYC3) > 0:
                W_Fl[YYC3, XXC3] = 0
            if len(YYC4) > 0:
                W_Fl[YYC4, XXC4] = 0
            if len(YYC5) > 0:
                W_Fl[YYC5, XXC5] = 0
            if len(YYC6) > 0:
                W_Fl[YYC6, XXC6] = 0
            if len(YYC7) > 0:
                W_Fl[YYC7, XXC7] = 0
            if len(YYC8) > 0:
                W_Fl[YYC8, XXC8] = 0
        else:
            W_Fl[YYC1, XXC1] = W_Fl[YC1, XC1] + (cell_s * ((Wgt[YC1, XC1] + Wgt[YYC1, XXC1]) / 2))
            W_Fl[YYC2, XXC2] = W_Fl[YC2, XC2] + (cell_s * math.sqrt(2) * ((Wgt[YC2, XC2] + Wgt[YYC2, XXC2]) / 2))
            W_Fl[YYC3, XXC3] = W_Fl[YC3, XC3] + (cell_s * ((Wgt[YC3, XC3] + Wgt[YYC3, XXC3]) / 2))
            W_Fl[YYC4, XXC4] = W_Fl[YC4, XC4] + (cell_s * math.sqrt(2) * ((Wgt[YC4, XC4] + Wgt[YYC4, XXC4]) / 2))
            W_Fl[YYC5, XXC5] = W_Fl[YC5, XC5] + (cell_s * ((Wgt[YC5, XC5] + Wgt[YYC5, XXC5]) / 2))
            W_Fl[YYC6, XXC6] = W_Fl[YC6, XC6] + (cell_s * math.sqrt(2) * ((Wgt[YC6, XC6] + Wgt[YYC6, XXC6]) / 2))
            W_Fl[YYC7, XXC7] = W_Fl[YC7, XC7] + (cell_s * ((Wgt[YC7, XC7] + Wgt[YYC7, XXC7]) / 2))
            W_Fl[YYC8, XXC8] = W_Fl[YC8, XC8] + (cell_s * math.sqrt(2) * ((Wgt[YC8, XC8] + Wgt[YYC8, XXC8]) / 2))
            #
        #
        # Reconstruct all X and Y of this step and move one step upstream (from the outlets toward the ridges)
        YY = [];XX = []
        YY.extend(YYC1);XX.extend(XXC1)
        YY.extend(YYC2);XX.extend(XXC2)
        YY.extend(YYC3);XX.extend(XXC3)
        YY.extend(YYC4);XX.extend(XXC4)
        YY.extend(YYC5);XX.extend(XXC5)
        YY.extend(YYC6);XX.extend(XXC6)
        YY.extend(YYC7);XX.extend(XXC7)
        YY.extend(YYC8);XX.extend(XXC8)
        #
        YY = numpy.asarray(YY)
        XX = numpy.asarray(XX)
        #
        i1 = Fdir8[YY, XX - 1]  # Searching for cells draining into them, 8 directions
        D1 = numpy.where(i1 == 1)  # l
        YC1 = YY[D1]  # coordinates satisfying the condition; the actual length is added to the previous value on the next pass
        XC1 = XX[D1]
        #
        i2 = Fdir8[YY + 1, XX - 1]  # Searching for cells draining into them, 8 directions
        D2 = numpy.where(i2 == 2)  # lrad2
        YC2 = YY[D2]  # coordinates satisfying the conditions
        XC2 = XX[D2]
        #
        i3 = Fdir8[YY + 1, XX]  # Searching for cells draining into them, 8 directions
        D3 = numpy.where(i3 == 3)  # l
        YC3 = YY[D3]  # coordinates satisfying the conditions
        XC3 = XX[D3]
        #
        i4 = Fdir8[YY + 1, XX + 1]  # Searching for cells draining into them, 8 directions
        D4 = numpy.where(i4 == 4)  # lrad2
        YC4 = YY[D4]  # coordinates satisfying the conditions
        XC4 = XX[D4]
        #
        i5 = Fdir8[YY, XX + 1]  # Searching for cells draining into them, 8 directions
        D5 = numpy.where(i5 == 5)  # l
        YC5 = YY[D5]  # coordinates satisfying the conditions
        XC5 = XX[D5]
        #
        i6 = Fdir8[YY - 1, XX + 1]  # Searching for cells draining into them, 8 directions
        D6 = numpy.where(i6 == 6)  # lrad2
        YC6 = YY[D6]  # coordinates satisfying the conditions
        XC6 = XX[D6]
        #
        i7 = Fdir8[YY - 1, XX]  # Searching for cells draining into them, 8 directions
        D7 = numpy.where(i7 == 7)  # l
        YC7 = YY[D7]  # coordinates satisfying the conditions
        XC7 = XX[D7]
        #
        i8 = Fdir8[YY - 1, XX - 1]  # Searching for cells draining into them, 8 directions
        D8 = numpy.where(i8 == 8)  # lrad2
        YC8 = YY[D8]  # coordinates satisfying the conditions
        XC8 = XX[D8]
        count = count + 1
    #
    elapsed = (time.perf_counter() - start)  # computational time
    print(time.strftime("%d/%m/%Y %H:%M:%S    "), "Process concluded successfully \n", "%.2f" % elapsed,
          'seconds for Weighted-Flow Length calculation with ', int(count), ' iterations')  # truncating the precision

    # reshape the weighted flow length back to the original matrix dimensions
    W_fl = W_Fl[1:W_Fl.shape[0] - 1, 1:W_Fl.shape[1] - 1]
    del W_Fl
    # set zero values to 1 to avoid divisions by zero
    D_down_ar = garray.array()
    W_fl[W_fl == 0] = 1
    D_down_ar[...] = W_fl
    del W_fl
    D_down_ar.write('w_flow_length', null=numpy.nan, overwrite=True)

    """
    --------------------------------------
    WORKING ON D_UP COMPONENT
    --------------------------------------
    """

    grass.run_command('r.watershed', elevation=dem, accumulation='accMDF', convergence=5, memory=300)
    rasterTemp.append('accMDF')

    tif_dtmsca = garray.array()
    # the original read 'acc_watershead_dinf', a map never created in this
    # snippet; the multiple-flow accumulation computed above is used instead
    tif_dtmsca.read('accMDF')

    tif_dtmsca = abs(tif_dtmsca)*cell_s

    acc_final_ar = tif_dtmsca / const_ar

    grass.run_command('r.watershed', elevation=dem, flow=weightmap, accumulation="accW", convergence=5, memory=300)
    rasterTemp.append('accW')

    acc_W_ar = garray.array()
    acc_W_ar.read('accW')


    grass.run_command('r.watershed', elevation=dem, flow='slope', accumulation="accS", convergence=5, memory=300)
    rasterTemp.append('accS')

    acc_S_ar = garray.array()
    acc_S_ar.read('accS')

    # Computing C_mean as (accW + weight) / acc_final
    C_mean_ar = (acc_W_ar + tif_wgt_ar) / acc_final_ar
    del (acc_W_ar)  # free memory
    #
    # Computing S_mean as (accS + slope) / acc_final
    # (the original mistakenly added tif_fdir8_ar, the drainage directions, here)
    S_mean_ar = (acc_S_ar + tif_slope) / acc_final_ar
    del (acc_S_ar, tif_fdir8_ar)  # free memory
    #
    # Computing D_up as "%cmean.tif%" * "%smean.tif%" * SquareRoot("%ACCfinal.tif%" * "%resolution.tif%" * "%resolution.tif%")
    cell_area = (const_ar) ** 2  # change of variables, to be sure
    D_up_ar = C_mean_ar * S_mean_ar * numpy.sqrt(acc_final_ar * cell_area)  # to transform from unit values to square units
    #
    # Computing Connectivity index
    ic_ar = numpy.log10(D_up_ar / D_down_ar)

    SDRmax = 0.8
    IC0 = 0.5
    k = 1
    # SDR = SDRmax / (1 + exp((IC0 - IC) / k)); numpy.exp is needed here
    # because math.exp cannot operate on arrays
    SDRmap = SDRmax / (1 + numpy.exp((IC0 - ic_ar) / k))
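    # Hypothetical continuation (not in the original snippet, which ends
    # without persisting its results): a sketch writing the index and SDR
    # back to GRASS rasters via grass.script.array, mirroring the earlier
    # D_down_ar.write() call; the output map names are placeholders.
    ic_out = garray.array()
    ic_out[...] = ic_ar
    ic_out.write('connectivity_index', null=numpy.nan, overwrite=True)
    sdr_out = garray.array()
    sdr_out[...] = SDRmap
    sdr_out.write('sdr_map', null=numpy.nan, overwrite=True)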
Exemplo n.º 52
def main():
    """Main function"""
    regiondict = grass.region()

    output = options["output"]
    values = options["range"].split(",")
    NewMin = int(values[0].strip())
    NewMax = int(values[1].strip())
    percentile = options["percentile"]
    direction = options["direction"]

    checkPercentile(percentile, direction)
    # And now we can calculate the graded rasters
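    # Each branch below applies the linear rescale
    #   new = (v - OldMin) * (NewMax - NewMin) / (OldMax - OldMin) + NewMin
    # with v = row() or col(); reversed directions swap NewMin and NewMax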
    # for gradient of rows
    if direction == "N-S":
        grass.mapcalc(
            "$newmap = (((row() - $OldMin) * ($NewMax - $NewMin)) / "
            "($OldMax - $OldMin)) + $NewMin",
            newmap=output,
            NewMin=NewMin,
            NewMax=NewMax,
            OldMin=1,
            OldMax=regiondict["rows"],
            overwrite=True,
        )
    elif direction == "S-N":
        grass.mapcalc(
            "$newmap = (((row() - $OldMin) * ($NewMax - $NewMin)) / "
            "($OldMax - $OldMin)) + $NewMin",
            newmap=output,
            NewMin=NewMax,
            NewMax=NewMin,
            OldMin=1,
            OldMax=regiondict["rows"],
            overwrite=True,
        )
    elif direction == "W-E":
        grass.mapcalc(
            "$newmap = (((col() - $OldMin) * ($NewMax - $NewMin)) / "
            "($OldMax - $OldMin)) + $NewMin",
            newmap=output,
            NewMin=NewMin,
            NewMax=NewMax,
            OldMin=1,
            OldMax=regiondict["cols"],
            overwrite=True,
        )
    elif direction == "E-W":
        grass.mapcalc(
            "$newmap = (((col() - $OldMin) * ($NewMax - $NewMin)) / "
            "($OldMax - $OldMin)) + $NewMin",
            newmap=output,
            NewMin=NewMax,
            NewMax=NewMin,
            OldMin=1,
            OldMax=regiondict["cols"],
            overwrite=True,
        )
    elif direction == "NW-SE":
        mat = calculateOblique(regiondict, NewMin, NewMax, percentile)
        createRast(output, mat)
    elif direction == "NE-SW":
        mat = calculateOblique(regiondict, NewMin, NewMax, percentile)
        createRast(output, mat, True)
Exemplo n.º 53
def main():
    inputraster = options["input"]
    number_lines = int(options["number_lines"])
    edge_detection_algorithm = options["edge_detection"]
    no_edge_friction = int(options["no_edge_friction"])
    lane_border_multiplier = int(options["lane_border_multiplier"])
    min_tile_size = None
    if options["min_tile_size"]:
        min_tile_size = float(options["min_tile_size"])
    existing_cutlines = None
    if options["existing_cutlines"]:
        existing_cutlines = options["existing_cutlines"].split(",")
    tiles = options["output"]
    memory = int(options["memory"])
    tiled = False

    if options["tile_width"]:
        tiled = True
        gscript.message(_("Using tiles processing for edge detection"))
        width = int(options["tile_width"])
        height = int(options["tile_height"])
        overlap = int(options["overlap"])

    processes = int(options["processes"])

    global temp_maps
    temp_maps = []
    r = "raster"
    v = "vector"

    if existing_cutlines:
        existingcutlinesmap = "temp_icutlines_existingcutlinesmap_%i" % os.getpid(
        )
        if len(existing_cutlines) > 1:
            gscript.run_command(
                "v.patch",
                input_=existing_cutlines,
                output=existingcutlinesmap,
                quiet=True,
                overwrite=True,
            )
            existing_cutlines = existingcutlinesmap

        gscript.run_command(
            "v.to.rast",
            input_=existing_cutlines,
            output=existingcutlinesmap,
            use="val",
            type_="line,boundary",
            overwrite=True,
            quiet=True,
        )

        temp_maps.append([existingcutlinesmap, r])

    temp_edge_map = "temp_icutlines_edgemap_%d" % os.getpid()
    temp_maps.append([temp_edge_map, r])

    gscript.message(
        _("Creating edge map using <%s> edgedetection algorithm") %
        edge_detection_algorithm)
    if edge_detection_algorithm == "zc":
        kwargs = {
            "input": inputraster,
            "output": temp_edge_map,
            "width_": int(options["zc_width"]),
            "threshold": float(options["zc_threshold"]),
            "quiet": True,
        }

        if tiled:
            grd = GridModule("i.zc",
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             **kwargs)
            grd.run()
        else:
            gscript.run_command("i.zc", **kwargs)

    elif edge_detection_algorithm == "canny":
        if not gscript.find_program("i.edge", "--help"):
            message = _("You need to install the addon i.edge to use ")
            message += _("the Canny edge detector.\n")
            message += _(
                " You can install the addon with 'g.extension i.edge'")
            gscript.fatal(message)

        kwargs = {
            "input": inputraster,
            "output": temp_edge_map,
            "low_threshold": float(options["canny_low_threshold"]),
            "high_threshold": float(options["canny_high_threshold"]),
            "sigma": float(options["canny_sigma"]),
            "quiet": True,
        }

        if tiled:
            grd = GridModule("i.edge",
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             flags="n",
                             **kwargs)
            grd.run()
        else:
            gscript.run_command("i.edge", flags="n", **kwargs)

    else:
        gscript.fatal(
            "Only zero-crossing and Canny available as edge detection algorithms."
        )

    region = gscript.region()
    gscript.message(_("Finding cutlines in both directions"))

    nsrange = float(region.n - region.s - region.nsres)
    ewrange = float(region.e - region.w - region.ewres)
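    # keep the cutline spacing similar in both directions by scaling the
    # number of lines in the shorter dimension to the region's aspect ratio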

    if nsrange > ewrange:
        hnumber_lines = number_lines
        vnumber_lines = max(int(number_lines * (ewrange / nsrange)), 1)
    else:
        vnumber_lines = number_lines
        hnumber_lines = max(int(number_lines * (nsrange / ewrange)), 1)

    # Create the lines in horizontal direction
    nsstep = float(region.n - region.s - region.nsres) / hnumber_lines
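    # cutline start/stop rows, plus 'lane' rows offset by half a step that
    # get a very high cost so each cutline stays inside its own corridor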
    hpointsy = [((region.n - i * nsstep) - region.nsres / 2.0)
                for i in range(0, hnumber_lines + 1)]
    hlanepointsy = [y - nsstep / 2.0 for y in hpointsy]
    hstartpoints = listzip([region.w + 0.2 * region.ewres] * len(hpointsy),
                           hpointsy)
    hstoppoints = listzip([region.e - 0.2 * region.ewres] * len(hpointsy),
                          hpointsy)
    hlanestartpoints = listzip([region.w + 0.2 * region.ewres] *
                               len(hlanepointsy), hlanepointsy)
    hlanestoppoints = listzip([region.e - 0.2 * region.ewres] *
                              len(hlanepointsy), hlanepointsy)

    hlanemap = "temp_icutlines_hlanemap_%i" % os.getpid()
    temp_maps.append([hlanemap, v])
    temp_maps.append([hlanemap, r])

    os.environ["GRASS_VERBOSE"] = "0"
    new = VectorTopo(hlanemap)
    new.open("w")
    for line in listzip(hlanestartpoints, hlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ["GRASS_VERBOSE"]

    gscript.run_command(
        "v.to.rast",
        input_=hlanemap,
        output=hlanemap,
        use="val",
        type_="line",
        overwrite=True,
        quiet=True,
    )

    hbasemap = "temp_icutlines_hbasemap_%i" % os.getpid()
    temp_maps.append([hbasemap, r])

    # Building the cost maps using the following logic
    # - Any pixel not on an edge, nor on an existing cutline gets a
    # no_edge_friction cost, or no_edge_friction x 10 if there are
    # existing cutlines
    # - Any pixel on an edge gets a cost of 1 if there are no existing cutlines,
    # and a cost of no_edge_friction if there are
    # - A lane line gets a very high cost (lane_border_multiplier x cost of no
    # edge pixel - the latter depending on the existence of cutlines).

    mapcalc_expression = "%s = " % hbasemap
    mapcalc_expression += "if(isnull(%s), " % hlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map,
            existingcutlinesmap,
        )
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (
            existingcutlinesmap,
            no_edge_friction,
        )
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command("r.mapcalc",
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)

    hcumcost = "temp_icutlines_hcumcost_%i" % os.getpid()
    temp_maps.append([hcumcost, r])
    hdir = "temp_icutlines_hdir_%i" % os.getpid()
    temp_maps.append([hdir, r])

    # Create the lines in vertical direction
    ewstep = float(region.e - region.w - region.ewres) / vnumber_lines
    vpointsx = [((region.e - i * ewstep) - region.ewres / 2.0)
                for i in range(0, vnumber_lines + 1)]
    vlanepointsx = [x + ewstep / 2.0 for x in vpointsx]
    vstartpoints = listzip(vpointsx,
                           [region.n - 0.2 * region.nsres] * len(vpointsx))
    vstoppoints = listzip(vpointsx,
                          [region.s + 0.2 * region.nsres] * len(vpointsx))
    vlanestartpoints = listzip(vlanepointsx, [region.n - 0.2 * region.nsres] *
                               len(vlanepointsx))
    vlanestoppoints = listzip(vlanepointsx, [region.s + 0.2 * region.nsres] *
                              len(vlanepointsx))

    vlanemap = "temp_icutlines_vlanemap_%i" % os.getpid()
    temp_maps.append([vlanemap, v])
    temp_maps.append([vlanemap, r])

    os.environ["GRASS_VERBOSE"] = "0"
    new = VectorTopo(vlanemap)
    new.open("w")
    for line in listzip(vlanestartpoints, vlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ["GRASS_VERBOSE"]

    gscript.run_command(
        "v.to.rast",
        input_=vlanemap,
        output=vlanemap,
        use="val",
        type_="line",
        overwrite=True,
        quiet=True,
    )

    vbasemap = "temp_icutlines_vbasemap_%i" % os.getpid()
    temp_maps.append([vbasemap, r])
    mapcalc_expression = "%s = " % vbasemap
    mapcalc_expression += "if(isnull(%s), " % vlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map,
            existingcutlinesmap,
        )
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (
            existingcutlinesmap,
            no_edge_friction,
        )
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command("r.mapcalc",
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)

    vcumcost = "temp_icutlines_vcumcost_%i" % os.getpid()
    temp_maps.append([vcumcost, r])
    vdir = "temp_icutlines_vdir_%i" % os.getpid()
    temp_maps.append([vdir, r])

    if processes > 1:
        pmemory = memory / 2.0
        rcv = gscript.start_command(
            "r.cost",
            input_=vbasemap,
            startcoordinates=vstartpoints,
            stopcoordinates=vstoppoints,
            output=vcumcost,
            outdir=vdir,
            memory=pmemory,
            quiet=True,
            overwrite=True,
        )

        rch = gscript.start_command(
            "r.cost",
            input_=hbasemap,
            startcoordinates=hstartpoints,
            stopcoordinates=hstoppoints,
            output=hcumcost,
            outdir=hdir,
            memory=pmemory,
            quiet=True,
            overwrite=True,
        )
        rcv.wait()
        rch.wait()

    else:
        gscript.run_command(
            "r.cost",
            input_=vbasemap,
            startcoordinates=vstartpoints,
            stopcoordinates=vstoppoints,
            output=vcumcost,
            outdir=vdir,
            memory=memory,
            quiet=True,
            overwrite=True,
        )

        gscript.run_command(
            "r.cost",
            input_=hbasemap,
            startcoordinates=hstartpoints,
            stopcoordinates=hstoppoints,
            output=hcumcost,
            outdir=hdir,
            memory=memory,
            quiet=True,
            overwrite=True,
        )

    hlines = "temp_icutlines_hlines_%i" % os.getpid()
    temp_maps.append([hlines, r])
    vlines = "temp_icutlines_vlines_%i" % os.getpid()
    temp_maps.append([vlines, r])

    if processes > 1:
        rdh = gscript.start_command(
            "r.drain",
            input_=hcumcost,
            direction=hdir,
            startcoordinates=hstoppoints,
            output=hlines,
            flags="d",
            quiet=True,
            overwrite=True,
        )

        rdv = gscript.start_command(
            "r.drain",
            input_=vcumcost,
            direction=vdir,
            startcoordinates=vstoppoints,
            output=vlines,
            flags="d",
            quiet=True,
            overwrite=True,
        )

        rdh.wait()
        rdv.wait()

    else:
        gscript.run_command(
            "r.drain",
            input_=hcumcost,
            direction=hdir,
            startcoordinates=hstoppoints,
            output=hlines,
            flags="d",
            quiet=True,
            overwrite=True,
        )

        gscript.run_command(
            "r.drain",
            input_=vcumcost,
            direction=vdir,
            startcoordinates=vstoppoints,
            output=vlines,
            flags="d",
            quiet=True,
            overwrite=True,
        )

    # Combine horizontal and vertical lines
    temp_raster_tile_borders = "temp_icutlines_raster_tile_borders_%i" % os.getpid(
    )
    temp_maps.append([temp_raster_tile_borders, r])
    gscript.run_command(
        "r.patch",
        input_=[hlines, vlines],
        output=temp_raster_tile_borders,
        quiet=True,
        overwrite=True,
    )

    gscript.message(_("Creating vector polygons"))

    # Create vector polygons

    # First we need to shrink the region a bit to make sure that all vector
    # points / lines fall within the raster
    gscript.use_temp_region()
    gscript.run_command("g.region",
                        s=region.s + region.nsres,
                        e=region.e - region.ewres,
                        quiet=True)

    region_map = "temp_icutlines_region_map_%i" % os.getpid()
    temp_maps.append([region_map, v])
    temp_maps.append([region_map, r])
    gscript.run_command("v.in.region",
                        output=region_map,
                        type_="line",
                        quiet=True,
                        overwrite=True)

    gscript.del_temp_region()

    gscript.run_command(
        "v.to.rast",
        input_=region_map,
        output=region_map,
        use="val",
        type_="line",
        quiet=True,
        overwrite=True,
    )

    temp_raster_polygons = "temp_icutlines_raster_polygons_%i" % os.getpid()
    temp_maps.append([temp_raster_polygons, r])
    gscript.run_command(
        "r.patch",
        input_=[temp_raster_tile_borders, region_map],
        output=temp_raster_polygons,
        quiet=True,
        overwrite=True,
    )

    temp_raster_polygons_thin = "temp_icutlines_raster_polygons_thin_%i" % os.getpid(
    )
    temp_maps.append([temp_raster_polygons_thin, r])
    gscript.run_command(
        "r.thin",
        input_=temp_raster_polygons,
        output=temp_raster_polygons_thin,
        quiet=True,
        overwrite=True,
    )

    # Create a series of temporary map names as we have to go
    # through several steps until we reach the final map.
    temp_vector_polygons1 = "temp_icutlines_vector_polygons1_%i" % os.getpid()
    temp_maps.append([temp_vector_polygons1, v])
    temp_vector_polygons2 = "temp_icutlines_vector_polygons2_%i" % os.getpid()
    temp_maps.append([temp_vector_polygons2, v])
    temp_vector_polygons3 = "temp_icutlines_vector_polygons3_%i" % os.getpid()
    temp_maps.append([temp_vector_polygons3, v])
    temp_vector_polygons4 = "temp_icutlines_vector_polygons4_%i" % os.getpid()
    temp_maps.append([temp_vector_polygons4, v])

    gscript.run_command(
        "r.to.vect",
        input_=temp_raster_polygons_thin,
        output=temp_vector_polygons1,
        type_="line",
        flags="t",
        quiet=True,
        overwrite=True,
    )

    # Erase all category values from the lines
    gscript.run_command(
        "v.category",
        input_=temp_vector_polygons1,
        op="del",
        cat="-1",
        output=temp_vector_polygons2,
        quiet=True,
        overwrite=True,
    )

    # Transform lines to boundaries
    gscript.run_command(
        "v.type",
        input_=temp_vector_polygons2,
        from_type="line",
        to_type="boundary",
        output=temp_vector_polygons3,
        quiet=True,
        overwrite=True,
    )

    # Add centroids
    gscript.run_command(
        "v.centroids",
        input_=temp_vector_polygons3,
        output=temp_vector_polygons4,
        quiet=True,
        overwrite=True,
    )

    # If a threshold is given erase polygons that are too small
    if min_tile_size:
        gscript.run_command(
            "v.clean",
            input_=temp_vector_polygons4,
            tool=["rmdangle", "rmarea"],
            threshold=[-1, min_tile_size],
            output=tiles,
            quiet=True,
            overwrite=True,
        )
    else:
        gscript.run_command("g.copy",
                            vect=[temp_vector_polygons4, tiles],
                            quiet=True,
                            overwrite=True)

    gscript.vector_history(tiles)
Exemplo n.º 54
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
        "ned": {
            "product": "National Elevation Dataset (NED)",
            "dataset": {
                "ned1sec": (1.0 / 3600, 30, 100),
                "ned13sec": (1.0 / 3600 / 3, 10, 30),
                "ned19sec": (1.0 / 3600 / 9, 3, 10),
            },
            "subset": {},
            "extent": ["1 x 1 degree", "15 x 15 minute"],
            "format": "IMG",
            "extension": "img",
            "zip": True,
            "srs": "wgs84",
            "srs_proj4": "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            "interpolation": "bilinear",
            "url_split": "/",
        },
        "nlcd": {
            "product": "National Land Cover Database (NLCD)",
            "dataset": {
                "National Land Cover Database (NLCD) - 2001":
                (1.0 / 3600, 30, 100),
                "National Land Cover Database (NLCD) - 2006":
                (1.0 / 3600, 30, 100),
                "National Land Cover Database (NLCD) - 2011":
                (1.0 / 3600, 30, 100),
            },
            "subset": {
                "Percent Developed Imperviousness",
                "Percent Tree Canopy",
                "Land Cover",
            },
            "extent": ["3 x 3 degree"],
            "format": "GeoTIFF",
            "extension": "tif",
            "zip": True,
            "srs": "wgs84",
            "srs_proj4": "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            "interpolation": "nearest",
            "url_split": "/",
        },
        "naip": {
            "product": "USDA National Agriculture Imagery Program (NAIP)",
            "dataset": {
                "Imagery - 1 meter (NAIP)": (1.0 / 3600 / 27, 1, 3)
            },
            "subset": {},
            "extent": [
                "3.75 x 3.75 minute",
            ],
            "format": "JPEG2000",
            "extension": "jp2",
            "zip": False,
            "srs": "wgs84",
            "srs_proj4": "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            "interpolation": "nearest",
            "url_split": "/",
        },
        "lidar": {
            "product": "Lidar Point Cloud (LPC)",
            "dataset": {
                "Lidar Point Cloud (LPC)": (1.0 / 3600 / 9, 3, 10)
            },
            "subset": {},
            "extent": [""],
            "format": "LAS,LAZ",
            "extension": "las,laz",
            "zip": True,
            "srs": "",
            "srs_proj4": "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            "interpolation": "nearest",
            "url_split": "/",
        },
    }

    # Set GRASS GUI options and flags to python variables
    gui_product = options["product"]

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string["product"]
    product_format = nav_string["format"]
    product_extensions = tuple(nav_string["extension"].split(","))
    product_is_zip = nav_string["zip"]
    product_srs = nav_string["srs"]
    product_proj4 = nav_string["srs_proj4"]
    product_interpolation = nav_string["interpolation"]
    product_url_split = nav_string["url_split"]
    product_extent = nav_string["extent"]
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == "ned":
        gui_dataset = options["ned_dataset"]
        ned_api_name = ""
        if options["ned_dataset"] == "ned1sec":
            ned_data_abbrv = "ned_1arc_"
            ned_api_name = "1 arc-second"
        if options["ned_dataset"] == "ned13sec":
            ned_data_abbrv = "ned_13arc_"
            ned_api_name = "1/3 arc-second"
        if options["ned_dataset"] == "ned19sec":
            ned_data_abbrv = "ned_19arc_"
            ned_api_name = "1/9 arc-second"
        product_tag = product + " " + ned_api_name

    if gui_product == "nlcd":
        gui_dataset = options["nlcd_dataset"]
        if options["nlcd_dataset"] == "nlcd2001":
            gui_dataset = "National Land Cover Database (NLCD) - 2001"
        if options["nlcd_dataset"] == "nlcd2006":
            gui_dataset = "National Land Cover Database (NLCD) - 2006"
        if options["nlcd_dataset"] == "nlcd2011":
            gui_dataset = "National Land Cover Database (NLCD) - 2011"

        if options["nlcd_subset"] == "landcover":
            gui_subset = "Land Cover"
        if options["nlcd_subset"] == "impervious":
            gui_subset = "Percent Developed Imperviousness"
        if options["nlcd_subset"] == "canopy":
            gui_subset = "Percent Tree Canopy"
        product_tag = gui_dataset

    if gui_product == "naip":
        gui_dataset = "Imagery - 1 meter (NAIP)"
        product_tag = nav_string["product"]

    has_pdal = gscript.find_program(pgm="v.in.pdal")
    if gui_product == "lidar":
        gui_dataset = "Lidar Point Cloud (LPC)"
        product_tag = nav_string["product"]
        if not has_pdal:
            gscript.warning(
                _("Module v.in.pdal is missing,"
                  " any downloaded data will not be processed."))
    # Assigning further parameters from GUI
    gui_output_layer = options["output_name"]
    gui_resampling_method = options["resampling_method"]
    gui_i_flag = flags["i"]
    gui_k_flag = flags["k"]
    work_dir = options["output_directory"]
    memory = options["memory"]
    nprocs = options["nprocs"]

    preserve_extracted_files = gui_k_flag
    use_existing_extracted_files = True
    preserve_imported_tiles = gui_k_flag
    use_existing_imported_tiles = True

    if not os.path.isdir(work_dir):
        gscript.fatal(
            _("Directory <{}> does not exist."
              " Please create it.").format(work_dir))

    # Returns current units
    try:
        proj = gscript.parse_command("g.proj", flags="g")
        if gscript.locn_is_latlong():
            product_resolution = nav_string["dataset"][gui_dataset][0]
        elif float(proj["meters"]) == 1:
            product_resolution = nav_string["dataset"][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string["dataset"][gui_dataset][2]
    except TypeError:
        product_resolution = False
    if gui_product == "lidar" and options["resolution"]:
        product_resolution = float(options["resolution"])

    if gui_resampling_method == "default":
        gui_resampling_method = nav_string["interpolation"]
        gscript.verbose(
            _("The default resampling method for product {product} is {res}").
            format(product=gui_product, res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    wgs84 = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
    min_coords = gscript.read_command(
        "m.proj",
        coordinates=(gregion["w"], gregion["s"]),
        proj_out=wgs84,
        separator="comma",
        flags="d",
    )
    max_coords = gscript.read_command(
        "m.proj",
        coordinates=(gregion["e"], gregion["n"]),
        proj_out=wgs84,
        separator="comma",
        flags="d",
    )
    min_list = min_coords.split(",")[:2]
    max_list = max_coords.split(",")[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = quote_plus(gui_prod_str)
    prod_format = quote_plus(product_format)
    prod_extent = quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://tnmaccess.nationalmap.gov/api/v1/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == "nlcd":
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try_again_message = _(
        "Possibly, the query has timed out. Check network configuration and try again."
    )
    try:
        TNM_API_GET = urlopen(TNM_API_URL, timeout=12)
    except HTTPError as error:
        gscript.fatal(
            _("HTTP(S) error from USGS TNM API:"
              " {code}: {reason} ({instructions})").format(
                  reason=error.reason,
                  code=error.code,
                  instructions=try_again_message))
    except (URLError, OSError, IOError) as error:
        # Catching also SSLError and potentially others which are
        # subclasses of IOError in Python 2 and of OSError in Python 3.
        gscript.fatal(
            _("Error accessing USGS TNM API: {error} ({instructions})").format(
                error=error, instructions=try_again_message))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON["errors"]:
            TNM_API_error = return_JSON["errors"]
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)
        if gui_product == "lidar" and options["title_filter"]:
            return_JSON["items"] = [
                item for item in return_JSON["items"]
                if options["title_filter"] in item["title"]
            ]
            return_JSON["total"] = len(return_JSON["items"])

    except:
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON["total"])
    tiles_needed_count = 0
    # TODO: Make the tolerance configurable.
    # Some combinations produce >10 byte differences.
    size_diff_tolerance = 5
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON["items"]:
            TNM_file_title = f["title"]
            TNM_file_URL = str(f["downloadURL"])
            TNM_file_size = int(f["sizeInBytes"])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == "ned":
                local_file_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir,
                                              ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # If the file exists, do not download it,
            # but if it is incomplete (e.g. an interrupted download), redownload it.
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size -
                       TNM_file_size) > size_diff_tolerance:
                    gscript.verbose(
                        _("Size of local file {filename} ({local_size}) differs"
                          " from a file size specified in the API ({api_size})"
                          " by {difference} bytes"
                          " which is more than tolerance ({tolerance})."
                          " It will be downloaded again.").format(
                              filename=local_file_path,
                              local_size=existing_local_file_size,
                              api_size=TNM_file_size,
                              difference=abs(existing_local_file_size -
                                             TNM_file_size),
                              tolerance=size_diff_tolerance,
                          ))
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(
            _("TNM API ERROR or Zero tiles available for given input parameters."
              ))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: fix this way of reporting and merge it with the one in use
    if exist_tile_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)

    # format JSON size from bytes into the needed units for the combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if 6 < len_total_size < 10:
            total_size_float = total_size * 1e-6
            total_size_str = str("{0:.2f}".format(total_size_float) + " MB")
        if len_total_size >= 10:
            total_size_float = total_size * 1e-9
            total_size_str = str("{0:.2f}".format(total_size_float) + " GB")
    else:
        total_size_str = "0"

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = "none"

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == "nlcd":
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = (
                        "NLCD {0} data unavailable for input parameters".
                        format(gui_subset))
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = "\n".join(data_info).format(
            size=total_size_str,
            count=file_download_count,
            srs=product_srs,
            tile=TNM_file_titles_info,
        )
    print(data_info)

    if gui_i_flag:
        gscript.info(
            _("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == "ned":
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()["Content-Length"])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
                gscript.percent(1, 1, 1)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except URLError:
            gscript.fatal(
                _("USGS download request has timed out. Network or formatting error."
                  ))
        except Exception:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported
    # pre-extraction stats are unreliable, so collect stats during extraction
    used_existing_extracted_tiles_num = 0
    removed_extracted_tiles_num = 0
    old_extracted_tiles_num = 0
    extracted_tiles_num = 0
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        files_to_process = len(local_zip_path_list)
        for i, z in enumerate(local_zip_path_list):
            # TODO: measure only for the files being unzipped
            gscript.percent(i, files_to_process, 10)
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.lower().endswith(product_extensions):
                            extracted_tile = os.path.join(work_dir, str(f))
                            remove_and_extract = True
                            if os.path.exists(extracted_tile):
                                if use_existing_extracted_files:
                                    # if the downloaded file is newer
                                    # than the extracted one, we extract again
                                    if os.path.getmtime(
                                            extracted_tile) < os.path.getmtime(
                                                z):
                                        remove_and_extract = True
                                        old_extracted_tiles_num += 1
                                    else:
                                        remove_and_extract = False
                                        used_existing_extracted_tiles_num += 1
                                else:
                                    remove_and_extract = True
                                if remove_and_extract:
                                    removed_extracted_tiles_num += 1
                                    os.remove(extracted_tile)
                            if remove_and_extract:
                                extracted_tiles_num += 1
                                read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    if not preserve_extracted_files:
                        cleanup_list.append(extracted_tile)
            except IOError as error:
                cleanup_list.append(extracted_tile)
                gscript.fatal(
                    _("Unable to locate or extract IMG file '{filename}'"
                      " from ZIP archive '{zipname}': {error}").format(
                          filename=extracted_tile, zipname=z, error=error))
        gscript.percent(1, 1, 1)
        # TODO: do this before the extraction begins
        gscript.verbose(
            _("Extracted {extracted} new tiles and"
              " used {used} existing tiles").format(
                  used=used_existing_extracted_tiles_num,
                  extracted=extracted_tiles_num))
        if old_extracted_tiles_num:
            gscript.verbose(
                _("Found {removed} existing tiles older"
                  " than the corresponding downloaded archive").format(
                      removed=old_extracted_tiles_num))
        if removed_extracted_tiles_num:
            gscript.verbose(
                _("Removed {removed} existing tiles").format(
                    removed=removed_extracted_tiles_num))

    if gui_product == "lidar" and not has_pdal:
        gscript.fatal(
            _("Module v.in.pdal is missing,"
              " cannot process downloaded data."))

    # operations for extracted or complete files available locally
    # We are looking only for the existing maps in the current mapset,
    # but theoretically we could be getting them from other mapsets
    # on search path or from the whole location. User may also want to
    # store the individual tiles in a separate mapset.
    # The big assumption here is the naming of the maps (it is a smaller
    # assumption for the files in a dedicated download directory).
    used_existing_imported_tiles_num = 0
    imported_tiles_num = 0
    mapset = get_current_mapset()
    files_to_import = len(local_tile_path_list)

    process_list = []
    process_id_list = []
    process_count = 0
    num_tiles = len(local_tile_path_list)

    with Manager() as manager:
        results = manager.dict()
        for i, t in enumerate(local_tile_path_list):
            # create variables for use in GRASS GIS import process
            LT_file_name = os.path.basename(t)
            LT_layer_name = os.path.splitext(LT_file_name)[0]
            # we are removing the files if requested even if we don't use them
            # do not remove by default with NAIP, there are no zip files
            if gui_product != "naip" and not preserve_extracted_files:
                cleanup_list.append(t)
            # TODO: unlike the files, we don't compare date with input
            if use_existing_imported_tiles and map_exists(
                    "raster", LT_layer_name, mapset):
                patch_names.append(LT_layer_name)
                used_existing_imported_tiles_num += 1
            else:
                in_info = _("Importing and reprojecting {name}"
                            " ({count} out of {total})...").format(
                                name=LT_file_name,
                                count=i + 1,
                                total=files_to_import)
                gscript.info(in_info)

                process_count += 1
                if gui_product != "lidar":
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i,
                                                      LT_layer_name),
                        target=run_file_import,
                        kwargs=dict(
                            identifier=i,
                            results=results,
                            input=t,
                            output=LT_layer_name,
                            resolution="value",
                            resolution_value=product_resolution,
                            extent="region",
                            resample=product_interpolation,
                            memory=memory,
                        ),
                    )
                else:
                    srs = options["input_srs"]
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i,
                                                      LT_layer_name),
                        target=run_lidar_import,
                        kwargs=dict(
                            identifier=i,
                            results=results,
                            input=t,
                            output=LT_layer_name,
                            input_srs=srs if srs else None,
                        ),
                    )
                process.start()
                process_list.append(process)
                process_id_list.append(i)

            # Wait for processes to finish when we reached the max number
            # of processes.
            if process_count == nprocs or i == num_tiles - 1:
                exitcodes = 0
                for process in process_list:
                    process.join()
                    exitcodes += process.exitcode
                if exitcodes != 0:
                    if nprocs > 1:
                        gscript.fatal(
                            _("Parallel import and reprojection failed."
                              " Try running with nprocs=1."))
                    else:
                        gscript.fatal(
                            _("Import and reprojection step failed."))
                for identifier in process_id_list:
                    if "errors" in results[identifier]:
                        gscript.warning(results[identifier]["errors"])
                    else:
                        patch_names.append(results[identifier]["output"])
                        imported_tiles_num += 1
                # Empty the process list
                process_list = []
                process_id_list = []
                process_count = 0
        # no process should be left now
        assert not process_list
        assert not process_id_list
        assert not process_count

    gscript.verbose(
        _("Imported {imported} new tiles and"
          " used {used} existing tiles").format(
              used=used_existing_imported_tiles_num,
              imported=imported_tiles_num))

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # v.surf.rst lidar params
    rst_params = dict(tension=25, smooth=0.1, npmin=100)

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command("g.region",
                                        res=product_resolution,
                                        flags="a")
                if gui_product == "naip":
                    for i in ("1", "2", "3", "4"):
                        patch_names_i = [
                            name + "." + i for name in patch_names
                        ]
                        output = gui_output_layer + "." + i
                        gscript.run_command("r.patch",
                                            input=patch_names_i,
                                            output=output)
                        gscript.raster_history(output)
                elif gui_product == "lidar":
                    gscript.run_command(
                        "v.patch",
                        flags="nzb",
                        input=patch_names,
                        output=gui_output_layer,
                    )
                    gscript.run_command("v.surf.rst",
                                        input=gui_output_layer,
                                        elevation=gui_output_layer,
                                        nprocs=nprocs,
                                        **rst_params)
                else:
                    gscript.run_command("r.patch",
                                        input=patch_names,
                                        output=gui_output_layer)
                    gscript.raster_history(gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added"
                            ).format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if not -k flag
                if not preserve_imported_tiles:
                    if gui_product == "naip":
                        for i in ("1", "2", "3", "4"):
                            patch_names_i = [
                                name + "." + i for name in patch_names
                            ]
                            gscript.run_command("g.remove",
                                                type="raster",
                                                name=patch_names_i,
                                                flags="f")
                    elif gui_product == "lidar":
                        gscript.run_command(
                            "g.remove",
                            type="vector",
                            name=patch_names + [gui_output_layer],
                            flags="f",
                        )
                    else:
                        gscript.run_command("g.remove",
                                            type="raster",
                                            name=patch_names,
                                            flags="f")
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
            temp_down_count = _(
                "{0} of {1} tiles successfully imported and patched").format(
                    completed_tiles_count, tiles_needed_count)
            gscript.info(temp_down_count)
        elif len(patch_names) == 1:
            if gui_product == "naip":
                for i in ("1", "2", "3", "4"):
                    gscript.run_command(
                        "g.rename",
                        raster=(patch_names[0] + "." + i,
                                gui_output_layer + "." + i),
                    )
            elif gui_product == "lidar":
                if product_resolution:
                    gscript.run_command("g.region",
                                        res=product_resolution,
                                        flags="a")
                gscript.run_command("v.surf.rst",
                                    input=patch_names[0],
                                    elevation=gui_output_layer,
                                    nprocs=nprocs,
                                    **rst_params)
                if not preserve_imported_tiles:
                    gscript.run_command("g.remove",
                                        type="vector",
                                        name=patch_names[0],
                                        flags="f")
            else:
                gscript.run_command("g.rename",
                                    raster=(patch_names[0], gui_output_layer))
            temp_down_count = _("Tile successfully imported")
            gscript.info(temp_down_count)
        else:
            gscript.fatal(
                _("No tiles imported successfully. Nothing to patch."))
    else:
        gscript.fatal(
            _("Error in getting or importing the data (see above). Please retry."
              ))

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = (
            "<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == "ned":
        gscript.run_command("r.colors",
                            map=gui_output_layer,
                            color="elevation")

    # composite NAIP
    if gui_product == "naip":
        gscript.use_temp_region()
        gscript.run_command("g.region", raster=gui_output_layer + ".1")
        gscript.run_command(
            "r.composite",
            red=gui_output_layer + ".1",
            green=gui_output_layer + ".2",
            blue=gui_output_layer + ".3",
            output=gui_output_layer,
        )
        gscript.raster_history(gui_output_layer)
        gscript.del_temp_region()
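
A minimal standalone sketch of the chunked-download pattern used above, which streams each file to disk in fixed-size pieces instead of reading the whole response into memory (Python 3 urllib; the function name, url handling, and return values are illustrative assumptions, not part of r.in.usgs):

from urllib.request import urlopen
from urllib.error import URLError

def download_in_chunks(url, local_path, chunk_size=16 * 1024, timeout=12):
    # stream a remote file to disk without holding it all in memory
    try:
        response = urlopen(url, timeout=timeout)
        total_bytes = int(response.info()["Content-Length"])
        written = 0
        with open(local_path, "wb") as local_file:
            while True:
                chunk = response.read(chunk_size)
                if not chunk:
                    break
                local_file.write(chunk)
                written += len(chunk)
        return written, total_bytes
    except URLError as error:
        # network failure or timeout; the caller decides how to report it
        raise RuntimeError("download of {0} failed: {1}".format(url, error))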
Exemplo n.º 55
0
def main():
    """
    FlowFill
    """
    # netCDF4
    try:
        from netCDF4 import Dataset
    except ImportError:
        g.message(flags='e',
                  message=('netCDF4 not detected. Install pip3 and ' +
                           'then type at the command prompt: ' +
                           '"pip3 install netCDF4".'))

    options, flags = gscript.parser()
    _input = options['input']
    _np = options['np']
    _threshold = options['threshold']
    _h_runoff = options['h_runoff']
    _h_runoff_raster = options['h_runoff_raster']
    _ties = options['ties']
    _ffpath = options['ffpath']
    _output = options['output']
    _water = options['water']
    """
    import os
    import numpy as np
    from netCDF4 import Dataset
    # GRASS
    from grass import script as gscript
    from grass.script import array as garray
    from grass.pygrass.modules.shortcuts import raster as r
    from grass.pygrass.modules.shortcuts import general as g

    # FOR TESTING:
    _input = 'DEM_MODFLOW'
    _np = 4
    _threshold = 0.001
    #_h_runoff = 1.
    _h_runoff = ''
    #_h_runoff_raster = ''
    _h_runoff_raster = 'DEM_MODFLOW'
    _ties = 'PREF'
    _ffpath = 'flowfill'
    _output = 'tmpout'
    _water = 'tmpout_water'
    """

    # Check for overwrite -- should be unnecessary thanks to GRASS parser
    _rasters = np.array(list(gscript.parse_command('g.list',
                                                   type='raster').keys()))
    if (_rasters == _output).any() or (_rasters == _water).any():
        if gscript.overwrite() is False:
            g.message(flags='e', message="output would overwrite " + _output)

    # Check for proper number of processors
    try:
        _np = int(_np)
    except ValueError:
        g.message(flags='e',
                  message="Number of processors must be an integer.")

    if _np < 3:
        g.message(flags='e', message="FlowFill requires 3 or more processors.")

    # Check for proper option set
    if _h_runoff != '':
        if _h_runoff_raster != '':
            g.message(flags='e',
                      message='Only one of "h_runoff" and ' +
                      '"h_runoff_raster" may be set')
    elif _h_runoff_raster == '':
        g.message(flags='e',
                  message='Either "h_runoff" or ' +
                  '"h_runoff_raster" must be set')

    if _output == '' and _water == '':
        g.message(flags='w', message='No output is set.')

    # Set up runoff options
    if _h_runoff_raster != '':
        _runoff_bool = 'Y'
    else:
        _h_runoff = float(_h_runoff)
        _runoff_bool = 'N'

    # Get computational region
    n_columns = gscript.region()['cols']
    n_rows = gscript.region()['rows']

    # Output DEM as temporary file for FORTRAN
    temp_FlowFill_input_file = gscript.tempfile(create=False)
    dem = garray.array()
    dem.read(_input, null=-999999)
    dem_array = np.array(dem[:]).astype(np.float32)
    del dem
    newnc = Dataset(temp_FlowFill_input_file, "w", format="NETCDF4")
    newnc.createDimension('x', n_columns)
    newnc.createDimension('y', n_rows)
    newnc.createVariable('value', 'f4', ('y', 'x'))  # z
    newnc.variables['value'][:] = dem_array
    newnc.close()
    del newnc
    #r.out_gdal(input=_input, output=temp_DEM_input_file, format='netCDF',
    #           overwrite=True)

    # Output runoff raster as temporary file for FORTRAN
    if _h_runoff_raster != '':
        temp_FlowFill_runoff_file = gscript.tempfile(create=False)
        rr = garray.array()
        rr.read(_h_runoff_raster, null=0.0)
        rr_array = np.array(rr[:]).astype(np.float32)
        del rr
        newnc = Dataset(temp_FlowFill_runoff_file, "w", format="NETCDF4")
        newnc.createDimension('x', n_columns)
        newnc.createDimension('y', n_rows)
        newnc.createVariable('value', 'f4', ('y', 'x'))  # z
        newnc.variables['value'][:] = rr_array
        newnc.close()
        # Get the mean value for the floating-point depressions correction
        _h_runoff = np.mean(rr_array[dem_array != -999999])
    else:
        _h_runoff_raster = 'NoRaster'  # A dummy value for the parser
        temp_FlowFill_runoff_file = ''

    # Run FlowFill
    temp_FlowFill_output_file = gscript.tempfile(create=False)
    mpirunstr = 'mpirun -np '+str(_np)+' '+_ffpath+' ' +\
              str(_h_runoff)+' '+temp_FlowFill_input_file+' ' +\
              str(n_columns)+' '+str(n_rows)+' ' +\
              str(_threshold)+' '+temp_FlowFill_output_file+' ' +\
              _runoff_bool+' '+temp_FlowFill_runoff_file+' '+_ties
    print('')
    print('Sending command to FlowFill:')
    print(mpirunstr)
    print('')

    _mpirun_error_flag = False

    popen = subprocess.Popen(mpirunstr,
                             stdout=subprocess.PIPE,
                             shell=True,
                             universal_newlines=True)
    for stdout_line in iter(popen.stdout.readline, ""):
        print(stdout_line, end='')
        if 'mpirun was unable to find the specified executable file' in \
                                      stdout_line:
            _mpirun_error_flag = True
    popen.stdout.close()
    if _mpirun_error_flag:
        print('')
        g.message(
            flags='e',
            message='FlowFill executable not found.\n' +
            'If you have not installed FlowFill, please download it ' +
            'from https://github.com/KCallaghan/FlowFill, ' +
            'and follow the directions in the README to compile and ' +
            'install it on your system.\n' +
            'This should then work with the default "ffpath". ' +
            'Otherwise, you may simply have typed in an incorrect ' +
            '"ffpath".')


    # Import the output -- padded by two cells (remove these)
    outrast = np.fromfile(temp_FlowFill_output_file + '.dat', dtype=np.float32)
    outrast_water = np.fromfile(temp_FlowFill_output_file + '_water.dat',
                                dtype=np.float32)
    outrast = outrast.reshape(n_rows + 2, n_columns + 2)[:-2, 1:-1]
    outrast_water = outrast_water.reshape(n_rows + 2, n_columns + 2)[:-2, 1:-1]

    # Mask to return NAN to NAN in GRASS -- FIX SHIFT ISSUE WITH KERRY
    dem_array_mask = dem_array.copy()
    dem_array_mask[dem_array_mask == -999999] = np.nan
    dem_array_mask = dem_array_mask * 0 + 1
    outrast *= dem_array_mask
    outrast_water *= dem_array_mask

    # Save the output to GRASS GIS
    dem = garray.array()
    dem[:] = outrast
    dem.write(_output, overwrite=gscript.overwrite())
    dem[:] = outrast_water
    dem.write(_water, overwrite=gscript.overwrite())
    del dem
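
The reshape above assumes FlowFill writes the grid with a ring of ghost cells around the data, which must be stripped before writing back to GRASS. A self-contained check of that trimming with synthetic sizes (the symmetric [1:-1, 1:-1] slice here is an assumption for illustration; the module itself uses [:-2, 1:-1], the noted shift issue):

import numpy as np

n_rows, n_columns = 4, 5
# synthetic padded output: one ghost cell on every side of the grid
padded = np.arange((n_rows + 2) * (n_columns + 2), dtype=np.float32)
grid = padded.reshape(n_rows + 2, n_columns + 2)
core = grid[1:-1, 1:-1]  # drop one ghost row/column from each edge
assert core.shape == (n_rows, n_columns)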
Exemplo n.º 56
0
def main():
    """
    Input for GSFLOW
    """

    reg = grass.region()

    options, flags = grass.parser()

    basin_mouth_E = options['E']
    basin_mouth_N = options['N']

    accum_thresh = options['threshold']

    # Create drainage direction, flow accumulation, and rivers

    # Manually create streams from accumulation.
    # The one funny step is the cleaning w/ snap, because r.thin allows cells that are
    # diagonal to each other to be next to each other -- creating boxes along the channel
    # that are not consistent with stream topology
    grass.mapcalc('streams_unthinned = flowAccum > '+str(accum_thresh), overwrite=True)
    grass.run_command('r.null', map='streams_unthinned', setnull=0)
    grass.run_command('r.thin', input='streams_unthinned', output='streams', overwrite=True)
    grass.run_command('r.to.vect', input='streams', output='streams_raw', type='line', overwrite=True)
    grass.run_command('v.clean', input='streams_raw', output='streams', tool='snap', threshold=1.42*(grass.region()['nsres'] + grass.region()['ewres'])/2., flags='c', overwrite=True) # threshold is one cell
    grass.run_command('v.to.rast', input='streams', output='streams_unthinned', use='val', val=1, overwrite=True)
    grass.run_command('r.thin', input='streams_unthinned', output='streams', overwrite=True)
    grass.run_command('r.to.vect', input='streams', output='streams', type='line', overwrite=True)
    grass.run_command('v.to.rast', input='streams', output='streams', use='cat', overwrite=True)
    # Create drainage basins
    grass.run_command('r.stream.basins', direction='drainageDirection', stream_rast='streams', basins='basins', overwrite=True)
    # If there is any more need to work with nodes, I should check the code I wrote for Kelly Monteleone's paper -- this has river identification and extraction, including intersection points.


    # Vectorize drainage basins
    grass.run_command('r.to.vect', input='basins', output='basins', type='area', flags='v', overwrite=True)

    # Then remove all sub-basins and segments that have negative flow accumulation
    # (i.e. have contributions from outside the map)

    ###################################################################
    # Intermediate step: Remove all basins that have offmap flow
    # i.e., those containing cells with negative flow accumulation
    ###################################################################

    # Method 3 -- even easier
    grass.mapcalc("has_offmap_flow = (flowAccum < 0)", overwrite=True)
    grass.run_command('r.null', map='has_offmap_flow', setnull=0)
    grass.run_command('r.to.vect', input='has_offmap_flow', output='has_offmap_flow', type='point', overwrite=True)
    grass.run_command('v.db.addcolumn', map='has_offmap_flow', columns='badbasin_cats integer')
    grass.run_command('v.what.vect', map='has_offmap_flow', column='badbasin_cats', query_map='basins', query_column='cat', dmax=60)
    colNames = np.array(grass.vector_db_select('has_offmap_flow', layer=1)['columns'])
    # offmap incoming flow points
    colValues = np.array(grass.vector_db_select('has_offmap_flow', layer=1)['values'].values())
    badcats = colValues[:,colNames == 'badbasin_cats'].squeeze()
    badcats = badcats[badcats != '']
    badcats = badcats.astype(int)
    badcats = list(set(list(badcats)))
    # basins for full cat list
    colNames = np.array(grass.vector_db_select('basins', layer=1)['columns'])
    colValues = np.array(grass.vector_db_select('basins', layer=1)['values'].values())
    allcats = colValues[:,colNames == 'cat'].astype(int).squeeze()
    allcats = list(set(list(allcats)))
    # xor to goodcats
    #goodcats = set(badcats).symmetric_difference(allcats)
    # but better in case somehow there are badcats that are not allcats to do NOT
    goodcats = list(set(allcats) - set(badcats))
    goodcats_str = ''
    for cat in goodcats:
      goodcats_str += str(cat) + ','
    goodcats_str = goodcats_str[:-1] # super inefficient but quick
    grass.run_command('g.rename', vect='basins,tmp', overwrite=True)
    grass.run_command('v.extract', input='tmp', output='basins', cats=goodcats_str)
    grass.run_command('g.rename', vect='streams,tmp', overwrite=True)
    grass.run_command('v.extract', input='tmp', output='streams', cats=goodcats_str)
    #grass.run_command('g.rename', vect='stream_nodes,tmp', overwrite=True)
    #grass.run_command('v.extract', input='tmp', output='stream_nodes', cats=goodcats_str)

    # Fix pixellated pieces -- formerly here due to one-pixel-basin issue
    reg = grass.region()
    grass.run_command('g.rename', vect='basins,basins_messy', overwrite=True)
    grass.run_command('v.clean', input='basins_messy', output='basins', tool='rmarea', threshold=reg['nsres']*reg['ewres'], overwrite=True)

    # Optional, but recommended because not all basins need to connect:
    # choose a subset of the region in which to do the PRMS calculation
    grass.run_command( 'r.water.outlet', input='drainageDirection', output='studyBasin', coordinates=str(basin_mouth_E)+','+str(basin_mouth_N) , overwrite=True)
    # Vectorize
    grass.run_command( 'r.to.vect', input='studyBasin', output='studyBasin', type='area', overwrite=True)
    # If there are dangling areas (single-pixel?), just drop them. Not sure if this is the best way to do it
    # No check for two equal areas -- if we have this, there are more fundamental problems in defining 
    # a watershed in contiguous units

    #"""
    # ONLY IF MORE THAN ONE STUDY BASIN -- remove small areas
    grass.run_command( 'v.db.addcolumn', map='studyBasin', columns='area_m2 double precision' )
    grass.run_command( 'v.db.dropcolumn', map='studyBasin', columns='label' )
    grass.run_command( 'v.to.db', map='studyBasin', columns='area_m2', option='area', units='meters')
    drainageAreasRaw = sorted( grass.parse_command( 'v.db.select', map='studyBasin', flags='c').keys() ) # could update to grass.vector_db_select
    drainageAreasList = []
    for row in drainageAreasRaw:
      # cat, area
      drainageAreasList.append(row.split('|'))
    drainageAreasOnly = np.array(drainageAreasList).astype(float)
    catsOnly = drainageAreasOnly[:,0].astype(int)
    drainageAreasOnly = drainageAreasOnly[:,1]
    row_with_max_drainage_area = (drainageAreasOnly == np.max(drainageAreasOnly)).nonzero()[0][0]
    cat_with_max_drainage_area = catsOnly[row_with_max_drainage_area]
    grass.run_command('g.rename', vect='studyBasin,tmp', overwrite=True)
    grass.run_command('v.extract', input='tmp', output='studyBasin', cats=cat_with_max_drainage_area, overwrite=True)
    grass.run_command('g.remove', type='vector', name='tmp', flags='f')
    grass.run_command('v.to.rast', input='studyBasin', output='studyBasin', use='val', value=1, overwrite=True)
    #"""
    """
    # Remove small areas -- easier, though not as sure, as the method above
    grass.run_command('v.rename', vect='studyBasin,tmp', overwrite=True)
    grass.run_command('v.clean', input='tmp', output='studyBasin', tool='rmarea', threshold=1.01*(grass.region()['nsres'] * grass.region()['ewres']), flags='c', overwrite=True) # threshold is one cell
    """


    ###############
    # PLACEHOLDER #
    ###################################################################
    # To do in near future: limit to this basin
    ###################################################################

    # Next, get the order of basins the old-fashioned way: coordinates of endpoints of lines
    # Because I can't use GRASS to query multiple points
    #grass.run_command('v.extract', input='streams', output='streamSegments', type='line', overwrite=True)
    # Maybe I don't even need nodes! 9/4/16 -- nope, doesn't seem so.
    grass.run_command('g.copy', vect='streams,streamSegments') # vector copy; the v.db steps below need a vector map
    grass.run_command('v.db.addcolumn', map='streamSegments', columns='z double precision, flow_accum double precision, x1 double precision, y1 double precision, x2 double precision, y2 double precision')
    grass.run_command('v.to.db', map='streamSegments', option='start', columns='x1, y1')
    grass.run_command('v.to.db', map='streamSegments', option='end', columns='x2, y2')

    colNames = np.array(grass.vector_db_select('streamSegments')['columns'])
    colValues = np.array(grass.vector_db_select('streamSegments')['values'].values())
    cats = colValues[:,colNames == 'cat'].astype(int).squeeze()
    xy1 = colValues[:,(colNames == 'x1') + (colNames == 'y1')].astype(float)
    xy2 = colValues[:,(colNames == 'x2') + (colNames == 'y2')].astype(float)
    xy  = np.vstack((xy1, xy2))

    # xy1: UPSTREAM
    # xy2: DOWNSTREAM
    # (I checked.)
    # So now can use this information to find headwaters and mouths

    # Not sure that this is necessary
    nsegs_at_point_1 = []
    nsegs_at_point_2 = []
    for row in xy1:
      nsegs_at_point_1.append(np.sum( np.prod(xy == row, axis=1)))
    for row in xy2:
      nsegs_at_point_2.append(np.sum( np.prod(xy == row, axis=1)))
    nsegs_at_point_1 = np.array(nsegs_at_point_1)
    nsegs_at_point_2 = np.array(nsegs_at_point_2)


    # HRU's have same numbers as their enclosed segments
    # NOT TRUE IN GENERAL -- JUST FOR THIS CASE WITH SUB-BASINS -- WILL NEED TO FIX IN FUTURE



    #############
    # Now, let's copy/rename the sub-basins to HRU and the streamSegments to segment and give them attributes
    ###########################################################################################################

    # Attributes (in order given in manual)

    # HRU
    hru_columns = []
    # Self ID
    hru_columns.append('id integer') # nhru
    # Basic Physical Attributes (Geometry)
    hru_columns.append('hru_area double precision') # acres (!!!!)
    hru_columns.append('hru_aspect double precision') # Mean aspect [degrees]
    hru_columns.append('hru_elev double precision') # Mean elevation
    hru_columns.append('hru_lat double precision') # Latitude of centroid
    hru_columns.append('hru_slope double precision') # Mean slope [percent]
    # Basic Physical Attributes (Other)
    #hru_columns.append('hru_type integer') # 0=inactive; 1=land; 2=lake; 3=swale; almost all will be 1
    #hru_columns.append('elev_units integer') # 0=feet; 1=meters. 0=default. I think I will set this to 1 by default.
    # Measured input
    hru_columns.append('outlet_sta integer') # Index of streamflow station at basin outlet:
                                         #   station number if it has one, 0 if not
    #    Note that the below specify projections and not lat/lon; they really seem
    #    to work for any projected coordinates, with _x, _y, in meters, and _xlong, 
    #    _ylat, in feet (i.e. they are just northing and easting). The meters and feet
    #    are not just simple conversions, but actually are required for different
    #    modules in the code, and are hence redundant but intentional.
    hru_columns.append('hru_x double precision') # Easting [m]
    hru_columns.append('hru_xlong double precision') # Easting [feet]
    hru_columns.append('hru_y double precision') # Northing [m]
    hru_columns.append('hru_ylat double precision') # Northing [feet]
    # Streamflow and lake routing
    hru_columns.append('K_coef double precision') # Travel time of flood wave to next downstream segment;
                                                  #   this is the Muskingum storage coefficient
                                                  #   1.0 for reservoirs, diversions, and segments flowing
                                                  #   out of the basin
    hru_columns.append('x_coef double precision') # Amount of attenuation of flow wave;
                                                  #   this is the Muskingum routing weighting factor
                                                  #   range: 0.0--0.5; default 0.2
                                                  #   0 for all segments flowing out of the basin
    hru_columns.append('hru_segment integer') # ID of stream segment to which flow will be routed
                                              #   this is for non-cascade routing (flow goes directly
                                              #   from HRU to stream segment)
    hru_columns.append('obsin_segment integer') # Index of measured streamflow station that replaces
                                                #   inflow to a segment

    # Segments
    segment_columns = []
    # Self ID
    segment_columns.append('id integer') # nsegment
    # Streamflow and lake routing
    segment_columns.append('tosegment integer') # Index of downstream segment to which a segment
                                                #   flows (thus differentiating it from hru_segment,
                                                #   which is for HRU's, though segment and HRU ID's
                                                #   are the same when HRU's are sub-basins

    # PRODUCE THE DATA TABLES
    ##########################

    # Create strings
    hru_columns = ",".join(hru_columns)
    segment_columns = ",".join(segment_columns)

    #"""
    # Copy
    grass.run_command('g.copy', vect='basins,HRU', overwrite=True)
    grass.run_command('g.copy', vect='streamSegments,segment', overwrite=True)
    #"""

    # Rename / subset
    """
    # OR GO BACK TO HRU_messy
    grass.run_command('v.overlay', ainput='basins', binput='studyBasin', operator='and', output='HRU_messy', overwrite=True)
    grass.run_command('v.overlay', ainput='streamSegments', binput='studyBasin', operator='and', output='segment_messy', overwrite=True)
    # And clean as well
    grass.run_command('v.clean', input='HRU_messy', output='HRU', tool='rmarea', threshold=reg['nsres']*reg['ewres']*40, overwrite=True)
    grass.run_command('v.clean', input='segment_messy', output='segment', tool='rmdangle', threshold=reg['nsres']*2, overwrite=True)
    # And now that the streams and HRU's no longer have the same cat values, fix 
    # this.
    grass.run_command('v.db.droptable', map='HRU', flags='f')
    grass.run_command('v.db.droptable', map='segment', flags='f')
    #grass.run_command('v.category', input='HRU', option='del', cat='-1', out='tmp', overwrite=True)
    #grass.run_command('v.category', input='tmp', option='add', out='HRU' overwrite=True)
    grass.run_command('v.db.addtable', map='HRU')
    grass.run_command('v.db.addtable', map='segment')

    """

    #grass.run_command('v.clean', input='segment_messy', output='HRU', tool='rmarea', threshold=reg['nsres']*reg['ewres']*20, overwrite=True)


    # Add columns to tables
    grass.run_command('v.db.addcolumn', map='HRU', columns=hru_columns)
    grass.run_command('v.db.addcolumn', map='segment', columns=segment_columns)


    # Produce the data table entries
    ##################################

    """
    # ID numbers
    # There should be a way to do this all at once, but...
    for i in range(len(cats)):
      grass.run_command('v.db.update', map='HRU', column='id', value=nhru[i], where='cat='+str(cats[i]))
    nsegment = nhru.copy() # ONLY FOR THIS SPECIAL CASE -- will be different in general
    for i in range(len(cats)):
      grass.run_command('v.db.update', map='segment', column='id', value=nsegment[i], where='cat='+str(cats[i]))
    """

    nhru = np.arange(1, xy1.shape[0]+1)
    nhrut = []
    for i in range(len(nhru)):
      nhrut.append( (nhru[i], cats[i]) )
    # Access the HRU's 
    hru = VectorTopo('HRU')
    # Open the map with topology:
    hru.open('rw')
    # Create a cursor
    cur = hru.table.conn.cursor()
    # Use it to loop across the table
    cur.executemany("update HRU set id=? where cat=?", nhrut)
    # Commit changes to the table
    hru.table.conn.commit()
    # Close the table
    hru.close()

    # if you want to append to table
    # cur.executemany("update HRU(id) values(?)", nhrut) # "insert into" will add rows

    # Same for segments
    nsegment = nhru.copy() # ONLY FOR THIS SPECIAL CASE -- will be different in general
    nsegmentt = nhrut # ONLY FOR THIS SPECIAL CASE -- will be different in general

    # Somehow only works after I v.clean, not right after v.overlay
    segment = VectorTopo('segment')
    segment.open('rw')
    cur = segment.table.conn.cursor()
    cur.executemany("update segment set id=? where cat=?", nsegmentt)
    segment.table.conn.commit()
    segment.close()

    #hru_columns.append('hru_area double precision')
    grass.run_command('v.to.db', map='HRU', option='area', columns='hru_area', units='acres')

    # GET MEAN VALUES FOR THESE NEXT ONES, ACROSS THE BASIN

    # hru_columns.append('hru_aspect double precision') # Mean aspect [degrees]
    # hru_columns.append('hru_slope double precision') # Mean slope [percent]
    # Slope
    grass.run_command('r.slope.aspect', elevation='srtm', slope='tmp', aspect='aspect', format='percent', overwrite=True) # zscale=0.01 also works to make percent be decimal 0-1
    grass.mapcalc('slope = tmp / 100.', overwrite=True)
    grass.run_command('v.rast.stats', map='HRU', raster='slope', method='average', column_prefix='tmp', flags='c')
    grass.run_command('v.db.update', map='HRU', column='hru_slope', query_column='tmp_average')
    grass.run_command('v.db.dropcolumn', map='HRU', column='tmp_average')
    # Dealing with conversion from degrees (no good average) to something I can
    # average -- x- and y-vectors
    # Geographic coordinates, so sin=x, cos=y.... not that it matters so long 
    # as I am consistent in how I return to degrees
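    # Worked example: aspects of 350 and 10 degrees have sine components
    # that cancel and cosine components that add, so the vector mean lands
    # at the 0/360 crossing instead of the misleading arithmetic mean, 180.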
    grass.mapcalc('aspect_x = sin(aspect)', overwrite=True)
    grass.mapcalc('aspect_y = cos(aspect)', overwrite=True)
    #grass.run_command('v.db.addcolumn', map='HRU', columns='aspect_x_sum double precision, aspect_y_sum double precision, ncells_in_hru integer')
    grass.run_command('v.rast.stats', map='HRU', raster='aspect_x', method='sum', column_prefix='aspect_x', flags='c')
    grass.run_command('v.rast.stats', map='HRU', raster='aspect_y', method='sum', column_prefix='aspect_y', flags='c')
    # Not actually needed, but maybe good to know
    #grass.run_command('v.rast.stats', map='HRU', raster='aspect_y', method='number', column_prefix='tmp', flags='c')
    #grass.run_command('v.db.renamecolumn', map='HRU', column='tmp_number,ncells_in_hru')
    # NO TRIG FUNCTIONS IN SQLITE!
    #grass.run_command('v.db.update', map='HRU', column='hru_aspect', query_column='DEGREES(ATN2(aspect_y_sum, aspect_x_sum))') # Getting 0, why?
    hru = VectorTopo('HRU')
    hru.open('rw')
    cur = hru.table.conn.cursor()
    cur.execute("SELECT cat,aspect_x_sum,aspect_y_sum FROM %s" %hru.name)
    _arr = np.array(cur.fetchall())
    _cat = _arr[:,0]
    _aspect_x_sum = _arr[:,1]
    _aspect_y_sum = _arr[:,2]
    aspect_angle = np.arctan2(_aspect_y_sum, _aspect_x_sum) * 180./np.pi
    aspect_angle[aspect_angle < 0] += 360 # all positive
    aspect_angle_cat = np.vstack((aspect_angle, _cat)).transpose()
    cur.executemany("update HRU set hru_aspect=? where cat=?", aspect_angle_cat)
    hru.table.conn.commit()
    hru.close()

    # hru_columns.append('hru_elev double precision') # Mean elevation
    grass.run_command('v.rast.stats', map='HRU', raster='srtm', method='average', column_prefix='tmp', flags='c')
    grass.run_command('v.db.update', map='HRU', column='hru_elev', query_column='tmp_average')
    grass.run_command('v.db.dropcolumn', map='HRU', column='tmp_average')

    # get x,y of centroid -- but have areas not in database table, that do have
    # centroids, and having a hard time finding a good way to get rid of them!
    # They have duplicate category values!
    # Perhaps these are little dangles on the edges of the vectorization where
    # the raster value was the same but pinched out into 1-a few cells?
    # From looking at map, lots of extra centroids on area boundaries, and removing
    # small areas (though threshold hard to guess) gets rid of these

    """
    g.copy vect=HRU,HRUorig # HACK!!!
    v.clean in=HRUorig out=HRU tool=rmarea --o thresh=15000
    """

    #grass.run_command( 'g.rename', vect='HRU,HRU_too_many_centroids')
    #grass.run_command( 'v.clean', input='HRU_too_many_centroids', output='HRU', tool='rmdac')
    grass.run_command('v.db.addcolumn', map='HRU', columns='centroid_x double precision, centroid_y double precision')
    grass.run_command( 'v.to.db', map='HRU', type='centroid', columns='centroid_x, centroid_y', option='coor', units='meters')

    # hru_columns.append('hru_lat double precision') # Latitude of centroid
    colNames = np.array(grass.vector_db_select('HRU', layer=1)['columns'])
    colValues = np.array(grass.vector_db_select('HRU', layer=1)['values'].values())
    xy = colValues[:,(colNames=='centroid_x') + (colNames=='centroid_y')]
    np.savetxt('_xy.txt', xy, delimiter='|', fmt='%s')
    grass.run_command('m.proj', flags='od', input='_xy.txt', output='_lonlat.txt', overwrite=True)
    lonlat = np.genfromtxt('_lonlat.txt', delimiter='|',)[:,:2]
    lonlat_cat = np.concatenate((lonlat, np.expand_dims(_cat, 1)), axis=1)

    # why not just get lon too?
    grass.run_command('v.db.addcolumn', map='HRU', columns='hru_lon double precision')

    hru = VectorTopo('HRU')
    hru.open('rw')
    cur = hru.table.conn.cursor()
    cur.executemany("update HRU set hru_lon=?, hru_lat=? where cat=?", lonlat_cat)
    hru.table.conn.commit()
    hru.close()

    # Easting and Northing for other columns
    grass.run_command('v.db.update', map='HRU', column='hru_x', query_column='centroid_x')
    grass.run_command('v.db.update', map='HRU', column='hru_xlong', query_column='centroid_x*3.28084') # feet
    grass.run_command('v.db.update', map='HRU', column='hru_y', query_column='centroid_y')
    grass.run_command('v.db.update', map='HRU', column='hru_ylat', query_column='centroid_y*3.28084') # feet


    # Streamflow and lake routing
    # tosegment
    """
    # THIS IS THE NECESSARY PART
    # CHANGED (BELOW) TO RE-DEFINE NUMBERS IN SEQUENCE AS HRU'S INSTEAD OF USING
    # THE CAT VALUES
    # Get the first channels in the segment
    tosegment = np.zeros(len(cats)) # default to 0 if they do not flow to another segment
    # Loop over all segments
    #for i in range(len(cats)):
    # From outlet segment
    for i in range(len(xy2)):
      # to inlet segment
      inlets = np.prod(xy1 == xy2[i], axis=1)
      # Update inlet segments with ID of outlets
      tosegment[inlets.nonzero()] = cats[i]
    tosegment_cat = tosegment.copy()
    """

    tosegment_cats = np.zeros(len(cats)).astype(int) # default to 0 if they do not flow to another segment
    tosegment = np.zeros(len(cats)).astype(int) # default to 0 if they do not flow to another segment
    # From outlet segment
    for i in range(len(xy2)):
      # to outlet segment
      outlets = np.prod(xy2 == xy1[i], axis=1)
      # Update outlet segments with ID of inlets
      tosegment[outlets.nonzero()] = nhru[i]
      tosegment_cats[outlets.nonzero()] = cats[i]

    """
      # BACKWARDS!
      # to inlet segment
      inlets = np.prod(xy1 == xy2[i], axis=1)
      # Update inlet segments with ID of outlets
      tosegment_cats[inlets.nonzero()] = cats[i]
    """

    # Now, just update tosegment (segments) and hru_segment (hru's)
    # In this case, they are the same.
    nsegment = nhru.copy() # ONLY FOR THIS SPECIAL CASE -- will be different in general
    nsegmentt = nhrut # ONLY FOR THIS SPECIAL CASE -- will be different in general
    # Tuple for upload to SQL
    # 0 is the default value if it doesn't go into any other segment (i.e. flows
    # off-map)
    tosegmentt = []
    tosegment_cats_t = []
    for i in range(len(nsegment)):
      tosegmentt.append( (tosegment[i], nsegment[i]) )
      tosegment_cats_t.append( (tosegment_cats[i], cats[i]) )
    # Once again, special case
    hru_segmentt = tosegmentt

    # Loop check!
    # Weak loop checker - will only detect direct ping-pong.
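    # Example: rows (5, 9) and (9, 5) in tosegmentt are reverses of each
    # other, flagging segments 5 and 9 as draining into one another.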
    loops = []
    tosegmenta = np.array(tosegmentt)
    for i in range(len(tosegmenta)):
      for j in range(len(tosegmenta)):
        if (tosegmenta[i] == tosegmenta[j][::-1]).all():
          loops.append(tosegmenta[i])

    segment = VectorTopo('segment')
    segment.open('rw')
    cur = segment.table.conn.cursor()
    cur.executemany("update segment set tosegment=? where id=?", tosegmentt)
    segment.table.conn.commit()
    segment.close()

    hru = VectorTopo('HRU')
    hru.open('rw')
    cur = hru.table.conn.cursor()
    cur.executemany("update HRU set hru_segment=? where id=?", hru_segmentt)
    hru.table.conn.commit()
    hru.close()


    #grass.run_command('g.rename', vect='HRU_all_2,HRU', overwrite=True)
    #grass.run_command('g.rename', vect='segment_all_2,segment', overwrite=True)

    # In study basin?
    grass.run_command('v.db.addcolumn', map='segment', columns='in_study_basin int')
    grass.run_command('v.db.addcolumn', map='HRU', columns='in_study_basin int')
    grass.run_command('v.what.vect', map='segment', column='in_study_basin', query_map='studyBasin', query_column='value')
    grass.run_command('v.what.vect', map='HRU', column='in_study_basin', query_map='segment', query_column='in_study_basin')

    # Save global segment+HRU
    grass.run_command('g.rename', vect='HRU,HRU_all')
    grass.run_command('g.rename', vect='segment,segment_all')

    # Output HRU -- will need to ensure that this is robust!
    grass.run_command('v.extract', input='HRU_all', output='HRU', where='in_study_basin=1', overwrite=True)
    grass.run_command('v.extract', input='segment_all', output='segment', where='in_study_basin=1', overwrite=True)


    colNames = np.array(grass.vector_db_select('segment')['columns'])
    colValues = np.array(grass.vector_db_select('segment')['values'].values())
    cats = colValues[:,colNames == 'cat'].astype(int).squeeze()
    xy1 = colValues[:,(colNames == 'x1') + (colNames == 'y1')].astype(float)
    xy2 = colValues[:,(colNames == 'x2') + (colNames == 'y2')].astype(float)
    xy  = np.vstack((xy1, xy2))

    # Redo nhru down here
    nhru = np.arange(1, xy1.shape[0]+1)
    nhrut = []
    for i in range(len(nhru)):
      nhrut.append( (nhru[i], cats[i]) )
      """
      n = 1
      if i != 1:
        nhrut.append( (n, cats[i]) )
        n += 1
      """
      
    hru = VectorTopo('HRU')
    hru.open('rw')
    cur = hru.table.conn.cursor()
    cur.executemany("update HRU set id=? where cat=?", nhrut)
    hru.table.conn.commit()
    hru.close()

    # if you want to append to table
    # cur.executemany("update HRU(id) values(?)", nhrut) # "insert into" will add rows

    # Same for segments
    nsegment = nhru.copy() # ONLY FOR THIS SPECIAL CASE -- will be different in general
    nsegmentt = nhrut # ONLY FOR THIS SPECIAL CASE -- will be different in general

    # Somehow only works after I v.clean, not right after v.overlay
    segment = VectorTopo('segment')
    segment.open('rw')
    cur = segment.table.conn.cursor()
    cur.executemany("update segment set id=? where cat=?", nsegmentt)
    segment.table.conn.commit()
    segment.close()


    tosegment_cats = np.zeros(len(cats)).astype(int) # default to 0 if they do not flow to another segment
    tosegment = np.zeros(len(cats)).astype(int) # default to 0 if they do not flow to another segment
    # From outlet segment
    for i in range(len(xy2)):
      # to outlet segment
      outlets = np.prod(xy2 == xy1[i], axis=1)
      # Update outlet segments with ID of inlets
      tosegment[outlets.nonzero()] = nhru[i]
      tosegment_cats[outlets.nonzero()] = cats[i]

    # Now, just update tosegment (segments) and hru_segment (hru's)
    # In this case, they are the same.
    nsegment = nhru.copy() # ONLY FOR THIS SPECIAL CASE -- will be different in general
    nsegmentt = nhrut # ONLY FOR THIS SPECIAL CASE -- will be different in general
    # Tuple for upload to SQL
    # 0 is the default value if it doesn't go into any other segment (i.e. flows
    # off-map)
    tosegmentt = []
    tosegment_cats_t = []
    for i in range(len(nsegment)):
      tosegmentt.append( (tosegment[i], nsegment[i]) )
      tosegment_cats_t.append( (tosegment_cats[i], cats[i]) )
    # Once again, special case
    hru_segmentt = tosegmentt

    # Loop check!
    # Weak loop checker - will only detect direct ping-pong.
    loops = []
    tosegmenta = np.array(tosegmentt)
    for i in range(len(tosegmenta)):
      for j in range(len(tosegmenta)):
        if (tosegmenta[i] == tosegmenta[j][::-1]).all():
          loops.append(tosegmenta[i])


    segment = VectorTopo('segment')
    segment.open('rw')
    cur = segment.table.conn.cursor()
    cur.executemany("update segment set tosegment=? where id=?", tosegmentt)
    segment.table.conn.commit()
    segment.close()

    hru = VectorTopo('HRU')
    hru.open('rw')
    cur = hru.table.conn.cursor()
    cur.executemany("update HRU set hru_segment=? where id=?", hru_segmentt)
    hru.table.conn.commit()
    hru.close()

    # More old-fashioned way:
    os.system('v.db.select segment sep=comma > segment.csv')
    os.system('v.db.select HRU sep=comma > HRU.csv')
    # and then sort by id, manually
    # And then manually change the last segment's "tosegment" to 0.
    # Except in this case, it was 0!
    # Maybe I managed to do this automatically above... but tired and late, 
    # so will check later
    # but hoping I did something right by re-doing all of the above before
    # saving (and doing so inside this smaller basin)

    print ""
    print "PRMS PORTION COMPLETE."
    print ""



    ###########
    # MODFLOW #
    ###########

    print ""
    print "STARTING MODFLOW PORTION."
    print ""

    # Generate coarse box for MODFLOW (ADW, 4 September, 2016)

    grass.run_command('g.region', rast='srtm')
    grass.run_command('g.region', n=7350000, s=7200000, w=170000, e=260000)
    reg = grass.region()
    MODFLOWres = 2000.
    grass.run_command('v.to.rast', input='HRU', output='allHRUs', use='val', val=1.0, overwrite=True)
    grass.run_command('r.null', map='allHRUs', null='0')
    grass.run_command('r.colors', map='allHRUs', color='grey', flags='n')
    grass.run_command('g.region', res=MODFLOWres)
    grass.run_command('r.resamp.stats', method='average', input='allHRUs', output='fraction_of_HRU_in_MODFLOW_cell', overwrite=True)
    grass.run_command('r.colors', map='fraction_of_HRU_in_MODFLOW_cell', color='grey', flags='n')


    print ""
    print "MODFLOW PORTION COMPLETE."
    print ""
Exemplo n.º 57
0
def main():
    res = options['res']
    poly1 = options['map']
    if "@" in poly1:
        poly1 = poly1.split("@")[0]
    poly2 = options['query_map']
    qcol = options['query_column']
    if not res:
        cur_region = grass.region()
        res = cur_region['nsres']
    
    grass.run_command('g.region', res = res, quiet = True)
    grass.run_command('v.to.rast', type_ = 'area',
                      input_ = poly2, output = 'raster_tmp',
                      use = 'attr', attribute_column = 'cat',
                      label_column = qcol,
                      quiet = True)

    p = grass.pipe_command('r.category', map = 'raster_tmp',
                           separator = '|', quiet = True)
    cats = []
    labels = []
    for line in p.stdout:
        cats.append(line.rstrip('\r\n').split('|')[0])
        labels.append(line.rstrip('\r\n').split('|')[1])
    p.wait()

    query_dict = dict(zip(cats,labels))

    grass.run_command('v.extract', input_ = poly1,
                      output = 'vector_tmp1',
                      type_ = 'centroid',
                      overwrite = True, quiet = True)
    grass.run_command('v.category', input_ = poly1,
                      output = 'vector_tmp2',
                      option = 'add', flags = 't',
                      type_ = 'boundary',
                      overwrite = True)
    # grass.run_command('v.extract', input_ = 'vector_tmp2',
    #                   output = 'vector_tmp3',
    #                   type_ = 'boundary',
    #                   overwrite = True)
    # grass.run_command('v.edit', map_ = 'vector_tmp3',
    #                   tool = 'delete',
    #                   type_ = 'centroid',
    #                   ids = 0-99999999,
    #                   overwrite = True)
    # grass.run_command('v.category', input_ = 'vector_tmp3',
    #                   output = 'vector_tmp4',
    #                   option = 'del',
    #                   type_ = 'boundary',
    #                   overwrite = True)    
    grass.run_command('v.db.addcolumn', map_ = 'vector_tmp1',
                      column = 'rast_cat int', quiet = True)
    grass.run_command('v.db.addcolumn', map_ = 'vector_tmp1',
                      column = qcol, quiet = True)
    grass.run_command('v.what.rast', map_ = 'vector_tmp1',
                      raster = 'raster_tmp',
                      column = 'rast_cat',
                      type_ = 'centroid',
                      overwrite = True, quiet = True)    

    for key,value in query_dict.items():
        grass.run_command('v.db.update', map_ = 'vector_tmp1',
                          col = qcol, value = value,
                          where = "rast_cat = %s" % key,
                          quiet = True)

    grass.run_command('v.db.dropcolumn', map_ = 'vector_tmp1',
                      column = 'rast_cat', quiet = True)

    grass.run_command('v.edit', map_ = 'vector_tmp1',
                      tool = 'copy', bgmap = 'vector_tmp2',
                      type_ = 'boundary', cats = '0-99999999')  # a range string; the bare expression 0-99999999 would evaluate to -99999999

    grass.run_command('g.rename', vector = ('vector_tmp1', poly1),
                      overwrite = True, quiet = True)

    sys.exit(0)
Exemplo n.º 58
0
def main():
    global temp_ng, temp_ncin, temp_ncout

    # we discard stderrs when not debugging
    # ideally stderrs should be printed when an exception was raised
    # this would be done easily with StringIO
    # but it doesn't work with subprocess
    if not grass.debug_level():
        nuldev = open(os.devnull, 'w')
    else:
        nuldev = sys.stderr

    # Initialise temporary vector map names
    temp_ng = "v_lidar_mcc_tmp_ng_" + str(os.getpid())
    temp_ncin = "v_lidar_mcc_tmp_ncin_" + str(os.getpid())
    temp_ncout = "v_lidar_mcc_tmp_ncout_" + str(os.getpid())

    input = options['input']
    g_output = options['ground']
    ng_output = options['nonground']

    # does map exist?
    if not grass.find_file(input, element = 'vector')['file']:
        grass.fatal(_("Vector map <%s> not found") % input)

    # Count points in input map
    n_input = grass.vector_info(input)['points']
    
    # does map contain points ?
    if not ( n_input > 0 ):
        grass.fatal(_("Vector map <%s> does not contain points") % input)

    flag_n=flags['n']
    
    ### Scale domain (l)
    # Evans & Hudak 2007 used scale domains 1 to 3
    l = 1
    l_stop = int(options['nl'])
    if ( l_stop < 1 ):
        grass.fatal("The minimum number of scale domains is 1.")

    ### Curvature tolerance threshold (t)
    # Evans & Hudak 2007 used a t-value of 0.3
    t = float(options['t'])
    ### Increase of curvature tolerance threshold for each scale domain
    ti = t / 3.0
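    # Worked example (not in the original): with the default t = 0.3 the
    # increment is ti = 0.1, so successive scale domains use curvature
    # tolerances of 0.3, 0.4, 0.5, ...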
    
    ### Convergence threshold (j)
    # Evans & Hudak 2007 used a convergence threshold of 0.3
    j = float(options['j'])
    if ( j <= 0 ):
        grass.fatal("The convergence threshold has to be > 0.")
    
    ### Tension parameter (f)
    # Evans & Hudak 2007 used a tension parameter 1.5
    f = float(options['f'])
    if ( f <= 0 ):
        grass.fatal("The tension parameter has to be > 0.")
    
    ### Spline steps parameter (s)
    # Evans & Hudak 2007 used the 12 nearest neighbors
    # (used spline steps $res * 5 before)
    s = int(options['s'])
    if ( s <= 0 ):
        grass.fatal("The spline step parameter has to be > 0.")
    
    ### Read desired resolution from region
    # Evans & Hudak 2007 used a desired resolution (delta) of 1.5
    gregion = grass.region()
    x_res_fin=gregion['ewres']
    y_res_fin=gregion['nsres']

    # Define resolution steps in iteration
    n_res_steps = ( l_stop + 1 ) / 2
        
    # Pass name of input map to v.outlier
    nc_points = input

    # controls first creation of the output map before patching
    ng_output_exists = False
    # append and do not build topology
    vpatch_flags = 'ab'

    # 7.x requires topology to see z coordinate
    # 7.1 v.patch has flags to use z even without topology
    # see #2433 on Trac and r66822 in Subversion
    build_before_patch = True
    unused, gver_minor, unused = grass.version()['version'].split('.')
    if int(gver_minor) >= 1:
        build_before_patch = False
        # do not expect topology and expect z
        vpatch_flags += 'nz'

    # Loop through scale domains
    while ( l <= l_stop ) :
        i = 1
        convergence = 100
        if l < (l_stop + 1) / 2:
            xres = x_res_fin / (n_res_steps - (l - 1))
            yres = y_res_fin / (n_res_steps - (l - 1))
        elif l == (l_stop + 1) / 2:
            xres = x_res_fin
            yres = y_res_fin
        else:
            xres = x_res_fin * ((l + 1) - n_res_steps)
            yres = y_res_fin * ((l + 1) - n_res_steps)
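        # Worked example (illustrative): for l_stop = 3, n_res_steps = 2, so
        # the three scale domains run at half the target resolution (l = 1:
        # res / 2), the target resolution (l = 2: res) and twice the target
        # resolution (l = 3: res * 2).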
        
        grass.use_temp_region()
        grass.run_command("g.region", s=gregion['s'], w=gregion['w'], nsres=yres, ewres=xres, flags="a")
        xs_s = xres * s
        ys_s = yres * s
        grass.message("Processing scale domain " + str(l) + "...")
        # Repeat application of v.outlier until convergence level is reached
        while ( convergence > j ) :
            grass.verbose("Number of input points in iteration " + str(i) + ": " + str(n_input) )
            # Run v.outlier
            if not flag_n:
                grass.run_command('v.outlier',
                    input=nc_points, output=temp_ncout, outlier=temp_ng,
                    ew_step=xs_s, ns_step=ys_s, lambda_=f, threshold=t,
                    filter='positive',
                    overwrite=True, quiet=True, stderr=nuldev)
            else :
                grass.run_command('v.outlier',
                    input=nc_points, output=temp_ncout, outlier=temp_ng,
                    ew_step=xs_s, ns_step=ys_s, lambda_=f, threshold=t,
                    filter='negative',
                    overwrite=True, quiet=True, stderr=nuldev)
            
            # Get information about results for calculating convergence level
            ng=grass.vector_info(temp_ng)['points']
            nc = n_input - ng
            n_input = nc
            grass.run_command('g.remove', flags='f', type='vector', name= temp_ncin, quiet = True, stderr = nuldev)
            grass.run_command("g.rename", vector = temp_ncout + "," + temp_ncin, quiet = True, stderr = nuldev )
            nc_points = temp_ncin        
            # Give information on process status
            grass.verbose("Unclassified points after iteration " + str(i) + ": " + str(nc) )
            grass.verbose("Points classified as non ground after iteration " + str(i) + ": " + str(ng) )
            # Set convergence level
            if ( nc > 0 ) :
                convergence = float( float(ng) / float(nc) )
                if build_before_patch:
                    grass.run_command('v.build', map=temp_ng, stderr=nuldev)
                # Patch non-ground points to non-ground output map
                if ng_output_exists:
                    grass.run_command('v.patch', input=temp_ng,
                                      output=ng_output, flags=vpatch_flags,
                                      overwrite=True, quiet=True, stderr=nuldev)
                else:
                    grass.run_command('g.copy', vector=(temp_ng, ng_output), stderr=nuldev)
                    ng_output_exists = True
            else :
                convergence = 0
            # Give information on convergence level
            grass.verbose("Convergence level after run " + str(i) + " in scale domain " + str(l) + ": " + str( round( convergence, 3 ) ) )
            # Increase iterator
            i = i + 1
        # Adjust curvature tolerance and reset scale domain
        t = t + ti 
        l = l + 1
        # Delete temporary region
        grass.del_temp_region()
    
    # Rename the temporary map of points which have not been classified as non-ground to the output vector map containing ground points
    grass.run_command("g.rename", vector = nc_points + "," + g_output, quiet = True, stderr = nuldev )
Exemplo n.º 59
0
    def SetupProfile(self):
        """!Create coordinate string for profiling. Create segment list for
           transect segment markers.
        """

        #
        # create list of coordinate points for r.profile
        #                
        dist = 0
        cumdist = 0
        self.coordstr = ''
        lasteast = lastnorth = None
        
        region = grass.region()
        insideRegion = True
        if len(self.mapwin.polycoords) > 0:
            for point in self.mapwin.polycoords:
                if not (region['w'] <= point[0] <= region['e'] and region['s'] <= point[1] <= region['n']):
                    insideRegion = False
                # build string of coordinate points for r.profile
                if self.coordstr == '':
                    self.coordstr = '%d,%d' % (point[0], point[1])
                else:
                    self.coordstr = '%s,%d,%d' % (self.coordstr, point[0], point[1])

        if not insideRegion:
            GWarning(message = _("Not all points of profile lie inside computational region."),
                     parent = self)

        if len(self.rasterList) == 0:
            return

        # title of window
        self.ptitle = _('Profile of')

        #
        # create list of coordinates for transect segment markers
        #
        if len(self.mapwin.polycoords) > 0:
            self.seglist = []
            for point in self.mapwin.polycoords:
                # get value of raster cell at coordinate point
                ret = RunCommand('r.what',
                                 parent = self,
                                 read = True,
                                 input = self.rasterList[0],
                                 east_north = '%d,%d' % (point[0],point[1]))
                
                val = ret.splitlines()[0].split('|')[3]
                if val is None or val == '*':
                    continue
                val = float(val)
                
                # calculate distance between coordinate points
                if lasteast and lastnorth:
                    dist = math.sqrt(math.pow((lasteast-point[0]),2) + math.pow((lastnorth-point[1]),2))
                cumdist += dist
                
                #store total transect length
                self.transect_length = cumdist

                # build a list of distance,value pairs for each segment of transect
                self.seglist.append((cumdist,val))
                lasteast = point[0]
                lastnorth = point[1]

            # delete extra first segment point
            try:
                self.seglist.pop(0)
            except IndexError:
                pass

        #
        # create datalist of dist/value pairs and y labels for each raster map
        #    
        self.ylabel = ''
        i = 0
        
        for r in self.raster.iterkeys():
            self.raster[r]['datalist'] = []
            datalist = self.CreateDatalist(r, self.coordstr)
            if len(datalist) > 0:   
                self.raster[r]['datalist'] = datalist

                # update ylabel to match units if they exist
                # (r is a map name key, so the units live in self.raster[r])
                if self.raster[r]['units'] != '':
                    self.ylabel += '%s (%d),' % (self.raster[r]['units'], i)
                i += 1

                # update title
                self.ptitle += ' %s,' % r.split('@')[0]

        self.ptitle = self.ptitle.rstrip(',')
            
        if self.ylabel == '':
            self.ylabel = _('Raster values')
        else:
            self.ylabel = self.ylabel.rstrip(',')
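# Minimal standalone sketch (not part of the class above; assumes plain
# (east, north) tuples in planar coordinates) of the cumulative-distance
# bookkeeping used to place the transect segment markers: each marker keeps
# the distance travelled so far along the profile line.
import math

def cumulative_distances(points):
    """Return the running distance along a polyline of (e, n) points."""
    cumdist = 0.0
    out = []
    last = None
    for e, n in points:
        if last is not None:
            cumdist += math.hypot(e - last[0], n - last[1])
        out.append(cumdist)
        last = (e, n)
    return out

# e.g. cumulative_distances([(0, 0), (3, 4), (3, 10)]) yields [0.0, 5.0, 11.0]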
Exemplo n.º 60
0
def main():
    dem = options['demraster']
    #SDR = options['output']
    weightmap = options['weightmap']
    #get outlet point
    outlet=options['outlets']
    outlet = outlet.split(',')


    # start working with a temporary region
    grass.use_temp_region()

    # get the region in order to estimate the threshold as a fraction (1/3000) of the total cells
    grass.run_command('g.region',raster=dem)
    #region setting
    gregion = grass.region()
    cell_s = float(gregion['nsres'])

    threshold=float(gregion['cells'])/3000
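    # Illustrative numbers (hypothetical region): a 1500 x 2000 cell region
    # has 3e6 cells, giving threshold = 3e6 / 3000 = 1000 cells for r.watershed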
    # stream and drainage determination
    grass.run_command('r.watershed', elevation=dem, threshold=threshold, stream='raster_streams', drainage='drainage', overwrite=True, flags='s')

    # the radius is a little more than the current resolution
    radius = gregion['nsres'] * 1.4

    #watershed delineation
#    outlet = outlet.split('|')[1:-1]
    print ', '.join(outlet)
    grass.run_command('r.circle', output='circle', coordinates=','.join(outlet), max=radius,overwrite=True)
    #get the distances and take the shortest distance
    distances=grass.read_command('r.distance', map='circle,raster_streams')
    list_dist=distances.split('\n')
    list_dist.remove('')
    list_tuple=[]
    for distance in list_dist:
        dist=distance.split(':')
        my_tupla=dist[0],dist[1],float(dist[2]),dist[3],dist[4],dist[5],dist[6]
        list_tuple.append(my_tupla)
    tuple_orderedByDistance=sorted(list_tuple, key=lambda distanza: distanza[2])
    del(distances,list_tuple,list_dist)
    #calculate the basin and read its statistics
    outlet=tuple_orderedByDistance[0][-2:]
    xoutlet=float(outlet[0])
    youtlet=float(outlet[1])
    grass.run_command('r.water.outlet',input='drainage',output='basin',coordinates=str(xoutlet)+','+str(youtlet) , overwrite=True)
    statistics=grass.read_command('r.univar',map=dem, zones='basin')
    main_stat=statistics.splitlines()[-9:]
    #order the stream network
    grass.run_command('r.mask',raster='basin')
    grass.run_command('r.stream.order',stream_rast='raster_streams', direction='drainage', elevation=dem,horton='horton',overwrite=True)
    stream_stat=grass.read_command('r.stream.stats', stream_rast='horton', direction='drainage', elevation=dem,flags='o')
    network_statistics=stream_stat.split('\n')
    network_statistics.remove('')
    # the last line holds the summary for the maximum stream order
    total_length=float(network_statistics[-1].split(',')[2])
    area_basin=float(network_statistics[-1].split(',')[3])
    average_gradient_reach=float(network_statistics[-1].split(',')[5])


    area_basin_Ha=area_basin*100
    mean_elev=float(main_stat[3].split(':')[-1])
    min_elev=float(main_stat[0].split(':')[-1])
    max_elev=float(main_stat[1].split(':')[-1])
    deltaH=max_elev-min_elev
    average_slope=float(network_statistics[-1].split(',')[4])
    grass.run_command('r.mask',flags='r')

    print 'mean slope:'
    print average_slope
    print 'mean reach gradient:'
    print average_gradient_reach
    print 'basin area:'
    print area_basin

    print 'SDR Vanoni 2006:'
    vanoni = 0.4724 * area_basin**(-0.125)
    print vanoni
    print 'SDR Boyce (1975)'
    boyce = 0.3750 *area_basin**(-0.2382)
    print boyce
    print 'SDR USDA 1972'
    USDA72 = 0.5656 *area_basin**(-0.11)
    print USDA72
    print 'Williams and Berndt [43]'
    Williams_Berndt = 0.627 * average_gradient_reach**0.403
    print Williams_Berndt
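    # Hedged numeric check (hypothetical basin of 10 area units, assuming
    # area_basin is in the units each formula expects): the power laws above
    # give roughly 0.4724 * 10 ** -0.125 ~= 0.354 (Vanoni) and
    # 0.5656 * 10 ** -0.11 ~= 0.439 (USDA 1972)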



    # release the temporary region set up earlier with use_temp_region
    grass.del_temp_region()

    #cleaning PART
#    grass.run_command('g.remove',flags='f', type='raster', name='raster_streams')
#	grass.run_command('g.remove',type='vector',pattern='main_stream*',flags='f')

    # NOTE: execution stops here; the connectivity-index code below is never
    # reached and appears to be kept as work in progress
    sys.exit()


    # r.slope.aspect
    # elevation = dem_tinitaly_rocchetta@SDR
    # slope = slope
    rasterTemp = []
    vectTemp = []
    grass.run_command('r.slope.aspect', elevation=dem, slope="slope1", overwrite=True)
    rasterTemp.append('slope1')

    grass.run_command('r.watershed', flags='s', elevation=dem, accumulation='accD8', drainage='drainD8', overwrite=True)
    rasterTemp.append('accD8')
    rasterTemp.append('drainD8')

    grass.run_command('r.slope.aspect',elevation = dem,slope = 'slope',format ='percent',overwrite=True)
    rasterTemp.append('slope')

    # tif_fdir8 coincides with drainD8

    # read drainage direction map
    tif_fdir8_ar = garray.array()
    tif_fdir8_ar.read('drainD8')
    # convert to float
    tif_fdir8_ar = tif_fdir8_ar.astype(numpy.float)  # otherwise overflow in future operations
    # r.watershed: negative numbers indicate that those cells possibly have
    # surface runoff from outside of the current geographic region
    tif_fdir8_ar[(tif_fdir8_ar <= 0)] = 0
    ndv = numpy.min(tif_fdir8_ar)
    tif_fdir8_ar[tif_fdir8_ar == ndv] = numpy.NaN

    # create constant array to transform into raster
    const_ar = tif_fdir8_ar * 0 + cell_s

    ### zero matrix bigger than F_dir8, to avoid border indexing problems
    # surrounding tif_fdir8_ar with a one-cell-wide frame of zeros
    Fd8 = numpy.zeros(shape=((tif_fdir8_ar.shape[0]) + 1, (tif_fdir8_ar.shape[1]) + 1), dtype=numpy.float32)
    # populate the matrix
    Fd8[1:Fd8.shape[0], 1:Fd8.shape[1]] = Fd8[1:Fd8.shape[0], 1:Fd8.shape[1]] + tif_fdir8_ar
    # adding bottom row and right y axis with zeros
    Fdir8 = numpy.zeros(shape=((Fd8.shape[0]) + 1, (Fd8.shape[1]) + 1), dtype=numpy.float32)
    Fdir8[:Fdir8.shape[0] - 1, :Fdir8.shape[1] - 1] = Fd8
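    # Net effect of the two padding steps above (illustration): an original
    # (rows, cols) direction array ends up at rows 1..rows, cols 1..cols of a
    # (rows + 2, cols + 2) zero frame, so the neighbour lookups below can
    # never index outside the array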
    ##------------
    # read weight map and slope
    tif_wgt_ar = garray.array()
    # TODO: check the weight map, which should be taken from the 'weightmap' input option
    tif_wgt_ar.read('weight')
    tif_slope = garray.array()
    tif_slope.read('slope')

    tif_slope = tif_slope / 100.  # converting percentage from r.slope.aspect to a value in range 0 - 1

    # imposing upper and lower limits to slope, no data here are -1
    tif_slope[(tif_slope >= 0) & (tif_slope < 0.005)] = 0.005
    tif_slope[(tif_slope > 1)] = 1
    tif_slope[(tif_slope < 0)] = -1

    # impose a small positive value in the weight map to avoid division by zero
    tif_wgt_ar[tif_wgt_ar == 0] = 1e-10

    Ws_1 = 1 / (tif_wgt_ar * tif_slope)
    # convert to float
    Ws_1 = Ws_1.astype(numpy.float)  # otherwise overflow in future operations
    # r.watershed: negative numbers indicate that those cells possibly have
    # surface runoff from outside of the current geographic region
    # tif_fdir8_ar[(tif_fdir8_ar <= 0)] = 0
    ndv = numpy.min(Ws_1)
    Ws_1[Ws_1 == ndv] = numpy.NaN
    #
    # zero matrix bigger than weight, to avoid border indexing problems, and have same indexing as Fdir8
    Wg = numpy.zeros(shape=((tif_wgt_ar.shape[0]) + 1, (tif_wgt_ar.shape[1]) + 1), dtype=numpy.float32)
    # TODO: replace the variable with Ws_1, i.e. the denominator of Ddn
    Wg[1:Wg.shape[0], 1:Wg.shape[1]] = Wg[1:Wg.shape[0],
                                       1:Wg.shape[1]] + Ws_1  # the weight used to weight the flow length
    # adding bottom row and right y axis with zeros
    Wgt = numpy.zeros(shape=((Wg.shape[0]) + 1, (Wg.shape[1]) + 1), dtype=numpy.float32)
    Wgt[:Wgt.shape[0] - 1, :Wgt.shape[1] - 1] = Wg
    #
    start = time.clock()  # for computational time
    # Creating a matrix as large as weight (and all the other matrices) to store the weighted flow length values
    W_Fl = numpy.zeros(shape=((Wgt.shape[0]), (Wgt.shape[1])), dtype=numpy.float32)
    W_Fl = W_Fl - 1  # to give -1 to NoData after the while loop calculation
    #
    # Let's go for the search algorithm for the weighted flow length
    ND = numpy.where(numpy.isnan(Fdir8))  # coordinates of all the NoData values; starting from them we move upstream and compute the flow length
    #
    Y = ND[0]  # rows, NoData indexes
    X = ND[1]  # columns, NoData indexes; take care not to swap rows and columns
    #
    # initializing lists for outlet and moving cell coordinates, in function of their position
    YC1 = []
    YC2 = []
    YC3 = []
    YC4 = []
    YC5 = []
    YC6 = []
    YC7 = []
    YC8 = []
    XC1 = []
    XC2 = []
    XC3 = []
    XC4 = []
    XC5 = []
    XC6 = []
    XC7 = []
    XC8 = []
    #
    #   Flow directions in r.watershed
    #   4   3   2
    #   5   -   1
    #   6   7   8
    #
    #   Draining in Direction Matrix
    #   8   7   6
    #   1   -   5
    #   2   3   4
    #
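    # Hedged illustration (not used by the algorithm below): mapping of the
    # r.watershed direction codes in the first matrix to (drow, dcol) array
    # offsets, with the row index growing southwards; a cell at (y, x) with
    # code c drains into (y + drow, x + dcol)
    d8_offset = {1: (0, 1), 2: (-1, 1), 3: (-1, 0), 4: (-1, -1),
                 5: (0, -1), 6: (1, -1), 7: (1, 0), 8: (1, 1)}
    # e.g. the i2 test below reads Fdir8[Y + 1, X - 1] and keeps code 2: the
    # south-west neighbour drains north-east, i.e. into (Y, X)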
    i1 = Fdir8[Y, X - 1]  # Searching for NoData with cells draining into them, 8 directions
    D1 = numpy.where(i1 == 1)  # l
    YC1.extend(Y[D1])  # coordinates satisfying the conditions
    XC1.extend(X[D1])
    W_Fl[YC1, XC1] = 0  # initialize flow length at cells draining to NoData
    #
    i2 = Fdir8[Y + 1, X - 1]  # Searching for NoData with cells draining into them, 8 directions
    D2 = numpy.where(i2 == 2)  # lrad2
    YC2.extend(Y[D2])  # coordinates satisfying the conditions
    XC2.extend(X[D2])
    W_Fl[YC2, XC2] = 0  # initialize flow length at cells draining to NoData
    #
    i3 = Fdir8[Y + 1, X]  # Searching for NoData with cells draining into them, 8 directions
    D3 = numpy.where(i3 == 3)  # l
    YC3.extend(Y[D3])  # coordinates satisfying the conditions
    XC3.extend(X[D3])
    W_Fl[YC3, XC3] = 0  # initialize flow length at cells draining to NoData
    #
    i4 = Fdir8[Y + 1, X + 1]  # Searching for NoData with cells draining into them, 8 directions
    D4 = numpy.where(i4 == 4)  # lrad2
    YC4.extend(Y[D4])  # coordinates satisfying the conditions
    XC4.extend(X[D4])
    W_Fl[YC4, XC4] = 0  # initialize flow length at cells draining to NoData
    #
    i5 = Fdir8[Y, X + 1]  # Searching for NoData with cells draining into them, 8 directions
    D5 = numpy.where(i5 == 5)  # l
    YC5.extend(Y[D5])  # coordinates satisfying the conditions
    XC5.extend(X[D5])
    W_Fl[YC5, XC5] = 0  # initialize flow length at cells draining to NoData
    #
    i6 = Fdir8[Y - 1, X + 1]  # Searching for NoData with cells draining into them, 8 directions
    D6 = numpy.where(i6 == 6)  # lrad2
    YC6.extend(Y[D6])  # coordinates satisfying the conditions
    XC6.extend(X[D6])
    W_Fl[YC6, XC6] = 0  # initialize flow length at cells draining to NoData
    #
    i7 = Fdir8[Y - 1, X]  # Searching for NoData with cells draining into them, 8 directions
    D7 = numpy.where(i7 == 7)  # l
    YC7.extend(Y[D7])  # coordinates satisfying the conditions
    XC7.extend(X[D7])
    W_Fl[YC7, XC7] = 0  # initialize flow length at cells draining to NoData
    #
    i8 = Fdir8[Y - 1, X - 1]  # Searching for NoData with cells draining into them, 8 directions
    D8 = numpy.where(i8 == 8)  # lrad2
    YC8.extend(Y[D8])  # coordinates satisfying the conditions
    XC8.extend(X[D8])
    W_Fl[YC8, XC8] = 0  # initialize flow length at cells draining to NoData
    #
    # start = time.clock()  # to be deleted later; only for checking
    count = 1  # "0" passage already done during the previous step
    while len(YC1) > 0 or len(YC2) > 0 or len(YC3) > 0 or len(YC4) > 0 or len(YC5) > 0 or len(YC6) > 0 or len(YC7) > 0 or len(YC8) > 0:
        # Converting into array to be able to do operations
        YYC1=numpy.asarray(YC1);XXC1=numpy.asarray(XC1)
        YYC2=numpy.asarray(YC2);XXC2=numpy.asarray(XC2)
        YYC3=numpy.asarray(YC3);XXC3=numpy.asarray(XC3)
        YYC4=numpy.asarray(YC4);XXC4=numpy.asarray(XC4)
        YYC5=numpy.asarray(YC5);XXC5=numpy.asarray(XC5)
        YYC6=numpy.asarray(YC6);XXC6=numpy.asarray(XC6)
        YYC7=numpy.asarray(YC7);XXC7=numpy.asarray(XC7)
        YYC8=numpy.asarray(YC8);XXC8=numpy.asarray(XC8)
        #
        # Now we can do operations and move towards the correct cell
        # Weighting the flow length: the weights are the half-sum of the two pixel weights times the travelled length
        # The directions are chosen according to Flow_dir, step by step, going from the outlet/NoData cells up to the ridges,
        # each time accounting for the distance (l or l*rad2) multiplied by half the weights of the two travelled cells.
        # Then, by variable substitution, we move one step further and add the previous pixel value to the newly calculated one.
        #
        YYC1 = (YYC1);XXC1 = (XXC1 - 1)  # l
        YYC2 = (YYC2 + 1);XXC2 = (XXC2 - 1)  # lrad2
        YYC3 = (YYC3 + 1);XXC3 = (XXC3)  # l
        YYC4 = (YYC4 + 1);XXC4 = (XXC4 + 1)  # lrad2
        YYC5 = (YYC5);XXC5 = (XXC5 + 1)  # l
        YYC6 = (YYC6 - 1);XXC6 = (XXC6 + 1)  # lrad2
        YYC7 = (YYC7 - 1);XXC7 = (XXC7)  # l
        YYC8 = (YYC8 - 1);XXC8 = (XXC8 - 1)  # lrad2
        #
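        # Worked step (hypothetical numbers): with cell_s = 2000 and weights
        # Wgt = 1.0 at the current cell and 0.5 at the upstream cell, a
        # cardinal move adds 2000 * (1.0 + 0.5) / 2 = 1500 to W_Fl and a
        # diagonal move adds 2000 * sqrt(2) * 0.75 ~= 2121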
        if count == 1:  # first run is zero, as in TauDEM; check for each of the 8 directions whether there is a NoData pixel receiving flow
            if len(YYC1) > 0:
                W_Fl[YYC1, XXC1] = 0
            if len(YYC2) > 0:
                W_Fl[YYC2, XXC2] = 0
            if len(YYC3) > 0:
                W_Fl[YYC3, XXC3] = 0
            if len(YYC4) > 0:
                W_Fl[YYC4, XXC4] = 0
            if len(YYC5) > 0:
                W_Fl[YYC5, XXC5] = 0
            if len(YYC6) > 0:
                W_Fl[YYC6, XXC6] = 0
            if len(YYC7) > 0:
                W_Fl[YYC7, XXC7] = 0
            if len(YYC8) > 0:
                W_Fl[YYC8, XXC8] = 0
        else:
            W_Fl[YYC1, XXC1] = W_Fl[YC1, XC1] + (cell_s * ((Wgt[YC1, XC1] + Wgt[YYC1, XXC1]) / 2))
            W_Fl[YYC2, XXC2] = W_Fl[YC2, XC2] + (cell_s * math.sqrt(2) * ((Wgt[YC2, XC2] + Wgt[YYC2, XXC2]) / 2))
            W_Fl[YYC3, XXC3] = W_Fl[YC3, XC3] + (cell_s * ((Wgt[YC3, XC3] + Wgt[YYC3, XXC3]) / 2))
            W_Fl[YYC4, XXC4] = W_Fl[YC4, XC4] + (cell_s * math.sqrt(2) * ((Wgt[YC4, XC4] + Wgt[YYC4, XXC4]) / 2))
            W_Fl[YYC5, XXC5] = W_Fl[YC5, XC5] + (cell_s * ((Wgt[YC5, XC5] + Wgt[YYC5, XXC5]) / 2))
            W_Fl[YYC6, XXC6] = W_Fl[YC6, XC6] + (cell_s * math.sqrt(2) * ((Wgt[YC6, XC6] + Wgt[YYC6, XXC6]) / 2))
            W_Fl[YYC7, XXC7] = W_Fl[YC7, XC7] + (cell_s * ((Wgt[YC7, XC7] + Wgt[YYC7, XXC7]) / 2))
            W_Fl[YYC8, XXC8] = W_Fl[YC8, XC8] + (cell_s * math.sqrt(2) * ((Wgt[YC8, XC8] + Wgt[YYC8, XXC8]) / 2))
            #
        #
        # Reconstructing all X and Y of this step and moving on upstream (towards the ridges)
        YY = [];XX = []
        YY.extend(YYC1);XX.extend(XXC1)
        YY.extend(YYC2);XX.extend(XXC2)
        YY.extend(YYC3);XX.extend(XXC3)
        YY.extend(YYC4);XX.extend(XXC4)
        YY.extend(YYC5);XX.extend(XXC5)
        YY.extend(YYC6);XX.extend(XXC6)
        YY.extend(YYC7);XX.extend(XXC7)
        YY.extend(YYC8);XX.extend(XXC8)
        #
        YY = numpy.asarray(YY)
        XX = numpy.asarray(XX)
        #
        i1 = Fdir8[YY, XX - 1]  # Searching for cells draining into them, 8 directions
        D1 = numpy.where(i1 == 1)  # l
        YC1 = YY[D1]  # coordinates satisfying the conditions; here the actual length value needs to be added to the previous one
        XC1 = XX[D1]
        #
        i2 = Fdir8[YY + 1, XX - 1]  # Searching for cells draining into them, 8 directions
        D2 = numpy.where(i2 == 2)  # lrad2
        YC2 = YY[D2]  # coordinates satisfying the conditions
        XC2 = XX[D2]
        #
        i3 = Fdir8[YY + 1, XX]  # Searching for cells draining into them, 8 directions
        D3 = numpy.where(i3 == 3)  # l
        YC3 = YY[D3]  # coordinates satisfying the conditions
        XC3 = XX[D3]
        #
        i4 = Fdir8[YY + 1, XX + 1]  # Searching for cells draining into them, 8 directions
        D4 = numpy.where(i4 == 4)  # lrad2
        YC4 = YY[D4]  # coordinates satisfying the conditions
        XC4 = XX[D4]
        #
        i5 = Fdir8[YY, XX + 1]  # Searching for cells draining into them, 8 directions
        D5 = numpy.where(i5 == 5)  # l
        YC5 = YY[D5]  # coordinates satisfying the conditions
        XC5 = XX[D5]
        #
        i6 = Fdir8[YY - 1, XX + 1]  # Searching for cells draining into them, 8 directions
        D6 = numpy.where(i6 == 6)  # lrad2
        YC6 = YY[D6]  # coordinates satisfying the conditions
        XC6 = XX[D6]
        #
        i7 = Fdir8[YY - 1, XX]  # Searching for cells draining into them, 8 directions
        D7 = numpy.where(i7 == 7)  # l
        YC7 = YY[D7]  # coordinates satisfying the conditions
        XC7 = XX[D7]
        #
        i8 = Fdir8[YY - 1, XX - 1]  # Searching for cells draining into them, 8 directions
        D8 = numpy.where(i8 == 8)  # lrad2
        YC8 = YY[D8]  # coordinates satisfying the conditions
        XC8 = XX[D8]
        count = count + 1
    #
    elapsed = (time.clock() - start)  # computational time
    print time.strftime("%d/%m/%Y %H:%M:%S    "), "Process concluded successfully \n", "%.2f" % elapsed, 'seconds for Weighted-Flow Length calculation with ', int(count), ' iterations'  # truncating the precision

    W_fl = W_Fl[1:W_Fl.shape[0] - 1, 1:W_Fl.shape[1] - 1]  # strip the padding from the weighted flow length; this step homogenizes the matrix dimensions
    del W_Fl
    # set zero values to 1 to avoid division by zero
    D_down_ar = garray.array()
    W_fl[W_fl == 0] = 1
    D_down_ar[...] = W_fl
    del W_fl
    D_down_ar.write('w_flow_length',null=numpy.nan,overwrite=True)

    """
    --------------------------------------
    WORKING ON D_UP COMPONENT
    --------------------------------------
    """

    grass.run_command('r.watershed', elevation=dem, accumulation='accMDF', convergence=5, memory=300)  # elevation was the literal string 'dem'; the demraster option is what is meant
    rasterTemp.append('accMDF')

    tif_dtmsca = garray.array()
    tif_dtmsca.read('accMDF')  # was 'acc_watershead_dinf', a map this script never creates; accMDF is the accumulation computed just above

    tif_dtmsca = abs(tif_dtmsca)*cell_s

    acc_final_ar = tif_dtmsca / const_ar

    grass.run_command('r.watershed', elevation=dem, flow='weight', accumulation='accW', convergence=5, memory=300)
    rasterTemp.append('accW')

    acc_W_ar = garray.array()
    acc_W_ar.read('accW')


    grass.run_command('r.watershed', elevation=dem, flow='slope', accumulation='accS', convergence=5, memory=300)
    rasterTemp.append('accS')

    acc_S_ar = garray.array()
    acc_S_ar.read('accS')

    # Computing C_mean as (accW + weight) / acc_final
    C_mean_ar = (acc_W_ar + tif_wgt_ar) / acc_final_ar
    del (acc_W_ar)  # free memory
    #
    # Computing S_mean as (accS + slope) / acc_final
    S_mean_ar = (acc_S_ar + tif_slope) / acc_final_ar  # was tif_fdir8_ar, but the comment and the C_mean pattern point to the slope map
    del (acc_S_ar, tif_slope, tif_fdir8_ar)  # free memory
    #
    # Computing D_up as "%cmean.tif%" * "%smean.tif%" * SquareRoot("%ACCfinal.tif%" * "%resolution.tif%" * "%resolution.tif%")
    cell_area = (const_ar) ** 2  # change of variables, to be sure
    D_up_ar = C_mean_ar * S_mean_ar * numpy.sqrt(acc_final_ar * cell_area)  # to transform from unit values to square units
    #
    # Computing Connectivity index
    ic_ar = numpy.log10(D_up_ar / D_down_ar)

    SDRmax = 0.8; IC0 = 0.5; k = 1
    SDRmap = SDRmax / (1 + numpy.exp((IC0 - ic_ar) / k))  # math.exp fails on arrays, so use numpy.exp; the sigmoid divides (IC0 - IC) by k (identical here since k = 1)
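    # Hedged numeric check: at IC = IC0 = 0.5 the sigmoid gives
    # SDR = 0.8 / (1 + exp(0)) = 0.4, i.e. half of SDRmax is reached at IC0,
    # and at IC = 0 it gives 0.8 / (1 + exp(0.5)) ~= 0.302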

    '''
    Cleaning temporary files
    '''


    for rast in rasterTemp:
        grass.run_command('g.remove', flags='f', type='raster', name=rast)