def get_mapsets(self):
    """Load mapsets when location, dbase, both or nothing is defined.

    Tries ``g.mapset -l`` with location+dbase, then location only, then
    dbase only; falls back to ``g.mapsets -l`` (current location) when
    none of the dependencies is available.

    :return: first line of the listing command's stdout (mapset names)
    """
    def _first_line(module, **kwargs):
        # Start the module and return the first line of its stdout;
        # raises IndexError when the module produced no output at all.
        proc = script.start_command(module, flags='l',
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE, **kwargs)
        return proc.communicate()[0].splitlines()[0]

    # BUGFIX: the original used bare `except:` clauses, which also
    # swallow SystemExit/KeyboardInterrupt.  The fallback chain relies
    # on KeyError (missing code_dict entry), IndexError (empty output)
    # and OSError (process start failure) — catch exactly those.
    try:
        try:
            # exists both location and dbase
            return _first_line('g.mapset',
                               location=self.code_dict['location'],
                               dbase=self.code_dict['dbase'])
        except (KeyError, IndexError, OSError):
            try:
                # just location
                return _first_line('g.mapset',
                                   location=self.code_dict['location'])
            except (KeyError, IndexError, OSError):
                # just dbase
                return _first_line('g.mapset',
                                   dbase=self.code_dict['dbase'])
    except (KeyError, IndexError, OSError):
        # no dependencies defined
        return _first_line('g.mapsets')
def _download(self):
    """!Downloads data from WCS server using GDAL WCS driver

    Creates the service XML and VRT description files, runs r.in.gdal
    on the VRT and relays the module's GRASS_INFO_PERCENT progress
    messages to the caller's message interface.

    @return ret (exit code of r.in.gdal module)
    """
    self._debug("_download", "started")
    self.xml_file = self._createXML()
    self.vrt_file = self._createVRT()
    grass.message('Starting module r.in.gdal ...')
    # Ask the child module for machine-readable ('gui') progress output
    env = os.environ.copy()
    env['GRASS_MESSAGE_FORMAT'] = 'gui'
    # NOTE(review): stdout is piped but never read below; if r.in.gdal
    # wrote substantial data to stdout the pipe could fill and block —
    # confirm this is safe for this module.
    if self.params['location'] == "":
        p = grass.start_command('r.in.gdal',
                                input=self.vrt_file,
                                output=self.params['output'],
                                stdout = grass.PIPE,
                                stderr = grass.PIPE,
                                env = env)
    else:
        # import into an explicitly requested location
        p = grass.start_command('r.in.gdal',
                                input=self.vrt_file,
                                output=self.params['output'],
                                location = self.params['location'],
                                stdout = grass.PIPE,
                                stderr = grass.PIPE,
                                env = env)
    # Convert GRASS_INFO_PERCENT lines on stderr into percent updates;
    # any other stderr line is forwarded verbatim at verbose level.
    while p.poll() is None:
        line = p.stderr.readline()
        linepercent = line.replace('GRASS_INFO_PERCENT:','').strip()
        if linepercent.isdigit():
            #print linepercent
            grass.percent(int(linepercent),100,1)
        else:
            grass.verbose(line)
    grass.percent(100,100,5)

    ret = p.wait()
    if ret != 0:
        grass.fatal('r.in.gdal for %s failed.' % self.vrt_file)
    else:
        grass.message('r.in.gdal was successful for new raster map %s ' % self.params['output'])

    # Remove the temporary XML/VRT helper files in either case
    grass.try_remove(self.vrt_file)
    grass.try_remove(self.xml_file)
    self._debug("_download", "finished")
    return ret
def init_grass(path, grasslib):
    """Initialise GRASS GIS in Lambert 93 (EPSG:2154).

    :param path: directory in which the ``grassdata`` folder is created
    :param grasslib: GRASS GIS installation directory (GISBASE)
    :raises Exception: when the 'datas' mapset cannot be created
    """
    global gscript

    # Grass folder initialisation
    path_grassdata = os.path.join(path, "grassdata")
    if not os.path.exists(path_grassdata):
        os.mkdir(path_grassdata)

    # Init Grass environment
    gisbase = os.environ['GISBASE'] = grasslib
    gisdb = path_grassdata
    sys.path.append(os.path.join(os.environ["GISBASE"], "etc", "python"))
    os.environ["GISBASE"] = gisbase

    # Overwrite and verbose parameters
    os.environ["GRASS_OVERWRITE"] = "1"
    os.environ['GRASS_VERBOSE'] = '-1'

    # Grass functions import (deferred: needs GISBASE/etc/python on sys.path)
    import grass.script.setup as gsetup
    import grass.script as gscript

    # Init Grass session
    gsetup.init(gisbase, gisdb)

    # Delete existing location
    if os.path.exists(os.path.join(gisdb, "demolocation")):
        shutil.rmtree(os.path.join(gisdb, "demolocation"))

    # Create the location in Lambert 93
    gscript.run_command("g.proj", flags="c", epsg="2154",
                        location="demolocation")

    # Create datas mapset.
    # BUGFIX: the original checked os.path.join(gisdb, "/demolocation/datas");
    # an absolute second component makes os.path.join discard gisdb entirely.
    if not os.path.exists(os.path.join(gisdb, "demolocation", "datas")):
        try:
            gscript.start_command("g.mapset", flags="c", mapset="datas",
                                  location="demolocation", dbase=gisdb)
        except Exception:
            # BUGFIX: the original wrote `raise Exception("...") % (gisdb)`,
            # applying % to the exception object itself (TypeError at raise
            # time); format the message before raising.
            raise Exception("Folder '%s' does not own to current user" % gisdb)
def main():
    """Compute a focal statistic of the input raster.

    Only the 'Deviation from Mean' statistic is handled here: two
    r.neighbors runs (mean and stddev) execute in parallel, then
    r.mapcalc combines them into (value - mean) / stddev.
    """
    in_map = options['input']
    out_map = options['output']
    window = options['nsize']
    chosen_stat = options['statistic']
    circ_flag = 'c' if flags['c'] else ''

    # Process id used to make temporary map names unique
    pid = os.getpid()

    # Refuse to clobber an existing output map unless --overwrite is set
    if not grass.overwrite():
        if grass.find_file(out_map)['name']:
            grass.message(_("Output raster map <%s> already exists") % out_map)
            sys.exit()

    if chosen_stat == "Deviation from Mean":
        # Neighbourhood mean (launched in the background)
        mean_map = "tmp_avg_%d" % pid
        tmp_rlayers.append(mean_map)
        mean_proc = grass.start_command(
            'r.neighbors', overwrite=True, flags=circ_flag,
            input=in_map, output=mean_map, size=window)

        # Neighbourhood standard deviation (runs concurrently)
        sd_map = "tmp_stddev_%d" % pid
        tmp_rlayers.append(sd_map)
        sd_proc = grass.start_command(
            'r.neighbors', overwrite=True, flags=circ_flag,
            method="stddev", input=in_map, output=sd_map, size=window)

        # Block until both helper maps exist
        mean_proc.wait()
        sd_proc.wait()

        # Standardised deviation from the neighbourhood mean
        grass.mapcalc("$outmap = ($inraster - $avgraster) / $stddevraster",
                      outmap=out_map, inraster=in_map,
                      avgraster=mean_map, stddevraster=sd_map)
def main():
    """Neighbourhood 'Deviation from Mean' driver.

    Runs r.neighbors twice in parallel (mean and stddev over the same
    window), waits for both, then derives the standardised deviation
    raster with r.mapcalc.
    """
    raster_in, raster_out = options["input"], options["output"]
    window = options["nsize"]
    chosen_stat = options["statistic"]
    shape_flag = "c" if flags["c"] else ""

    # Unique suffix for the temporary maps
    process_id = os.getpid()

    # Bail out early when the target map exists and overwriting is off
    if not grass.overwrite():
        if grass.find_file(raster_out)["name"]:
            grass.message(_("Output raster map <%s> already exists") % raster_out)
            sys.exit()

    if chosen_stat == "Deviation from Mean":
        mean_name = "tmp_avg_%d" % process_id
        sd_name = "tmp_stddev_%d" % process_id
        tmp_rlayers.append(mean_name)
        tmp_rlayers.append(sd_name)

        # Launch both neighbourhood computations concurrently
        shared = dict(overwrite=True, flags=shape_flag,
                      input=raster_in, size=window)
        mean_proc = grass.start_command("r.neighbors",
                                        output=mean_name, **shared)
        sd_proc = grass.start_command("r.neighbors", method="stddev",
                                      output=sd_name, **shared)

        # Wait for the processes to finish
        for proc in (mean_proc, sd_proc):
            proc.wait()

        # Calculate deviation from mean
        grass.mapcalc(
            "$outmap = ($inraster - $avgraster) / $stddevraster",
            outmap=raster_out,
            inraster=raster_in,
            avgraster=mean_name,
            stddevraster=sd_name,
        )
def _download(self):
    """!Downloads data from WCS server using GDAL WCS driver

    Builds the XML/VRT service description, runs r.in.gdal on the VRT
    and relays its progress messages.

    @return ret (exit code of r.in.gdal module)
    """
    self._debug("_download", "started")
    self.xml_file = self._createXML()
    self.vrt_file = self._createVRT()
    gscript.message('Starting module r.in.gdal ...')

    # Assemble r.in.gdal arguments; 'location' is only passed when set.
    run_kwargs = dict(input=self.vrt_file,
                      output=self.params['output'],
                      stderr=gscript.PIPE,
                      env=self._env)
    if self.params['location'] != "":
        run_kwargs['location'] = self.params['location']
    p = gscript.start_command('r.in.gdal', **run_kwargs)

    # Relay GRASS_INFO_PERCENT lines from stderr as percent updates,
    # forwarding everything else at verbose level.
    while p.poll() is None:
        raw = p.stderr.readline()
        percent_text = raw.replace('GRASS_INFO_PERCENT:', '').strip()
        if percent_text.isdigit():
            gscript.percent(int(percent_text), 100, 1)
        else:
            gscript.verbose(raw)
    gscript.percent(100, 100, 5)

    ret = p.wait()
    if ret != 0:
        gscript.fatal('r.in.gdal for %s failed.' % self.vrt_file)
    else:
        gscript.message('r.in.gdal was successful for new raster map %s ' % self.params['output'])

    # Drop the temporary helper files
    gscript.try_remove(self.vrt_file)
    gscript.try_remove(self.xml_file)
    self._debug("_download", "finished")
    return ret
def main():
    """Write an ISIS3 MapTemplate file describing the current GRASS projection.

    Collects resolution, target body, optional lat/long extents (from
    ``g.region -gl``) and the projection key/value pairs, then dumps an
    IsisMapTemplate to options["out"].
    """
    isis3 = {}
    outfile = options["out"]
    body = options["body"]
    pj = grasslib.G_get_projinfo()

    # Output resolution: meters per pixel or pixels per degree
    if options["outres"] and options["restype"] == "mpp":
        isis3["PixelResolution"] = "%f <meters>" % float(options["outres"])
    if options["outres"] and options["restype"] == "ppd":
        isis3["Scale"] = "%f <pixel/degree>" % float(options["outres"])
    isis3["TargetName"] = body

    if flags["a"]:
        # NOTE(review): exec() of the g.region -gl output is expected to
        # define se_lat, sw_lat, ne_lat, ... here; under Python 3 exec()
        # cannot create new function locals — verify on the target runtime.
        ret = grass.start_command("g.region", flags="gl", stdout=subprocess.PIPE)
        exec(ret.communicate()[0])
        isis3["MinimumLatitude"] = "%f" % min(se_lat, sw_lat)
        isis3["MaximumLatitude"] = "%f" % max(ne_lat, nw_lat)
        isis3["MinimumLongitude"] = "%f" % min(sw_long, nw_long)
        isis3["MaximumLongitude"] = "%f" % max(se_long, ne_long)

    # Translate GRASS projection keys into ISIS3 keywords
    for p in paradict:
        if grasslib.G_find_key_value(p, pj):
            k = paradict[p]
            v = grasslib.G_find_key_value(p, pj)
            if p == "a" or p == "b":
                # semi-axes carry an explicit unit
                v = v + " <meters>"
            if p == "proj" and v == "ll":
                sys.exit(
                    "This GRASS location is in LatLong, no cartographic projection is set.\nExiting..."
                )
            if p == "proj":
                k, v = "ProjectionName", projdict[v]
            isis3[k] = v

    isis3mt = IsisMapTemplate(isis3)
    # BUGFIX: the original never closed the output file; use a context
    # manager so it is flushed and closed deterministically.
    with open(outfile, "w") as of:
        isis3mt.dump(of)
    sys.stderr.write("Done writing %s ISIS3 MapTemplate file\n" % outfile)
def reproject_region(region, from_proj, to_proj):
    """Reproject the corner coordinates of *region* between projections.

    Feeds the east/north and west/south corners through m.proj and
    returns a copy of *region* with the reprojected bounds filled in.
    Raises RuntimeError when m.proj exits with a non-zero code.
    """
    out_region = region.copy()
    corner_text = '{east} {north}\n{west} {south}'.format(**out_region)
    mproj = gs.start_command('m.proj', input='-', separator=' , ',
                             proj_in=from_proj, proj_out=to_proj,
                             stdin=gs.PIPE, stdout=gs.PIPE, stderr=gs.PIPE)
    # Feed the corners on stdin (pipes carry bytes, hence the encode)
    mproj.stdin.write(gs.encode(corner_text))
    mproj.stdin.close()
    mproj.stdin = None
    out_text, err_text = mproj.communicate()
    if mproj.returncode:
        raise RuntimeError("reprojecting region: m.proj error: " + err_text)
    rows = gs.decode(out_text).split(os.linesep)
    east_val, north_val, _rest = rows[0].split(' ')
    west_val, south_val, _rest = rows[1].split(' ')
    out_region['east'] = east_val
    out_region['north'] = north_val
    out_region['west'] = west_val
    out_region['south'] = south_val
    return out_region
def main():
    """Write an ISIS3 MapTemplate file for the current GRASS projection.

    Reads projection info via grasslib, optionally adds resolution and
    lat/long extent entries, then dumps the template to options['out'].
    """
    isis3 = {}
    outfile = options['out']
    body = options['body']
    pj = grasslib.G_get_projinfo()

    # Output resolution: meters per pixel or pixels per degree
    if options['outres'] and options['restype'] == 'mpp':
        isis3['PixelResolution'] = "%f <meters>" % float(options['outres'])
    if options['outres'] and options['restype'] == 'ppd':
        isis3['Scale'] = "%f <pixel/degree>" % float(options['outres'])
    isis3['TargetName'] = body

    if flags['a']:
        # NOTE(review): exec() of the g.region -gl output is expected to
        # define se_lat, sw_lat, ne_lat, ... here; under Python 3 exec()
        # cannot create new function locals — verify on the target runtime.
        ret = grass.start_command("g.region", flags="gl", stdout=subprocess.PIPE)
        exec(ret.communicate()[0])
        isis3['MinimumLatitude'] = "%f" % min(se_lat, sw_lat)
        isis3['MaximumLatitude'] = "%f" % max(ne_lat, nw_lat)
        isis3['MinimumLongitude'] = "%f" % min(sw_long, nw_long)
        isis3['MaximumLongitude'] = "%f" % max(se_long, ne_long)

    # Translate GRASS projection keys into ISIS3 keywords
    for p in paradict:
        if grasslib.G_find_key_value(p, pj):
            k = paradict[p]
            v = grasslib.G_find_key_value(p, pj)
            if p == 'a' or p == 'b':
                # semi-axes carry an explicit unit
                v = v + ' <meters>'
            if p == 'proj' and v == 'll':
                sys.exit(
                    "This GRASS location is in LatLong, no cartographic projection is set.\nExiting..."
                )
            if p == 'proj':
                k, v = 'ProjectionName', projdict[v]
            isis3[k] = v

    isis3mt = IsisMapTemplate(isis3)
    # BUGFIX: the original never closed the output file; use a context
    # manager so it is flushed and closed deterministically.
    with open(outfile, 'w') as of:
        isis3mt.dump(of)
    sys.stderr.write("Done writing %s ISIS3 MapTemplate file\n" % outfile)
def map_exists(name, element, env, mapset=None):
    """Check if a map is present in the mapset given in the environment.

    :param name: name of the map
    :param element: data type ('raster', 'raster_3d', and 'vector')
    :param env: environment created by function gscript.create_environment
    :param mapset: mapset to search; defaults to the current mapset of *env*
    :return: True when the map exists
    """
    if not mapset:
        # BUGFIX: the original used gscript.run_command(), which returns
        # the exit code (an int), so .strip() raised AttributeError;
        # read_command() returns the module's stdout instead.
        mapset = gscript.read_command("g.mapset", flags="p", env=env).strip()
    # change type to element used by find file
    if element == "raster":
        element = "cell"
    elif element == "raster_3d":
        element = "grid3"
    # g.findfile returns non-zero when file was not found
    # so we ignore return code and just focus on stdout
    process = gscript.start_command(
        "g.findfile",
        flags="n",
        element=element,
        file=name,
        mapset=mapset,
        stdout=gscript.PIPE,
        stderr=gscript.PIPE,
        env=env,
    )
    output, unused_errors = process.communicate()
    info = gscript.parse_key_val(output, sep="=")
    # file is the key questioned in grass.script.core find_file();
    # a non-empty value means the map was found
    if info["file"]:
        return True
    else:
        return False
def read2_command(*args, **kwargs):
    """Run a GRASS module, print and return its (stdout, stderr) pair.

    :return: the (stdout, stderr) tuple from Popen.communicate()
    """
    import grass.script as grass
    kwargs['stdout'] = grass.PIPE
    kwargs['stderr'] = grass.PIPE
    ps = grass.start_command(*args, **kwargs)
    # BUGFIX: communicate() may only be called once per process; the
    # original called it twice (once to print, once to return), and the
    # second call fails on the already-closed pipes.  Also modernised
    # the Python 2 print statement.
    output = ps.communicate()
    print(output)
    return output
def map_exists(name, element, mapset=None, env=None):
    """Check is map is present in the mapset given in the environment

    :param name: Name of the map
    :param element: Data type ('raster', 'raster_3d', and 'vector')
    :param env: Environment created by function grass.script.create_environment
    :param mapset: Mapset name, "." for current mapset only,
        None for all mapsets in the search path
    :return: True when g.findfile located the map
    """
    # g.findfile uses internal element names for raster data
    findfile_element = {"raster": "cell", "raster_3d": "grid3"}.get(element, element)
    # g.findfile exits non-zero when nothing was found, so the return
    # code is ignored and only the key/value stdout is examined
    finder = gs.start_command(
        "g.findfile",
        flags="n",
        element=findfile_element,
        file=name,
        mapset=mapset,
        stdout=gs.PIPE,
        stderr=gs.PIPE,
        env=env,
    )
    stdout_data, _stderr_data = finder.communicate()
    key_vals = gs.parse_key_val(stdout_data, sep="=")
    # 'file' mirrors the key checked by grass.script.core.find_file()
    return bool(key_vals["file"])
def map_exists(element, name, mapset):
    """Check is map is present in the mapset given in the environment

    :param element: data type ('raster', 'raster_3d', and 'vector')
    :param name: name of the map
    :param mapset: mapset to search in
    """
    # translate public element names to the ones g.findfile expects
    element_translation = {'raster': 'cell', 'raster_3d': 'grid3'}
    lookup_element = element_translation.get(element, element)
    # g.findfile exits non-zero when the map is missing, so only the
    # stdout key/value output is considered, not the return code
    proc = gscript.start_command(
        'g.findfile', flags='n', element=lookup_element, file=name,
        mapset=mapset, stdout=gscript.PIPE, stderr=gscript.PIPE)
    out, _err = proc.communicate()
    parsed = gscript.parse_key_val(out, sep='=')
    # 'file' is the key queried by grass.script.core find_file()
    return True if parsed['file'] else False
def read2_command(*args, **kwargs):
    """Run a GRASS module and print its (stdout, stderr) between separators.

    :return: the (stdout, stderr) tuple from Popen.communicate()
    """
    kwargs['stdout'] = grass.PIPE
    kwargs['stderr'] = grass.PIPE
    ps = grass.start_command(*args, **kwargs)
    # BUGFIX/modernisation: the original used Python 2 print statements
    # (a SyntaxError on Python 3) and discarded the communicate() result;
    # call it once, print it and return it for callers.
    output = ps.communicate()
    print("=============")
    print(output)
    print("----------")
    return output
def start_grass_cmd(*args, **kwargs):
    """Calls the grass module's start_command to run the inputed GRASS command.

    Both stdout and stderr of the child process are captured via pipes.

    Returns Popen object
    """
    piped_kwargs = dict(kwargs, stdout=grass.PIPE, stderr=grass.PIPE)
    return grass.start_command(*args, **piped_kwargs)
def StartCalibration(id, dir, inputvector, target, factornames, fuzzysets,
                     iterations, runs, treduce, sdreduce, breakcrit,
                     bootstrapOn=False, samplingfactor=None):
    """Run several v.fuzzy.calibrator calibrations in parallel and pick
    the best model.

    :param id: base identifier used to name per-run parameter/log files
    :param dir: directory for the per-run .xml parameter and .log files
    :param runs: number of parallel calibration runs
    :param bootstrapOn: pass the 'b' (bootstrap) flag to the calibrator
    :return: result of selectBestModel() over the collected statistics
    """
    # BUGFIX/modernisation: float('inf') replaces the magic 999999
    # sentinel, Python 3 print() replaces print statements, and the
    # logfiles are closed via context managers.
    minBIC = float('inf')
    flags = ""
    if bootstrapOn:
        flags += "b"

    BICList = []
    errorList = []
    AICList = []
    MAFList = []
    RsquaredList = []
    procList = []

    # Parallel model runs
    for i in range(runs):
        run_id = "%s_run_%i" % (id, i)
        print("Running calibration", i, inputvector, target, factornames,
              fuzzysets)
        procList.append(grass.start_command(
            "v.fuzzy.calibrator", flags=flags, overwrite=True,
            input=inputvector, factors=factornames,
            target=target, fuzzysets=fuzzysets, iterations=iterations,
            samplingfactor=samplingfactor,
            parameter=os.path.join(dir, (run_id + ".xml")),
            log=os.path.join(dir, (run_id + ".log")),
            treduce=treduce, sdreduce=sdreduce, breakcrit=breakcrit))

    # Wait for all created processes
    for proc in procList:
        proc.wait()

    # Analyze the logfiles.  The calibrator writes, in order:
    # error, BIC, AIC, MAF (Model Assessment Factor), Rsquared.
    for i in range(runs):
        run_id = "%s_run_%i" % (id, i)
        with open(os.path.join(dir, run_id + ".log"), "r") as logfile:
            errorList.append(float(logfile.readline().split(":")[1]))
            runBIC = float(logfile.readline().split(":")[1])
            BICList.append(runBIC)
            AICList.append(float(logfile.readline().split(":")[1]))
            MAFList.append(float(logfile.readline().split(":")[1]))
            RsquaredList.append(float(logfile.readline().split(":")[1]))
        if runBIC < minBIC:
            minBIC = runBIC

    print("Finished", runs, "runs")
    return selectBestModel(minBIC, errorList, BICList, AICList,
                           MAFList, RsquaredList)
def Scan(self, continuous):
    """Launch a single r.in.kinect scan unless one is already running.

    Returns the Popen handle of the running scan process (or None when
    a previous scan is still active).
    """
    # A still-running previous scan blocks a new one
    if self.process and self.process.poll() is None:
        return
    self.status.SetLabel("Scanning...")
    wx.SafeYield()
    scan_params = self.GatherParameters(editMode=False, continuous=continuous)
    self.process = gscript.start_command('r.in.kinect', overwrite=True,
                                         quiet=True, stdin=PIPE,
                                         **scan_params)
    return self.process
def start_grass_cmd(*args, **kwargs):
    """Call the grass module's start_command to run the inputed GRASS command.

    The spawned console window is hidden (via hideprocess()) and both
    stdout and stderr are captured through pipes.

    Returns Popen object
    """
    kwargs.update(startupinfo=hideprocess(),
                  stdout=grass.PIPE,
                  stderr=grass.PIPE)
    return grass.start_command(*args, **kwargs)
def main():
    """Rasterize a 3D point vector by piping v.out.ascii into r.in.xyz."""
    options, flags = gs.parser()
    vector = options["input"]
    layer = 1
    raster = options["output"]
    method = options["method"]
    z = 3
    sep = "pipe"
    out_args = {}

    if not gs.find_file(vector, element="vector")["fullname"]:
        gs.fatal("Vector map <{0}> not found".format(vector))

    if options["column"]:
        # use the attribute column (4th ascii field) as the z value
        z = 4
        out_args["column"] = options["column"]
        out_args["where"] = "{0} IS NOT NULL".format(options["column"])
        columns = gs.vector_columns(vector)
        if options["column"] not in columns:
            gs.fatal(_("Column <{0}> not found".format(options["column"])))
        if columns[options["column"]]["type"] not in ("INTEGER", "DOUBLE PRECISION"):
            gs.fatal(_("Column <{0}> is not numeric".format(options["column"])))

    # Exporter feeds its stdout straight into the importer's stdin
    exporter = gs.pipe_command(
        "v.out.ascii", input=vector, layer=layer, format="point",
        separator=sep, flags="r", **out_args)
    importer = gs.start_command(
        "r.in.xyz", input="-", output=raster, method=method, z=z,
        separator=sep, stdin=exporter.stdout)
    importer.communicate()
    exporter.wait()
    return 0
def get_tables(self):
    """Return the tables of the user's database as a single string."""
    driver, database = self.get_db_info()
    # db.tables -p prints one table name per line on stdout
    proc = script.start_command('db.tables', flags='p',
                                driver=driver, database=database,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
    stdout_data, _stderr_data = proc.communicate()
    return stdout_data
def proj_to_wgs84(region):
    """Reproject region corners to WGS84 (lat/long) via ``m.proj -od``.

    :param region: dict with 'east', 'north', 'west', 'south' keys
    :return: dict of the reprojected corner coordinates
    :raises RuntimeError: when m.proj exits with a non-zero return code
    """
    proj_in = '{east} {north}\n{west} {south}'.format(**region)
    proc = gs.start_command('m.proj', input='-', separator=' , ',
                            flags='od',
                            stdin=gs.PIPE, stdout=gs.PIPE, stderr=gs.PIPE)
    # BUGFIX: the subprocess pipe carries bytes under Python 3, so the
    # input must be encoded and the output decoded before string
    # operations (mirrors the corrected sibling implementation).
    proc.stdin.write(gs.encode(proj_in))
    proc.stdin.close()
    proc.stdin = None
    proj_out, errors = proc.communicate()
    if proc.returncode:
        raise RuntimeError("m.proj error: %s" % errors)
    enws = gs.decode(proj_out).split(os.linesep)
    elon, nlat, unused = enws[0].split(' ')
    wlon, slat, unused = enws[1].split(' ')
    return {'east': elon, 'north': nlat, 'west': wlon, 'south': slat}
def proj_to_wgs84(region):
    """Convert region corner coordinates to WGS84 using ``m.proj -od``.

    Raises RuntimeError when m.proj fails.
    """
    corner_text = '{east} {north}\n{west} {south}'.format(**region)
    mproj = gs.start_command('m.proj', input='-', separator=' , ',
                             flags='od',
                             stdin=gs.PIPE, stdout=gs.PIPE, stderr=gs.PIPE)
    # pipes carry bytes, so encode on the way in and decode on the way out
    mproj.stdin.write(gs.encode(corner_text))
    mproj.stdin.close()
    mproj.stdin = None
    out_data, err_data = mproj.communicate()
    if mproj.returncode:
        raise RuntimeError("m.proj error: %s" % err_data)
    rows = gs.decode(out_data).split(os.linesep)
    east_lon, north_lat, _rest = rows[0].split(' ')
    west_lon, south_lat, _rest = rows[1].split(' ')
    return {'east': east_lon, 'north': north_lat,
            'west': west_lon, 'south': south_lat}
def reproject_region(region, from_proj, to_proj):
    """Reproject the corner coordinates of *region* with m.proj.

    :param region: dict with 'east', 'north', 'west', 'south' keys
    :param from_proj: PROJ string of the source projection
    :param to_proj: PROJ string of the target projection
    :return: shallow copy of *region* with reprojected bounds
    :raises RuntimeError: when m.proj fails
    """
    region = copy.copy(region)
    proj_input = '{east} {north}\n{west} {south}'.format(**region)
    proc = gs.start_command('m.proj', input='-', separator=' , ',
                            proj_in=from_proj, proj_out=to_proj,
                            stdin=gs.PIPE, stdout=gs.PIPE, stderr=gs.PIPE)
    # BUGFIX: under Python 3 the pipe expects bytes, so the text must be
    # encoded before writing and the output decoded before splitting
    # (mirrors the corrected sibling implementation in this file).
    proc.stdin.write(gs.encode(proj_input))
    proc.stdin.close()
    proc.stdin = None
    proj_output, stderr = proc.communicate()
    if proc.returncode:
        raise RuntimeError("reprojecting region: m.proj error: " + stderr)
    enws = gs.decode(proj_output).split(os.linesep)
    elon, nlat, unused = enws[0].split(' ')
    wlon, slat, unused = enws[1].split(' ')
    region['east'] = elon
    region['north'] = nlat
    region['west'] = wlon
    region['south'] = slat
    return region
def reproject_region(region, from_proj, to_proj):
    """Reproject boundary of region from one projection to another.

    :param dict region: region to reproject as a dictionary with long key names
                        output of get_region
    :param str from_proj: PROJ.4 string of region; output of get_location_proj_string
    :param str to_proj: PROJ.4 string of target location;
                        output of get_location_proj_string
    :return dict region: reprojected region as a dictionary with long key names
    :raises RuntimeError: when m.proj exits with a non-zero return code

    (DOCFIX: the third parameter was previously documented as ``in_proj``
    although the function signature names it ``to_proj``.)
    """
    # work on a copy so the caller's dict is not mutated
    region = region.copy()
    proj_input = (
        f"{region['east']} {region['north']}\n{region['west']} {region['south']}"
    )
    proc = gs.start_command(
        "m.proj",
        input="-",
        separator=" , ",
        proj_in=from_proj,
        proj_out=to_proj,
        flags="d",
        stdin=gs.PIPE,
        stdout=gs.PIPE,
        stderr=gs.PIPE,
    )
    proc.stdin.write(gs.encode(proj_input))
    proc.stdin.close()
    proc.stdin = None
    proj_output, stderr = proc.communicate()
    if proc.returncode:
        raise RuntimeError(
            _("Encountered error while running m.proj: {}").format(stderr))
    # one "lon , lat , elev" line per corner; keep lon/lat only
    enws = gs.decode(proj_output).split(os.linesep)
    elon, nlat, unused = enws[0].split(" ")
    wlon, slat, unused = enws[1].split(" ")
    region["east"] = elon
    region["north"] = nlat
    region["west"] = wlon
    region["south"] = slat
    return region
def executeCommand(self, *args, **kwargs):
    """Command execution method using Popen in two modes : shell mode or not.

    In shell mode args[0] is a full command line, optionally extended
    with ``>> logfile`` and ``2>&1`` redirections; otherwise the args
    are handed to grass.start_command with piped stdout/stderr.

    Returns the collected output text on success; raises
    GrassPostGisImporterError carrying that text on a non-zero exit.
    """
    p = None
    shell = True if 'shell' in kwargs and kwargs['shell'] is True else False
    # toLog is honoured only when instance-level logging is enabled
    toLog = True if 'toLog' in kwargs and kwargs['toLog'] is True \
        and self.logOutput is True else False
    ##now remove this key as it is not expected by grass.start_command
    if 'toLog' in kwargs:
        del kwargs['toLog']
    command = args[0]
    if shell is True:
        if toLog is True:
            command = command + ' >> ' + self.logfile
        ##always use redirection on stdout only
        command = command + " 2>&1"
        # NOTE(review): shell=True with a string command is vulnerable to
        # shell injection if any part of the command comes from untrusted
        # input — confirm all callers pass trusted strings only.
        p = Popen(command, shell=shell, stdout=PIPE)
    else:
        kwargs['stdout'] = PIPE
        kwargs['stderr'] = PIPE
        p = grass.start_command(*args, **kwargs)
    # NOTE(review): wait() before communicate() can deadlock when the
    # child fills the stdout pipe — presumably output volumes are small
    # here; verify for long-running commands.
    retcode = p.wait()
    message = ''
    if shell is True and toLog is True:
        message = 'Read logfile for details.'
    else:
        com = p.communicate()
        r = re.compile('\n')
        # flatten stdout and stderr into one stripped-lines message
        for std in com:
            if std is not None:
                lines = r.split(std)
                for elem in lines:
                    message += str(elem).strip() + "\n"
        if toLog is True:
            self.__writeLog(message)
    if retcode == 0:
        return message
    else:
        raise GrassPostGisImporterError(message)
def Scan(self, continuous):
    """Assemble r.in.kinect parameters from the current scan/settings
    state and launch a scan.

    :param continuous: when True the 'l' (loop) flag is passed so the
        module keeps scanning
    :return: the Popen handle of the scan process, or None when a
        previous scan is still running
    """
    # a still-running previous scan blocks a new one
    if self.process and self.process.poll() is None:
        return
    self.status.SetLabel("Scanning...")
    wx.SafeYield()
    params = {}
    if self.scan['interpolate']:
        method = 'interpolation'
    else:
        method = 'mean'
    if self.calib_matrix:
        params['calib_matrix'] = self.calib_matrix
    # an elevation raster takes precedence over a saved region
    if self.scan['elevation']:
        params['raster'] = self.scan['elevation']
    elif self.scan['region']:
        params['region'] = self.scan['region']
    if self.scan['trim_tolerance']:
        params['trim_tolerance'] = self.scan['trim_tolerance']
    # trim_nsewtb holds six comma-separated values: N,S,E,W then the
    # bottom/top pair used as the z range
    trim_nsew = ','.join(self.scan['trim_nsewtb'].split(',')[:4])
    zrange = ','.join(self.scan['trim_nsewtb'].split(',')[4:])
    if continuous:
        params['flags'] = 'l'
    if self.scan['equalize']:
        # NOTE(review): 'e' is only appended when 'flags' already exists
        # (i.e. continuous mode) — equalize alone never sets any flag;
        # confirm this is intended.
        if 'flags' in params and params['flags']:
            params['flags'] += 'e'
    if self.settings['tangible']['analyses']['contours']:
        params['contours'] = self.settings['tangible']['analyses']['contours']
        params['contours_step'] = self.settings['tangible']['analyses']['contours_step']
    # optional PLY point-cloud export
    if 'export' in self.settings['tangible'] and self.settings['tangible']['export']['active'] and \
            self.settings['tangible']['export']['file']:
        params['ply'] = self.settings['tangible']['export']['file']
    # smooth radius and resolution are configured in mm, converted to m
    self.process = gscript.start_command('r.in.kinect', output=self.scan['scan_name'],
                                         trim=trim_nsew,
                                         smooth_radius=float(self.scan['smooth'])/1000,
                                         method=method, zrange=zrange,
                                         rotate=self.scan['rotation_angle'],
                                         resolution=float(self.scan['resolution'])/1000,
                                         zexag=self.scan['zexag'],
                                         numscan=self.scan['numscans'],
                                         overwrite=True, quiet=True, **params)
    return self.process
def main():
    """Pipe v.out.ascii point output into r.in.xyz to rasterize a vector."""
    options, flags = gs.parser()
    vector = options['input']
    raster = options['output']
    method = options['method']
    layer = 1
    sep = 'pipe'
    z_field = 3
    export_kwargs = {}

    if not gs.find_file(vector, element='vector')['fullname']:
        gs.fatal('Vector map <{0}> not found'.format(vector))

    column = options['column']
    if column:
        # take z from the attribute column (4th field of the ascii output)
        z_field = 4
        export_kwargs['column'] = column
        export_kwargs['where'] = '{0} IS NOT NULL'.format(column)
        table_columns = gs.vector_columns(vector)
        if column not in table_columns:
            gs.fatal(_('Column <{0}> not found'.format(column)))
        if table_columns[column]['type'] not in ('INTEGER', 'DOUBLE PRECISION'):
            gs.fatal(_('Column <{0}> is not numeric'.format(column)))

    # exporter's stdout is connected directly to the importer's stdin
    ascii_out = gs.pipe_command(
        'v.out.ascii', input=vector, layer=layer, format='point',
        separator=sep, flags='r', **export_kwargs)
    xyz_in = gs.start_command(
        'r.in.xyz', input='-', output=raster, method=method, z=z_field,
        separator=sep, stdin=ascii_out.stdout)
    xyz_in.communicate()
    ascii_out.wait()
    return 0
def import_stds(input, output, directory, title=None, descr=None,
                location=None, link=False, exp=False, overr=False,
                create=False, stds_type="strds", base=None,
                set_current_region=False, memory=300):
    """Import space time datasets of type raster and vector

    :param input: Name of the input archive file
    :param output: The name of the output space time dataset
    :param directory: The extraction directory
    :param title: The title of the new created space time dataset
    :param descr: The description of the new created space time dataset
    :param location: The name of the location that should be created,
                     maps are imported into this location
    :param link: Switch to link raster maps instead importing them
    :param exp: Extend location extents based on new dataset
    :param overr: Override projection (use location's projection)
    :param create: Create the location specified by the "location"
                   parameter and exit.
                   Do not import the space time datasets.
    :param stds_type: The type of the space time dataset that should
                      be imported
    :param base: The base name of the new imported maps, it will be
                 extended using a numerical index.
    :param memory: Cache size for raster rows, used in r.in.gdal
    """
    global raise_on_error
    old_state = gscript.raise_on_error
    gscript.set_raise_on_error(True)

    # Check if input file and extraction directory exits
    if not os.path.exists(input):
        gscript.fatal(
            _("Space time raster dataset archive <%s> not found") % input)
    if not create and not os.path.exists(directory):
        gscript.fatal(_("Extraction directory <%s> not found") % directory)

    tar = tarfile.open(name=input, mode='r')

    # Check for important files
    msgr = get_tgis_message_interface()
    msgr.message(
        _("Checking validity of input file (size: %0.1f MB). Make take a while..."
          % (os.path.getsize(input) / (1024 * 1024.0))))
    members = tar.getnames()
    # Make sure that the basenames of the files are used for comparison
    member_basenames = [os.path.basename(name) for name in members]

    if init_file_name not in member_basenames:
        gscript.fatal(_("Unable to find init file <%s>") % init_file_name)
    if list_file_name not in member_basenames:
        gscript.fatal(_("Unable to find list file <%s>") % list_file_name)
    if proj_file_name not in member_basenames:
        gscript.fatal(
            _("Unable to find projection file <%s>") % proj_file_name)

    msgr.message(_("Extracting data..."))
    tar.extractall(path=directory)
    tar.close()

    # We use a new list file name for map registration
    new_list_file_name = list_file_name + "_new"
    # Save current working directory path
    old_cwd = os.getcwd()

    # Switch into the data directory
    os.chdir(directory)

    # Check projection information
    if not location:
        temp_name = gscript.tempfile()
        temp_file = open(temp_name, "w")
        proj_name = os.path.abspath(proj_file_name)

        # We need to convert projection strings generated
        # from other programs than g.proj into
        # new line format so that the grass file comparison function
        # can be used to compare the projections
        proj_name_tmp = temp_name + "_in_projection"
        proj_file = open(proj_name, "r")
        proj_content = proj_file.read()
        proj_content = proj_content.replace(" +", "\n+")
        proj_content = proj_content.replace("\t+", "\n+")
        proj_file.close()

        proj_file = open(proj_name_tmp, "w")
        proj_file.write(proj_content)
        proj_file.close()

        p = gscript.start_command("g.proj", flags="j", stdout=temp_file)
        p.communicate()
        temp_file.close()

        if not gscript.compare_key_value_text_files(temp_name, proj_name_tmp,
                                                    sep="="):
            if overr:
                gscript.warning(_("Projection information does not match. "
                                  "Proceeding..."))
            else:
                # NOTE(review): the diff uses proj_name (the raw file),
                # not the reformatted proj_name_tmp — confirm intended.
                diff = ''.join(gscript.diff_files(temp_name, proj_name))
                gscript.warning(_("Difference between PROJ_INFO file of "
                                  "imported map and of current location:"
                                  "\n{diff}").format(diff=diff))
                gscript.fatal(_("Projection information does not match. "
                                "Aborting."))

    # Create a new location based on the projection information and switch
    # into it
    old_env = gscript.gisenv()

    if location:
        try:
            proj4_string = open(proj_file_name, 'r').read()
            gscript.create_location(dbase=old_env["GISDBASE"],
                                    location=location,
                                    proj4=proj4_string)
            # Just create a new location and return
            if create:
                os.chdir(old_cwd)
                return
        except Exception as e:
            gscript.fatal(
                _("Unable to create location %(l)s. Reason: %(e)s") % {
                    'l': location, 'e': str(e)})
        # Switch to the new created location
        try:
            gscript.run_command("g.mapset", mapset="PERMANENT",
                                location=location,
                                dbase=old_env["GISDBASE"])
        except CalledModuleError:
            gscript.fatal(_("Unable to switch to location %s") % location)
        # create default database connection
        try:
            gscript.run_command("t.connect", flags="d")
        except CalledModuleError:
            gscript.fatal(
                _("Unable to create default temporal database "
                  "in new location %s") % location)

    try:
        # Make sure the temporal database exists
        factory.init()

        fs = "|"
        maplist = []
        mapset = get_current_mapset()
        list_file = open(list_file_name, "r")
        new_list_file = open(new_list_file_name, "w")

        # get number of lines to correctly form the suffix
        max_count = -1
        for max_count, l in enumerate(list_file):
            pass
        max_count += 1
        list_file.seek(0)

        # Read the map list from file
        line_count = 0
        while True:
            line = list_file.readline()
            if not line:
                break

            line_list = line.split(fs)

            # The filename is actually the base name of the map
            # that must be extended by the file suffix
            filename = line_list[0].strip().split(":")[0]
            if base:
                mapname = "%s_%s" % (
                    base, gscript.get_num_suffix(line_count + 1, max_count))
                mapid = "%s@%s" % (mapname, mapset)
            else:
                mapname = filename
                mapid = mapname + "@" + mapset

            row = {}
            row["filename"] = filename
            row["name"] = mapname
            row["id"] = mapid
            row["start"] = line_list[1].strip()
            row["end"] = line_list[2].strip()

            new_list_file.write("%s%s%s%s%s\n" % (mapname, fs, row["start"],
                                                  fs, row["end"]))

            maplist.append(row)
            line_count += 1

        list_file.close()
        new_list_file.close()

        # Read the init file
        fs = "="
        init = {}
        init_file = open(init_file_name, "r")
        while True:
            line = init_file.readline()
            if not line:
                break

            kv = line.split(fs)
            init[kv[0]] = kv[1].strip()

        init_file.close()

        if "temporal_type" not in init or \
           "semantic_type" not in init or \
           "number_of_maps" not in init:
            gscript.fatal(
                _("Key words %(t)s, %(s)s or %(n)s not found in init"
                  " file.") % {'t': "temporal_type",
                               's': "semantic_type",
                               'n': "number_of_maps"})

        if line_count != int(init["number_of_maps"]):
            gscript.fatal(_("Number of maps mismatch in init and list file."))

        format_ = "GTiff"
        type_ = "strds"

        if "stds_type" in init:
            type_ = init["stds_type"]
        if "format" in init:
            format_ = init["format"]

        if stds_type != type_:
            gscript.fatal(
                _("The archive file is of wrong space time dataset"
                  " type"))

        # Check the existence of the files
        if format_ == "GTiff":
            for row in maplist:
                filename = row["filename"] + ".tif"
                if not os.path.exists(filename):
                    gscript.fatal(
                        _("Unable to find GeoTIFF raster file "
                          "<%s> in archive.") % filename)
        elif format_ == "AAIGrid":
            for row in maplist:
                filename = row["filename"] + ".asc"
                if not os.path.exists(filename):
                    gscript.fatal(
                        _("Unable to find AAIGrid raster file "
                          "<%s> in archive.") % filename)
        elif format_ == "GML":
            for row in maplist:
                filename = row["filename"] + ".xml"
                if not os.path.exists(filename):
                    gscript.fatal(
                        _("Unable to find GML vector file "
                          "<%s> in archive.") % filename)
        elif format_ == "pack":
            for row in maplist:
                if type_ == "stvds":
                    filename = str(row["filename"].split(":")[0]) + ".pack"
                else:
                    filename = row["filename"] + ".pack"
                if not os.path.exists(filename):
                    gscript.fatal(
                        _("Unable to find GRASS package file "
                          "<%s> in archive.") % filename)
        else:
            gscript.fatal(_("Unsupported input format"))

        # Check the space time dataset
        id = output + "@" + mapset
        sp = dataset_factory(type_, id)
        if sp.is_in_db() and gscript.overwrite() is False:
            gscript.fatal(
                _("Space time %(t)s dataset <%(sp)s> is already in"
                  " the database. Use the overwrite flag.") % {
                      't': type_, 'sp': sp.get_id()})

        # Import the maps
        if type_ == "strds":
            if format_ == "GTiff" or format_ == "AAIGrid":
                _import_raster_maps_from_gdal(maplist, overr, exp, location,
                                              link, format_,
                                              set_current_region, memory)
            if format_ == "pack":
                _import_raster_maps(maplist, set_current_region)
        elif type_ == "stvds":
            if format_ == "GML":
                _import_vector_maps_from_gml(maplist, overr, exp, location,
                                             link)
            if format_ == "pack":
                _import_vector_maps(maplist)

        # Create the space time dataset
        if sp.is_in_db() and gscript.overwrite() is True:
            gscript.info(
                _("Overwrite space time %(sp)s dataset "
                  "<%(id)s> and unregister all maps.") % {
                      'sp': sp.get_new_map_instance(None).get_type(),
                      'id': sp.get_id()})
            sp.delete()
            sp = sp.get_new_instance(id)

        temporal_type = init["temporal_type"]
        semantic_type = init["semantic_type"]
        relative_time_unit = None
        if temporal_type == "relative":
            if "relative_time_unit" not in init:
                gscript.fatal(
                    _("Key word %s not found in init file.") %
                    ("relative_time_unit"))
            relative_time_unit = init["relative_time_unit"]
            sp.set_relative_time_unit(relative_time_unit)

        gscript.verbose(
            _("Create space time %s dataset.") %
            sp.get_new_map_instance(None).get_type())

        sp.set_initial_values(temporal_type=temporal_type,
                              semantic_type=semantic_type, title=title,
                              description=descr)
        sp.insert()

        # register the maps
        fs = "|"
        register_maps_in_space_time_dataset(
            type=sp.get_new_map_instance(None).get_type(),
            name=output, file=new_list_file_name, start="file",
            end="file", unit=relative_time_unit, dbif=None, fs=fs,
            update_cmd_list=False)

        os.chdir(old_cwd)
    except:
        raise
    # Make sure the location is switched back correctly
    finally:
        if location:
            # Switch to the old location
            try:
                # NOTE(review): this call passes gisdbase= while the
                # earlier g.mapset call uses dbase= — confirm which
                # option name the installed g.mapset expects.
                gscript.run_command("g.mapset", mapset=old_env["MAPSET"],
                                    location=old_env["LOCATION_NAME"],
                                    gisdbase=old_env["GISDBASE"])
            except CalledModuleError:
                # BUGFIX: the original called grass.warning() here, but
                # the module is imported as gscript — the handler itself
                # raised NameError whenever the switch-back failed.
                gscript.warning(_("Switching to original location failed"))

        gscript.set_raise_on_error(old_state)
def _handler(self, request, response):
    """Delineate a drainage basin in GRASS and export it as GML and GeoJSON.

    Reads the basin name and the outlet coordinates (InputX/InputY) from the
    WPS request, runs r.water.outlet on the 'piemonte_drain_r100' drainage
    raster, vectorizes the resulting basin and writes <name>.gml and
    <name>.geojson into the configured server output path.  The GML file is
    attached to the response output 'shapefilebacino'.
    """
    nome_bacino = request.inputs['nome_bacino'][0].data
    coordX = float(request.inputs['InputX'][0].data)
    coordY = float(request.inputs['InputY'][0].data)
    # Variables needed to drive GRASS from Python.
    GISBASE = config.get('grass', 'gisbase')
    os.environ['GRASS_SKIP_MAPSET_OWNER_CHECK'] = '1'  # mapset may belong to another user
    import grass.script as grass
    import grass.script.setup as gsetup
    print("---- Start renerfor_delimitazione process ---- ")
    # Set up location and mapset so this GRASS session runs in the shared mapset.
    GISDBASE = config.get("grass", "gisdbase")
    location = "EPSG32632"
    mapset = "PROVA"
    gsetup.init(GISBASE, GISDBASE, location, mapset)
    gisenv = grass.parse_command('g.gisenv', flags='n')
    print("Test gisenv: %s" % gisenv)
    # Renamed from 'list': do not shadow the builtin.
    rast_list = grass.parse_command('g.list', type="rast")
    print("g.list rast: %s " % rast_list)
    # Preventive cleanup of maps possibly left by an earlier (failed) run.
    LOGGER.info(" ---- Pulizia mapset ---- ")
    grass.run_command('g.remove', flags='f', type='raster', name='MASK')
    grass.run_command('g.remove', flags='f', type='raster', name='BACINO')
    grass.run_command('g.remove', flags='f', type='vector', name='BACINOvect')
    # Align the computational region to the drainage raster.
    import subprocess
    p1 = grass.start_command('g.region', raster='piemonte_drain_r100@PROVA',
                             stderr=subprocess.PIPE)
    stdoutdata, stderrdata = p1.communicate()
    print("Error occured: %s" % stderrdata)
    #grass.run_command('g.region', raster='piemonte_drain_r100@PERMANENT', quiet=True)
    # Basin extraction from the outlet coordinates.
    p2 = grass.start_command('r.water.outlet',
                             input='piemonte_drain_r100@PROVA',
                             output='BACINO',
                             coordinates='%f,%f' % (coordX, coordY),
                             overwrite=True,
                             stderr=subprocess.PIPE)
    stdoutdata, stderrdata = p2.communicate()
    print("Error occured: %s" % stderrdata)
    # Vectorize the basin raster.
    p3 = grass.start_command('r.to.vect', input='BACINO', output='BACINOvect',
                             type="area", stderr=subprocess.PIPE)
    stdoutdata, stderrdata = p3.communicate()
    print("Error occured: %s" % stderrdata)
    # Export the basin polygon as GML into the server output path.
    outpath = config.get("server", "outputpath")
    outfile = os.path.join(outpath, nome_bacino + ".gml")
    if os.path.isfile(outfile):
        os.remove(outfile)
    res = grass.start_command('v.out.ogr', flags='c', input='BACINOvect',
                              type="area", output=outfile, format="GML",
                              stderr=subprocess.PIPE)
    stdoutdata, stderrdata = res.communicate()
    print("Error occured: %s" % stderrdata)
    # Also export the basin as GeoJSON.
    outfile2 = os.path.join(outpath, nome_bacino + ".geojson")
    if os.path.isfile(outfile2):
        os.remove(outfile2)
    res2 = grass.start_command('v.out.ogr', flags='c', input='BACINOvect',
                               type="area", output=outfile2, format="GeoJSON",
                               stderr=subprocess.PIPE)
    # BUGFIX: this export was fire-and-forget; without waiting on the process
    # the handler could return (and clean up the maps) before the GeoJSON
    # file was completely written.
    stdoutdata, stderrdata = res2.communicate()
    print("Error occured: %s" % stderrdata)
    # Final cleanup of the temporary maps.
    LOGGER.info(" ---- Pulizia mapset ---- ")
    grass.run_command('g.remove', flags='f', type='raster', name='MASK')
    grass.run_command('g.remove', flags='f', type='raster', name='BACINO')
    grass.run_command('g.remove', flags='f', type='vector', name='BACINOvect')
    response.outputs['shapefilebacino'].file = outfile
    return
def _handler(self, request, response):
    """Compute a regional flow-duration curve (CDP) report for a basin.

    Imports the basin vector from the request into GRASS, extracts
    geomorphological descriptors via r.univar/r.stats zonal statistics,
    estimates regional L-moments and Burr XII distribution parameters
    (Renerfor regional procedure), and renders a multi-page PDF report plus a
    PNG chart.  The report is returned to the client as MetaLink / MetaLink4
    output.  NOTE(review): relies on module-level names `config`, `LOGGER`,
    `subprocess`, `np`, `fun`, `plt`, `PdfPages`, `MetaLink`, `MetaLink4`,
    `MetaFile`, `FORMATS` — assumed imported at file top; confirm.
    """
    response.update_status('PyWPS Process started.', 0)
    LOGGER.info("starting ...")
    max_outputs = 1
    # Input variables from the WPS request.
    vectorbacino=request.inputs['vectorbacino'][0].file
    nomebacino=request.inputs['namebacino'][0].data
    # Output variables: filenames/URLs are made unique per request via uuid.
    workdir=self.workdir
    file_path = config.get('server', 'outputpath')
    file_url = config.get('server', 'outputurl')
    nome_report_PDF="Report_CDP_"+str(self.uuid)+".pdf"
    nome_grafico="CDP_"+str(self.uuid)+".png"
    file_report_PDF= os.path.join(file_path, nome_report_PDF)
    url_report_PDF = os.path.join(file_url, nome_report_PDF)
    file_grafico=os.path.join(file_path, nome_grafico)
    url_grafico = os.path.join(file_url, nome_grafico)
    # Set up the GRASS environment (fixed location/mapset).
    import grass.script as grass
    import grass.script.setup as gsetup
    GISBASE=config.get('grass', 'gisbase')
    GISDBASE=config.get("grass", "gisdbase")
    location="EPSG32632"
    mapset="PROVA"
    gsetup.init(GISBASE,GISDBASE, location, mapset)
    gisenv=grass.parse_command('g.gisenv', flags='n')
    print("Test gisenv: %s" % gisenv)
    # NOTE(review): 'list' shadows the builtin; kept unchanged here.
    list=grass.parse_command('g.list', type="rast")
    print("g.list rast: %s " %list)
    # ---- Extraction of the basin descriptors from GRASS ----
    print('######### ESTRAZIONE DESCRITTORI DEL BACINO DA GRASS #########')
    # Load the basin vector into GRASS.
    res=grass.start_command('v.in.ogr', input=vectorbacino, output='basin', overwrite = True, min_area='0',stderr=subprocess.PIPE)
    stdoutdata, stderrdata = res.communicate()
    print("Error occured: %s" % stderrdata)
    # Configure the GRASS region on the basin and rasterize it for zonal stats.
    grass.run_command('g.region', vector='basin')
    grass.run_command('v.to.rast', input='basin', output='BASIN', use='cat', type='area', overwrite = True)
    # Mean/max elevation and area from the DEM ('piemonte_dem_r100');
    # area_km assumes 100 m cells (1 cell = 0.01 km2) — TODO confirm.
    stats_dem = grass.parse_command('r.univar', flags='eg', map='piemonte_dem_r100@PROVA', zones='BASIN')
    quota_media=float(stats_dem['mean'])
    quota_max=float(stats_dem['max'])
    area_km=float(stats_dem['n']) * 0.01
    ipso75=float(stats_dem['first_quartile'])
    print(quota_media, quota_max, area_km, ipso75)
    # Mean annual precipitation ('piemonte_MAP_r250').
    #grass.run_command('g.region', vect='basin', res='250')
    stats_MAP = grass.parse_command('r.univar', flags='g', map='piemonte_MAP_r250@PROVA', zones='BASIN')
    MAP_media = float(stats_MAP['mean'])
    MAP_std = float(stats_MAP['stddev'])
    # Mean and std of the hourly rainfall coefficient ('piemonte_IDFa_r250').
    #grass.run_command('g.region', vect='basin', res='250')
    stats_IDFa = grass.parse_command('r.univar', flags='g', map='piemonte_IDFa_r250@PROVA', zones='BASIN')
    IDFa_media = float(stats_IDFa['mean'])
    IDFa_std = float(stats_IDFa['stddev'])
    # Mean Fourier B1 coefficient of the rainfall regime ('piemonte_fourierB1_r50').
    #grass.run_command('g.region', vect='basin', res='50')
    stats_fourierB1 = grass.parse_command('r.univar', flags='g', map='piemonte_fourierB1_r50@PROVA', zones='BASIN')
    fourierB1_media = float(stats_fourierB1['mean'])
    # Mean coefficient of variation of the rainfall regime ('piemonte_pioggemensili_cv_r50').
    #grass.run_command('g.region', vect='basin', res='50')
    stats_rpcv = grass.parse_command('r.univar', flags='g', map='piemonte_pioggemensili_cv_r50@PROVA',zones='BASIN')
    rpcv_media = float(stats_rpcv['mean'])
    # Percentages of the reclassified CORINE land-cover classes; counts of the
    # characters '1'..'5' in the r.stats output are used as class cell counts.
    cells_CLC = grass.read_command('r.stats', flags='1n', input='italy_CLC2000_r100@PROVA')
    all_cells_CLC = cells_CLC.count('1') + cells_CLC.count('2') + cells_CLC.count('3') + cells_CLC.count('4') + cells_CLC.count('5')
    clc2_percentuale = float(cells_CLC.count('2')) / float(all_cells_CLC) * 100
    clc3_percentuale = float(cells_CLC.count('3')) / float(all_cells_CLC) * 100
    # Cleanup of the GRASS workspace.
    grass.run_command('g.remove', flags='f', type='raster', name='MASK')
    grass.run_command('g.remove', flags='f', type='raster', name='BASIN')
    grass.run_command('g.remove', flags='f', type='vector', name='basin')
    # Build the textual descriptor summary (Italian, user-facing).
    testo =""
    testo1 = "I descrittori del bacino '%s' sono: \n" %(nomebacino)
    testo1 += "Area (km2): "+ str(round(area_km,3)) + "\n"+ "quota_media (m slm): "+ str(round(quota_media,3)) + "\n" + "quota_massima (m slm): " + str(round(quota_max,3)) + "\n" + "curva_ipso_75percento (m slm): " + str(round(ipso75,3)) + "\n" + "MAP (mm): " + str(round(MAP_media,3)) + "\n" + "IDFa (mm): " + str(round(IDFa_media,3)) + "\n" + "IDFa_std (mm/h): " + str(round(IDFa_std,3)) + "\n" + "fourier_B1: " + str(round(fourierB1_media,3)) + "\n" +"CV rp: " + str(round(rpcv_media,3)) + "\n" + "clc2_perc: " + str(round(clc2_percentuale,3)) + "\n" + "clc3_perc: " + str(round(clc3_percentuale,3))+"\n"
    print(testo1)
    # ---- Estimation of the regional L-moments and distribution parameters ----
    # Regression formulas of the Renerfor regional procedure; the numeric
    # coefficients are the published regional regression constants —
    # presumably from the Renerfor project report; verify against source.
    c_int=IDFa_media/MAP_media
    Y=-7.3605*10**2+1.2527*MAP_media+3.2569*10**(-1)*quota_media+5.2674*fourierB1_media-6.7185*clc2_percentuale
    LCV=-2.896*10**(-1)-2.688*10**(-3)*clc3_percentuale+9.643*10**(-5)*ipso75+1.688*10**(-4)*MAP_media+2.941*10*c_int
    LCA=4.755*quota_max**(-0.2702)*IDFa_std**0.06869*rpcv_media**0.2106
    # Mean annual flow converted to a mean discharge (divide by seconds/year / area scaling).
    L1=Y*area_km/31536.0
    testo2 = "\n Gli L-momenti della CDP stimati, per l' area di studio sulla base delle caratteristiche geomorfologice del bacino, secondo la procedura regionale Renerfor sono: \n"
    testo2 += "L1:" + str(round(L1,3)) + "\n" + "LCV: "+str(round(LCV,3))+ "\n"+"LCA:" + str(round(LCA,3))+"\n \n"
    print(testo2)
    # Distribution parameters: functions ported from the R package
    # Hydroapps (Ganora) for RENERFOR.
    d=np.array(range(1,366))
    p=1-d/366.0
    LCAinf=fun.tau3BurrXII_WeibullBound(LCV)
    LCAsup=fun.tau3BurrXII_ParetoBound(LCV)
    risultati=fun.parBurrXIIapprox(L1, LCV, LCA)
    #risultati=('BurrXII','a: 8.5; b: 1; c: 2.8', p)
    distribuzione=risultati[0]
    parametri=risultati[1]
    x=risultati[2]
    testo3 ="Gli L-momenti L-CV e L-CA della Curva di Durata delle Portate (CDP), stimati a partire dai descrittori di bacino, ricadono, come riportato nella seguente figura, nel dominio di esistenza della distribuzione: "+ str(distribuzione)+".\n"
    testo3 += "I parametri stimati della distribuzione indicata hanno valore: \n"+ str(parametri)+". \n \n"
    testo4 =" La Curva di durata delle portate in regime naturale (non influenzata da derivazioni), ottenuta dal modello regionale Renerfor, viene riportata nel presente Report."
    # Create the flow-duration-curve chart (PNG).
    fun.grafico_FDC_semplice_browser(x,file_grafico)
    #fun.figura_FDC_due_assi(x) #prova
    ##########################################################################################
    ##########################################################################################
    # OUTPUT
    testo=testo1+testo2+testo3+testo4
    # Create the PDF report (text page + Burr domain page + FDC page).
    with PdfPages(file_report_PDF) as pdf:
        #plt.rc('text', usetex=False)
        figura_testo=plt.figure(figsize=(8,6))
        # Page 1: textual results.
        plt.text(-0.12, 1.01,testo, ha='left',va='top', wrap=True,fontsize=10)
        plt.ylim(0, 1)
        plt.xlim(0, 1)
        plt.setp(plt.gca(), frame_on=False, xticks=(), yticks=())
        pdf.savefig(figura_testo)
        plt.close()
        # Page 2: Burr existence domain.
        figura_dominio=fun.figura_dominio_burr(LCV,LCA)
        pdf.savefig(figura_dominio)
        plt.close()
        # Page 3: flow-duration curve.
        figura_FDC=fun.figura_FDC_due_assi(x)
        pdf.savefig(figura_FDC)
        plt.close()
    #output = "Puoi visualizzare e scaricare il grafico della curva di durata delle portate all'url: \n %s" %('http://130.192.28.30/wpsoutputs/'+str(nome_grafico))
    #output += "\n"+"Puoi visualizzare e scaricare il Report PDF all'url: \n %s" %(url_report_PDF))
    # Generate MetaLink v3 output.
    ml3 = MetaLink('Report PDF', 'MetaLink', workdir=self.workdir)
    mf = MetaFile('REPORT_PDF.pdf', 'Report PDF CDP', fmt=FORMATS.TEXT)
    mf.url=url_report_PDF
    ml3.append(mf)
    response.outputs['output'].data = ml3.xml
    # ... OR generate MetaLink v4 output (recommended)
    ml4 = MetaLink4('Report PDF', 'MetaLink4', workdir=self.workdir)
    mf = MetaFile('REPORT_PDF.pdf', 'Report PDF CDP', fmt=FORMATS.TEXT)
    mf.file=file_report_PDF
    ml4.append(mf)
    response.outputs['output_meta4'].data = ml4.xml
    response.update_status('PyWPS Process completed.', 100)
    return response
def read2_command(*args, **kwargs):
    """Run a GRASS command with both streams captured.

    Forwards everything to grass.start_command, forcing stdout and stderr
    into pipes, and blocks until the command exits.

    :return: the ``(stdout, stderr)`` pair from ``Popen.communicate()``
    """
    piped = dict(kwargs, stdout=grass.PIPE, stderr=grass.PIPE)
    return grass.start_command(*args, **piped).communicate()
def main():
    """Compute image tiling cutlines that follow edges in the input raster.

    Reads all parameters from the module-level ``options`` dictionary
    (GRASS parser).  An edge map is computed (i.zc or the i.edge addon,
    optionally tiled via GridModule), horizontal and vertical "lane" guide
    lines are drawn, cost surfaces are built so that edge pixels are cheap
    and lane borders expensive, and least-cost paths (r.cost + r.drain)
    between lane endpoints become the cutlines.  The cutlines are finally
    polygonized into the output vector ``options['output']``.
    Temporary map names are collected in the global ``temp_maps`` —
    presumably removed by a cleanup routine registered elsewhere; verify.
    """
    inputraster = options['input']
    number_lines = int(options['number_lines'])
    edge_detection_algorithm = options['edge_detection']
    no_edge_friction = int(options['no_edge_friction'])
    lane_border_multiplier = int(options['lane_border_multiplier'])
    min_tile_size = None
    if options['min_tile_size']:
        min_tile_size = float(options['min_tile_size'])
    existing_cutlines = None
    if options['existing_cutlines']:
        existing_cutlines = options['existing_cutlines'].split(',')
    tiles = options['output']
    memory = int(options['memory'])
    tiled = False
    # Optional tiled processing of the edge detection step.
    if options['tile_width']:
        tiled = True
        gscript.message(_("Using tiles processing for edge detection"))
        width = int(options['tile_width'])
        height = int(options['tile_height'])
        overlap = int(options['overlap'])
    processes = int(options['processes'])
    # temp_maps is global so a cleanup handler outside this function can use it.
    global temp_maps
    temp_maps = []
    r = 'raster'
    v = 'vector'
    # Rasterize user-provided existing cutlines (patched together if several).
    if existing_cutlines:
        existingcutlinesmap = 'temp_icutlines_existingcutlinesmap_%i' % os.getpid()
        if len(existing_cutlines) > 1:
            gscript.run_command('v.patch',
                                input_=existing_cutlines,
                                output=existingcutlinesmap,
                                quiet=True,
                                overwrite=True)
            existing_cutlines = existingcutlinesmap
        gscript.run_command('v.to.rast',
                            input_=existing_cutlines,
                            output=existingcutlinesmap,
                            use='val',
                            type_='line,boundary',
                            overwrite=True,
                            quiet=True)
        temp_maps.append([existingcutlinesmap, r])
    temp_edge_map = "temp_icutlines_edgemap_%d" % os.getpid()
    temp_maps.append([temp_edge_map, r])
    gscript.message(
        _("Creating edge map using <%s> edgedetection algorithm")
        % edge_detection_algorithm)
    if edge_detection_algorithm == 'zc':
        # Zero-crossing edge detection (i.zc, core GRASS).
        kwargs = {
            'input': inputraster,
            'output': temp_edge_map,
            'width_': int(options['zc_width']),
            'threshold': float(options['zc_threshold']),
            'quiet': True
        }
        if tiled:
            grd = GridModule('i.zc',
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             **kwargs)
            grd.run()
        else:
            gscript.run_command('i.zc', **kwargs)
    elif edge_detection_algorithm == 'canny':
        # Canny edge detection needs the i.edge addon.
        if not gscript.find_program('i.edge', '--help'):
            message = _("You need to install the addon i.edge to use ")
            message += _("the Canny edge detector.\n")
            message += _(" You can install the addon with 'g.extension i.edge'")
            gscript.fatal(message)
        kwargs = {
            'input': inputraster,
            'output': temp_edge_map,
            'low_threshold': float(options['canny_low_threshold']),
            'high_threshold': float(options['canny_high_threshold']),
            'sigma': float(options['canny_sigma']),
            'quiet': True
        }
        if tiled:
            grd = GridModule('i.edge',
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             flags='n',
                             **kwargs)
            grd.run()
        else:
            gscript.run_command('i.edge', flags='n', **kwargs)
    else:
        gscript.fatal(
            "Only zero-crossing and Canny available as edge detection algorithms."
        )
    region = gscript.region()
    gscript.message(_("Finding cutlines in both directions"))
    # Distribute the requested number of lines across the two axes in
    # proportion to the region extent.
    nsrange = float(region.n - region.s - region.nsres)
    ewrange = float(region.e - region.w - region.ewres)
    if nsrange > ewrange:
        hnumber_lines = number_lines
        vnumber_lines = max(int(number_lines * (ewrange / nsrange)), 1)
    else:
        vnumber_lines = number_lines
        hnumber_lines = max(int(number_lines * (nsrange / ewrange)), 1)
    # Create the lines in horizonal direction
    nsstep = float(region.n - region.s - region.nsres) / hnumber_lines
    hpointsy = [((region.n - i * nsstep) - region.nsres / 2.0)
                for i in range(0, hnumber_lines + 1)]
    hlanepointsy = [y - nsstep / 2.0 for y in hpointsy]
    # Start/stop points are inset 0.2 cells from the region border.
    hstartpoints = listzip([region.w + 0.2 * region.ewres] * len(hpointsy),
                           hpointsy)
    hstoppoints = listzip([region.e - 0.2 * region.ewres] * len(hpointsy),
                          hpointsy)
    hlanestartpoints = listzip([region.w + 0.2 * region.ewres] * len(hlanepointsy),
                               hlanepointsy)
    hlanestoppoints = listzip([region.e - 0.2 * region.ewres] * len(hlanepointsy),
                              hlanepointsy)
    hlanemap = 'temp_icutlines_hlanemap_%i' % os.getpid()
    temp_maps.append([hlanemap, v])
    temp_maps.append([hlanemap, r])
    # Write the horizontal lane lines with pygrass; silence GRASS while doing so.
    os.environ['GRASS_VERBOSE'] = '0'
    new = VectorTopo(hlanemap)
    new.open('w')
    for line in listzip(hlanestartpoints, hlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ['GRASS_VERBOSE']
    gscript.run_command('v.to.rast',
                        input_=hlanemap,
                        output=hlanemap,
                        use='val',
                        type_='line',
                        overwrite=True,
                        quiet=True)
    hbasemap = 'temp_icutlines_hbasemap_%i' % os.getpid()
    temp_maps.append([hbasemap, r])
    # Building the cost maps using the following logic
    # - Any pixel not on an edge, nor on an existing cutline gets a
    # no_edge_friction cost, or no_edge_friction_cost x 10 if there are
    # existing cutlines
    # - Any pixel on an edge gets a cost of 1 if there are no existing cutlines,
    # and a cost of no_edge_friction if there are
    # - A lane line gets a very high cost (lane_border_multiplier x cost of no
    # edge pixel - the latter depending on the existence of cutlines).
    mapcalc_expression = "%s = " % hbasemap
    mapcalc_expression += "if(isnull(%s), " % hlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map, existingcutlinesmap)
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (existingcutlinesmap,
                                                           no_edge_friction)
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command('r.mapcalc',
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)
    hcumcost = 'temp_icutlines_hcumcost_%i' % os.getpid()
    temp_maps.append([hcumcost, r])
    hdir = 'temp_icutlines_hdir_%i' % os.getpid()
    temp_maps.append([hdir, r])
    # Create the lines in vertical direction
    ewstep = float(region.e - region.w - region.ewres) / vnumber_lines
    vpointsx = [((region.e - i * ewstep) - region.ewres / 2.0)
                for i in range(0, vnumber_lines + 1)]
    vlanepointsx = [x + ewstep / 2.0 for x in vpointsx]
    vstartpoints = listzip(vpointsx,
                           [region.n - 0.2 * region.nsres] * len(vpointsx))
    vstoppoints = listzip(vpointsx,
                          [region.s + 0.2 * region.nsres] * len(vpointsx))
    vlanestartpoints = listzip(vlanepointsx,
                               [region.n - 0.2 * region.nsres] * len(vlanepointsx))
    vlanestoppoints = listzip(vlanepointsx,
                              [region.s + 0.2 * region.nsres] * len(vlanepointsx))
    vlanemap = 'temp_icutlines_vlanemap_%i' % os.getpid()
    temp_maps.append([vlanemap, v])
    temp_maps.append([vlanemap, r])
    os.environ['GRASS_VERBOSE'] = '0'
    new = VectorTopo(vlanemap)
    new.open('w')
    for line in listzip(vlanestartpoints, vlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ['GRASS_VERBOSE']
    gscript.run_command('v.to.rast',
                        input_=vlanemap,
                        output=vlanemap,
                        use='val',
                        type_='line',
                        overwrite=True,
                        quiet=True)
    vbasemap = 'temp_icutlines_vbasemap_%i' % os.getpid()
    temp_maps.append([vbasemap, r])
    # Same cost-map logic as for the horizontal direction.
    mapcalc_expression = "%s = " % vbasemap
    mapcalc_expression += "if(isnull(%s), " % vlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map, existingcutlinesmap)
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (existingcutlinesmap,
                                                           no_edge_friction)
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command('r.mapcalc',
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)
    vcumcost = 'temp_icutlines_vcumcost_%i' % os.getpid()
    temp_maps.append([vcumcost, r])
    vdir = 'temp_icutlines_vdir_%i' % os.getpid()
    temp_maps.append([vdir, r])
    # Cumulative cost surfaces; run the two directions in parallel when allowed.
    if processes > 1:
        # Split the memory budget between the two concurrent r.cost runs.
        pmemory = memory / 2.0
        rcv = gscript.start_command('r.cost',
                                    input_=vbasemap,
                                    startcoordinates=vstartpoints,
                                    stopcoordinates=vstoppoints,
                                    output=vcumcost,
                                    outdir=vdir,
                                    memory=pmemory,
                                    quiet=True,
                                    overwrite=True)
        rch = gscript.start_command('r.cost',
                                    input_=hbasemap,
                                    startcoordinates=hstartpoints,
                                    stopcoordinates=hstoppoints,
                                    output=hcumcost,
                                    outdir=hdir,
                                    memory=pmemory,
                                    quiet=True,
                                    overwrite=True)
        rcv.wait()
        rch.wait()
    else:
        gscript.run_command('r.cost',
                            input_=vbasemap,
                            startcoordinates=vstartpoints,
                            stopcoordinates=vstoppoints,
                            output=vcumcost,
                            outdir=vdir,
                            memory=memory,
                            quiet=True,
                            overwrite=True)
        gscript.run_command('r.cost',
                            input_=hbasemap,
                            startcoordinates=hstartpoints,
                            stopcoordinates=hstoppoints,
                            output=hcumcost,
                            outdir=hdir,
                            memory=memory,
                            quiet=True,
                            overwrite=True)
    hlines = 'temp_icutlines_hlines_%i' % os.getpid()
    temp_maps.append([hlines, r])
    vlines = 'temp_icutlines_vlines_%i' % os.getpid()
    temp_maps.append([vlines, r])
    # Trace the least-cost paths back from the stop points (the cutlines).
    if processes > 1:
        rdh = gscript.start_command('r.drain',
                                    input_=hcumcost,
                                    direction=hdir,
                                    startcoordinates=hstoppoints,
                                    output=hlines,
                                    flags='d',
                                    quiet=True,
                                    overwrite=True)
        rdv = gscript.start_command('r.drain',
                                    input_=vcumcost,
                                    direction=vdir,
                                    startcoordinates=vstoppoints,
                                    output=vlines,
                                    flags='d',
                                    quiet=True,
                                    overwrite=True)
        rdh.wait()
        rdv.wait()
    else:
        gscript.run_command('r.drain',
                            input_=hcumcost,
                            direction=hdir,
                            startcoordinates=hstoppoints,
                            output=hlines,
                            flags='d',
                            quiet=True,
                            overwrite=True)
        gscript.run_command('r.drain',
                            input_=vcumcost,
                            direction=vdir,
                            startcoordinates=vstoppoints,
                            output=vlines,
                            flags='d',
                            quiet=True,
                            overwrite=True)
    # Combine horizonal and vertical lines
    temp_raster_tile_borders = 'temp_icutlines_raster_tile_borders_%i' % os.getpid()
    temp_maps.append([temp_raster_tile_borders, r])
    gscript.run_command('r.patch',
                        input_=[hlines, vlines],
                        output=temp_raster_tile_borders,
                        quiet=True,
                        overwrite=True)
    gscript.message(_("Creating vector polygons"))
    # Create vector polygons
    # First we need to shrink the region a bit to make sure that all vector
    # points / lines fall within the raster
    gscript.use_temp_region()
    gscript.run_command('g.region',
                        s=region.s + region.nsres,
                        e=region.e - region.ewres,
                        quiet=True)
    region_map = 'temp_icutlines_region_map_%i' % os.getpid()
    temp_maps.append([region_map, v])
    temp_maps.append([region_map, r])
    gscript.run_command('v.in.region',
                        output=region_map,
                        type_='line',
                        quiet=True,
                        overwrite=True)
    gscript.del_temp_region()
    gscript.run_command('v.to.rast',
                        input_=region_map,
                        output=region_map,
                        use='val',
                        type_='line',
                        quiet=True,
                        overwrite=True)
    temp_raster_polygons = 'temp_icutlines_raster_polygons_%i' % os.getpid()
    temp_maps.append([temp_raster_polygons, r])
    # Add the region outline so the cutlines close into polygons.
    gscript.run_command('r.patch',
                        input_=[temp_raster_tile_borders, region_map],
                        output=temp_raster_polygons,
                        quiet=True,
                        overwrite=True)
    temp_raster_polygons_thin = 'temp_icutlines_raster_polygons_thin_%i' % os.getpid()
    temp_maps.append([temp_raster_polygons_thin, r])
    gscript.run_command('r.thin',
                        input_=temp_raster_polygons,
                        output=temp_raster_polygons_thin,
                        quiet=True,
                        overwrite=True)
    # Create a series of temporary map names as we have to go
    # through several steps until we reach the final map.
    temp_vector_polygons1 = 'temp_icutlines_vector_polygons1_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons1, v])
    temp_vector_polygons2 = 'temp_icutlines_vector_polygons2_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons2, v])
    temp_vector_polygons3 = 'temp_icutlines_vector_polygons3_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons3, v])
    temp_vector_polygons4 = 'temp_icutlines_vector_polygons4_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons4, v])
    gscript.run_command('r.to.vect',
                        input_=temp_raster_polygons_thin,
                        output=temp_vector_polygons1,
                        type_='line',
                        flags='t',
                        quiet=True,
                        overwrite=True)
    # Erase all category values from the lines
    gscript.run_command('v.category',
                        input_=temp_vector_polygons1,
                        op='del',
                        cat='-1',
                        output=temp_vector_polygons2,
                        quiet=True,
                        overwrite=True)
    # Transform lines to boundaries
    gscript.run_command('v.type',
                        input_=temp_vector_polygons2,
                        from_type='line',
                        to_type='boundary',
                        output=temp_vector_polygons3,
                        quiet=True,
                        overwrite=True)
    # Add centroids
    gscript.run_command('v.centroids',
                        input_=temp_vector_polygons3,
                        output=temp_vector_polygons4,
                        quiet=True,
                        overwrite=True)
    # If a threshold is given erase polygons that are too small
    if min_tile_size:
        gscript.run_command('v.clean',
                            input_=temp_vector_polygons4,
                            tool=['rmdangle', 'rmarea'],
                            threshold=[-1, min_tile_size],
                            output=tiles,
                            quiet=True,
                            overwrite=True)
    else:
        gscript.run_command('g.copy',
                            vect=[temp_vector_polygons4, tiles],
                            quiet=True,
                            overwrite=True)
    gscript.vector_history(tiles)
def main():
    """Pan-sharpen an RGB image with a higher-resolution panchromatic band.

    Reads everything from the GRASS parser globals ``options`` and ``flags``:
    the three multispectral channels and the pan channel are first brought to
    8 bit (copied, converted, or min/max-rescaled depending on bit depth and
    the -r flag), the region is aligned to the pan resolution, and one of the
    brovey/ihs/pca sharpening routines (defined elsewhere in this file) is
    applied.  Output maps <output>_red/_green/_blue get equalized grey color
    tables; temporary 'tmp<pid>*' rasters are removed at the end.
    """
    if not hasNumPy:
        grass.fatal(_("Required dependency NumPy not found. Exiting."))

    sharpen = options['method']  # sharpening algorithm
    ms1_orig = options['blue']  # blue channel
    ms2_orig = options['green']  # green channel
    ms3_orig = options['red']  # red channel
    pan_orig = options['pan']  # high res pan channel
    out = options['output']  # prefix for output RGB maps
    bits = options['bitdepth']  # bit depth of image channels
    bladjust = flags['l']  # adjust blue channel
    sproc = flags['s']  # serial processing
    rescale = flags['r']  # rescale to spread pixel values to entire 0-255 range

    # Checking bit depth
    bits = float(bits)
    if bits < 2 or bits > 30:
        grass.warning(_("Bit depth is outside acceptable range"))
        return

    # Refuse to clobber existing outputs unless --overwrite is given.
    outb = grass.core.find_file('%s_blue' % out)
    outg = grass.core.find_file('%s_green' % out)
    outr = grass.core.find_file('%s_red' % out)
    if (outb['name'] != '' or outg['name'] != ''
            or outr['name'] != '') and not grass.overwrite():
        grass.warning(
            _('Maps with selected output prefix names already exist.'
              ' Delete them or use overwrite flag'))
        return

    pid = str(os.getpid())

    # convert input image channels to 8 bit for processing
    ms1 = 'tmp%s_ms1' % pid
    ms2 = 'tmp%s_ms2' % pid
    ms3 = 'tmp%s_ms3' % pid
    pan = 'tmp%s_pan' % pid

    # IDIOM: 'rescale == False' replaced with 'not rescale'.
    if not rescale:
        if bits == 8:
            # Already 8 bit: plain copies are enough.
            grass.message(_("Using 8bit image channels"))
            if sproc:
                # serial processing
                grass.run_command('g.copy',
                                  raster='%s,%s' % (ms1_orig, ms1),
                                  quiet=True,
                                  overwrite=True)
                grass.run_command('g.copy',
                                  raster='%s,%s' % (ms2_orig, ms2),
                                  quiet=True,
                                  overwrite=True)
                grass.run_command('g.copy',
                                  raster='%s,%s' % (ms3_orig, ms3),
                                  quiet=True,
                                  overwrite=True)
                grass.run_command('g.copy',
                                  raster='%s,%s' % (pan_orig, pan),
                                  quiet=True,
                                  overwrite=True)
            else:
                # parallel processing
                pb = grass.start_command('g.copy',
                                         raster='%s,%s' % (ms1_orig, ms1),
                                         quiet=True,
                                         overwrite=True)
                pg = grass.start_command('g.copy',
                                         raster='%s,%s' % (ms2_orig, ms2),
                                         quiet=True,
                                         overwrite=True)
                pr = grass.start_command('g.copy',
                                         raster='%s,%s' % (ms3_orig, ms3),
                                         quiet=True,
                                         overwrite=True)
                pp = grass.start_command('g.copy',
                                         raster='%s,%s' % (pan_orig, pan),
                                         quiet=True,
                                         overwrite=True)
                pb.wait()
                pg.wait()
                pr.wait()
                pp.wait()
        else:
            # Assume a full 0..2^bits-1 range and squeeze it into 0..255.
            grass.message(_("Converting image chanels to 8bit for processing"))
            maxval = pow(2, bits) - 1
            if sproc:
                # serial processing
                grass.run_command('r.rescale',
                                  input=ms1_orig,
                                  from_='0,%f' % maxval,
                                  output=ms1,
                                  to='0,255',
                                  quiet=True,
                                  overwrite=True)
                grass.run_command('r.rescale',
                                  input=ms2_orig,
                                  from_='0,%f' % maxval,
                                  output=ms2,
                                  to='0,255',
                                  quiet=True,
                                  overwrite=True)
                grass.run_command('r.rescale',
                                  input=ms3_orig,
                                  from_='0,%f' % maxval,
                                  output=ms3,
                                  to='0,255',
                                  quiet=True,
                                  overwrite=True)
                grass.run_command('r.rescale',
                                  input=pan_orig,
                                  from_='0,%f' % maxval,
                                  output=pan,
                                  to='0,255',
                                  quiet=True,
                                  overwrite=True)
            else:
                # parallel processing
                pb = grass.start_command('r.rescale',
                                         input=ms1_orig,
                                         from_='0,%f' % maxval,
                                         output=ms1,
                                         to='0,255',
                                         quiet=True,
                                         overwrite=True)
                pg = grass.start_command('r.rescale',
                                         input=ms2_orig,
                                         from_='0,%f' % maxval,
                                         output=ms2,
                                         to='0,255',
                                         quiet=True,
                                         overwrite=True)
                pr = grass.start_command('r.rescale',
                                         input=ms3_orig,
                                         from_='0,%f' % maxval,
                                         output=ms3,
                                         to='0,255',
                                         quiet=True,
                                         overwrite=True)
                pp = grass.start_command('r.rescale',
                                         input=pan_orig,
                                         from_='0,%f' % maxval,
                                         output=pan,
                                         to='0,255',
                                         quiet=True,
                                         overwrite=True)
                pb.wait()
                pg.wait()
                pr.wait()
                pp.wait()
    else:
        # -r flag: stretch each channel's actual min..max to 0..255.
        grass.message(_("Rescaling image chanels to 8bit for processing"))

        min_ms1 = int(grass.raster_info(ms1_orig)['min'])
        max_ms1 = int(grass.raster_info(ms1_orig)['max'])
        min_ms2 = int(grass.raster_info(ms2_orig)['min'])
        max_ms2 = int(grass.raster_info(ms2_orig)['max'])
        min_ms3 = int(grass.raster_info(ms3_orig)['min'])
        max_ms3 = int(grass.raster_info(ms3_orig)['max'])
        min_pan = int(grass.raster_info(pan_orig)['min'])
        max_pan = int(grass.raster_info(pan_orig)['max'])

        maxval = pow(2, bits) - 1
        if sproc:
            # serial processing
            grass.run_command('r.rescale',
                              input=ms1_orig,
                              from_='%f,%f' % (min_ms1, max_ms1),
                              output=ms1,
                              to='0,255',
                              quiet=True,
                              overwrite=True)
            grass.run_command('r.rescale',
                              input=ms2_orig,
                              from_='%f,%f' % (min_ms2, max_ms2),
                              output=ms2,
                              to='0,255',
                              quiet=True,
                              overwrite=True)
            grass.run_command('r.rescale',
                              input=ms3_orig,
                              from_='%f,%f' % (min_ms3, max_ms3),
                              output=ms3,
                              to='0,255',
                              quiet=True,
                              overwrite=True)
            grass.run_command('r.rescale',
                              input=pan_orig,
                              from_='%f,%f' % (min_pan, max_pan),
                              output=pan,
                              to='0,255',
                              quiet=True,
                              overwrite=True)
        else:
            # parallel processing
            pb = grass.start_command('r.rescale',
                                     input=ms1_orig,
                                     from_='%f,%f' % (min_ms1, max_ms1),
                                     output=ms1,
                                     to='0,255',
                                     quiet=True,
                                     overwrite=True)
            pg = grass.start_command('r.rescale',
                                     input=ms2_orig,
                                     from_='%f,%f' % (min_ms2, max_ms2),
                                     output=ms2,
                                     to='0,255',
                                     quiet=True,
                                     overwrite=True)
            pr = grass.start_command('r.rescale',
                                     input=ms3_orig,
                                     from_='%f,%f' % (min_ms3, max_ms3),
                                     output=ms3,
                                     to='0,255',
                                     quiet=True,
                                     overwrite=True)
            pp = grass.start_command('r.rescale',
                                     input=pan_orig,
                                     from_='%f,%f' % (min_pan, max_pan),
                                     output=pan,
                                     to='0,255',
                                     quiet=True,
                                     overwrite=True)
            pb.wait()
            pg.wait()
            pr.wait()
            pp.wait()

    # get PAN resolution:
    kv = grass.raster_info(map=pan)
    nsres = kv['nsres']
    ewres = kv['ewres']
    panres = (nsres + ewres) / 2

    # clone current region
    grass.use_temp_region()
    grass.run_command('g.region', res=panres, align=pan)

    # Select sharpening method
    grass.message(
        _("Performing pan sharpening with hi res pan image: %f" % panres))
    if sharpen == "brovey":
        brovey(pan, ms1, ms2, ms3, out, pid, sproc)
    elif sharpen == "ihs":
        ihs(pan, ms1, ms2, ms3, out, pid, sproc)
    elif sharpen == "pca":
        pca(pan, ms1, ms2, ms3, out, pid, sproc)
    # Could add other sharpening algorithms here, e.g. wavelet transformation

    grass.message(
        _("Assigning grey equalized color tables to output images..."))
    # equalized grey scales give best contrast
    grass.message(_("setting pan-sharpened channels to equalized grey scale"))
    for ch in ['red', 'green', 'blue']:
        grass.run_command('r.colors',
                          quiet=True,
                          map="%s_%s" % (out, ch),
                          flags="e",
                          color='grey')

    # Landsat too blue-ish because panchromatic band less sensitive to blue
    # light, so output blue channed can be modified
    if bladjust:
        grass.message(_("Adjusting blue channel color table..."))
        blue_colors = ['0 0 0 0\n5% 0 0 0\n67% 255 255 255\n100% 255 255 255']
        # these previous colors are way too blue for landsat
        # blue_colors = ['0 0 0 0\n10% 0 0 0\n20% 200 200 200\n40% 230 230 230\n67% 255 255 255\n100% 255 255 255']
        bc = grass.feed_command('r.colors',
                                quiet=True,
                                map="%s_blue" % out,
                                rules="-")
        # BUGFIX: the subprocess pipe is a byte stream on Python 3; writing a
        # str raised TypeError.  Encode the rules before feeding them.
        bc.stdin.write(grass.encode('\n'.join(blue_colors)))
        bc.stdin.close()
        # BUGFIX: reap the r.colors process instead of leaving it behind.
        bc.wait()

    # output notice
    grass.verbose(
        _("The following pan-sharpened output maps have been generated:"))
    for ch in ['red', 'green', 'blue']:
        grass.verbose(_("%s_%s") % (out, ch))

    grass.verbose(
        _("To visualize output, run: g.region -p raster=%s_red" % out))
    grass.verbose(_("d.rgb r=%s_red g=%s_green b=%s_blue" % (out, out, out)))
    grass.verbose(
        _("If desired, combine channels into a single RGB map with 'r.composite'."
          ))
    grass.verbose(
        _("Channel colors can be rebalanced using i.colors.enhance."))

    # write cmd history:
    for ch in ['red', 'green', 'blue']:
        grass.raster_history("%s_%s" % (out, ch))

    # create a group with the three outputs
    #grass.run_command('i.group', group=out,
    #                  input="{n}_red,{n}_blue,{n}_green".format(n=out))

    # Cleanup
    grass.message(_("cleaning up temp files"))
    try:
        grass.run_command('g.remove',
                          flags="f",
                          type="raster",
                          pattern="tmp%s*" % pid,
                          quiet=True)
    except Exception:
        # Best-effort cleanup: a failure here must not mask a successful run,
        # but we no longer swallow SystemExit/KeyboardInterrupt.
        pass
# Import every per-year GeoTIFF with r.in.gdal, keeping at most WORKERS
# imports running concurrently.
workers = int(os.environ["WORKERS"])
if workers < 1:
    workers = 1
proc = {}
for year in years:
    i = 0
    for f in glob.glob(str(year) + '/tif-' + args.product + '-qa/' + '*.tif'):
        i += 1
        proc[i] = grass.start_command('r.in.gdal',
                                      input_=f,
                                      output=str(year) + '_' + str(i),
                                      overwrite=True)
        # BUGFIX: the original used 'i % workers is 0' — identity comparison
        # on ints is an implementation detail; '==' is the correct test.
        if i % workers == 0:
            # Throttle: wait for the last full batch before starting more.
            for j in range(workers):
                proc[i - j].wait()
    # BUGFIX: also wait for the final, possibly partial batch so no import is
    # still running when the maps are processed below.
    for j in range(i % workers):
        proc[i - j].wait()
#Calculate counts and medians for N time slices
def import_stds(input, output, directory, title=None, descr=None, location=None,
                link=False, exp=False, overr=False, create=False,
                stds_type="strds", base=None, set_current_region=False):
    """Import space time datasets of type raster and vector

        :param input: Name of the input archive file
        :param output: The name of the output space time dataset
        :param directory: The extraction directory
        :param title: The title of the new created space time dataset
        :param descr: The description of the new created
                     space time dataset
        :param location: The name of the location that should be created,
                        maps are imported into this location
        :param link: Switch to link raster maps instead importing them
        :param exp: Extend location extents based on new dataset
        :param overr: Override projection (use location's projection)
        :param create: Create the location specified by the "location"
                      parameter and exit.
                      Do not import the space time datasets.
        :param stds_type: The type of the space time dataset that
                         should be imported
        :param base: The base name of the new imported maps, it will be
                    extended using a numerical index.
        :param set_current_region: Switch to set the current region to the
                                  extent of the imported dataset
                                  (passed through to the map importers)
    """
    global raise_on_error
    old_state = gscript.raise_on_error
    gscript.set_raise_on_error(True)

    # Check if input file and extraction directory exits
    if not os.path.exists(input):
        gscript.fatal(_("Space time raster dataset archive <%s> not found")
                      % input)
    if not create and not os.path.exists(directory):
        gscript.fatal(_("Extraction directory <%s> not found") % directory)

    tar = tarfile.open(name=input, mode='r')

    # Check for important files
    msgr = get_tgis_message_interface()
    msgr.message(_("Checking validity of input file (size: %0.1f MB). May "
                   "take a while..." % (os.path.getsize(input)/(1024*1024.0))))
    members = tar.getnames()
    # Make sure that the basenames of the files are used for comparison
    member_basenames = [os.path.basename(name) for name in members]

    if init_file_name not in member_basenames:
        gscript.fatal(_("Unable to find init file <%s>") % init_file_name)
    if list_file_name not in member_basenames:
        gscript.fatal(_("Unable to find list file <%s>") % list_file_name)
    if proj_file_name not in member_basenames:
        gscript.fatal(_("Unable to find projection file <%s>")
                      % proj_file_name)

    msgr.message(_("Extracting data..."))
    tar.extractall(path=directory)
    tar.close()

    # We use a new list file name for map registration
    new_list_file_name = list_file_name + "_new"
    # Save current working directory path
    old_cwd = os.getcwd()

    # Switch into the data directory
    os.chdir(directory)

    # Check projection information
    if not location:
        temp_name = gscript.tempfile()
        temp_file = open(temp_name, "w")
        proj_name = os.path.abspath(proj_file_name)

        # We need to convert projection strings generated
        # from other programms than g.proj into
        # new line format so that the grass file comparison function
        # can be used to compare the projections
        proj_name_tmp = temp_name + "_in_projection"
        proj_file = open(proj_name, "r")
        proj_content = proj_file.read()
        proj_content = proj_content.replace(" +", "\n+")
        proj_content = proj_content.replace("\t+", "\n+")
        proj_file.close()

        proj_file = open(proj_name_tmp, "w")
        proj_file.write(proj_content)
        proj_file.close()

        p = gscript.start_command("g.proj", flags="j", stdout=temp_file)
        p.communicate()
        temp_file.close()

        if not gscript.compare_key_value_text_files(temp_name, proj_name_tmp,
                                                    sep="="):
            if overr:
                gscript.warning(_("Projection information does not match. "
                                  "Proceeding..."))
            else:
                diff = ''.join(gscript.diff_files(temp_name, proj_name))
                gscript.warning(_("Difference between PROJ_INFO file of "
                                  "imported map and of current location:"
                                  "\n{diff}").format(diff=diff))
                gscript.fatal(_("Projection information does not match. "
                                "Aborting."))

    # Create a new location based on the projection information and switch
    # into it
    old_env = gscript.gisenv()
    if location:
        try:
            proj4_string = open(proj_file_name, 'r').read()
            gscript.create_location(dbase=old_env["GISDBASE"],
                                    location=location,
                                    proj4=proj4_string)
            # Just create a new location and return
            if create:
                os.chdir(old_cwd)
                return
        except Exception as e:
            gscript.fatal(_("Unable to create location %(l)s. Reason: %(e)s")
                          % {'l': location, 'e': str(e)})
        # Switch to the new created location
        try:
            gscript.run_command("g.mapset", mapset="PERMANENT",
                                location=location,
                                dbase=old_env["GISDBASE"])
        except CalledModuleError:
            gscript.fatal(_("Unable to switch to location %s") % location)
        # create default database connection
        try:
            gscript.run_command("t.connect", flags="d")
        except CalledModuleError:
            gscript.fatal(_("Unable to create default temporal database "
                            "in new location %s") % location)

    try:
        # Make sure the temporal database exists
        factory.init()

        fs = "|"
        maplist = []
        mapset = get_current_mapset()
        list_file = open(list_file_name, "r")
        new_list_file = open(new_list_file_name, "w")

        # get number of lines to correctly form the suffix
        max_count = -1
        for max_count, l in enumerate(list_file):
            pass
        max_count += 1
        list_file.seek(0)

        # Read the map list from file
        line_count = 0
        while True:
            line = list_file.readline()
            if not line:
                break

            line_list = line.split(fs)

            # The filename is actually the base name of the map
            # that must be extended by the file suffix
            filename = line_list[0].strip().split(":")[0]
            if base:
                mapname = "%s_%s" % (base,
                                     gscript.get_num_suffix(line_count + 1,
                                                            max_count))
                mapid = "%s@%s" % (mapname, mapset)
            else:
                mapname = filename
                mapid = mapname + "@" + mapset

            row = {}
            row["filename"] = filename
            row["name"] = mapname
            row["id"] = mapid
            row["start"] = line_list[1].strip()
            row["end"] = line_list[2].strip()

            new_list_file.write("%s%s%s%s%s\n" % (mapname, fs, row["start"],
                                                  fs, row["end"]))

            maplist.append(row)
            line_count += 1

        list_file.close()
        new_list_file.close()

        # Read the init file
        fs = "="
        init = {}
        init_file = open(init_file_name, "r")
        while True:
            line = init_file.readline()
            if not line:
                break

            kv = line.split(fs)
            init[kv[0]] = kv[1].strip()

        init_file.close()

        if "temporal_type" not in init or \
           "semantic_type" not in init or \
           "number_of_maps" not in init:
            gscript.fatal(_("Key words %(t)s, %(s)s or %(n)s not found in init"
                            " file.") % {'t': "temporal_type",
                                         's': "semantic_type",
                                         'n': "number_of_maps"})

        if line_count != int(init["number_of_maps"]):
            gscript.fatal(_("Number of maps mismatch in init and list file."))

        format_ = "GTiff"
        type_ = "strds"

        if "stds_type" in init:
            type_ = init["stds_type"]
        if "format" in init:
            format_ = init["format"]

        if stds_type != type_:
            gscript.fatal(_("The archive file is of wrong space time dataset"
                            " type"))

        # Check the existence of the files
        if format_ == "GTiff":
            for row in maplist:
                filename = row["filename"] + ".tif"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find GeoTIFF raster file "
                                    "<%s> in archive.") % filename)
        elif format_ == "AAIGrid":
            for row in maplist:
                filename = row["filename"] + ".asc"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find AAIGrid raster file "
                                    "<%s> in archive.") % filename)
        elif format_ == "GML":
            for row in maplist:
                filename = row["filename"] + ".xml"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find GML vector file "
                                    "<%s> in archive.") % filename)
        elif format_ == "pack":
            for row in maplist:
                if type_ == "stvds":
                    filename = str(row["filename"].split(":")[0]) + ".pack"
                else:
                    filename = row["filename"] + ".pack"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find GRASS package file "
                                    "<%s> in archive.") % filename)
        else:
            gscript.fatal(_("Unsupported input format"))

        # Check the space time dataset
        # (local renamed from "id", which shadowed the builtin)
        stds_id = output + "@" + mapset
        sp = dataset_factory(type_, stds_id)
        if sp.is_in_db() and gscript.overwrite() is False:
            gscript.fatal(_("Space time %(t)s dataset <%(sp)s> is already in"
                            " the database. Use the overwrite flag.") %
                          {'t': type_, 'sp': sp.get_id()})

        # Import the maps
        if type_ == "strds":
            if format_ == "GTiff" or format_ == "AAIGrid":
                _import_raster_maps_from_gdal(maplist, overr, exp, location,
                                              link, format_,
                                              set_current_region)
            if format_ == "pack":
                _import_raster_maps(maplist, set_current_region)
        elif type_ == "stvds":
            if format_ == "GML":
                _import_vector_maps_from_gml(
                    maplist, overr, exp, location, link)
            if format_ == "pack":
                _import_vector_maps(maplist)

        # Create the space time dataset
        if sp.is_in_db() and gscript.overwrite() is True:
            gscript.info(_("Overwrite space time %(sp)s dataset "
                           "<%(id)s> and unregister all maps.") %
                         {'sp': sp.get_new_map_instance(None).get_type(),
                          'id': sp.get_id()})
            sp.delete()
            sp = sp.get_new_instance(stds_id)

        temporal_type = init["temporal_type"]
        semantic_type = init["semantic_type"]
        relative_time_unit = None
        if temporal_type == "relative":
            if "relative_time_unit" not in init:
                gscript.fatal(_("Key word %s not found in init file.") %
                              ("relative_time_unit"))
            relative_time_unit = init["relative_time_unit"]
            sp.set_relative_time_unit(relative_time_unit)

        gscript.verbose(_("Create space time %s dataset.") %
                        sp.get_new_map_instance(None).get_type())

        sp.set_initial_values(temporal_type=temporal_type,
                              semantic_type=semantic_type, title=title,
                              description=descr)
        sp.insert()

        # register the maps
        fs = "|"
        register_maps_in_space_time_dataset(
            type=sp.get_new_map_instance(None).get_type(),
            name=output, file=new_list_file_name, start="file",
            end="file", unit=relative_time_unit, dbif=None, fs=fs,
            update_cmd_list=False)

        os.chdir(old_cwd)

    # Make sure the location is switched back correctly
    # (a redundant "except: raise" was removed; try/finally is sufficient)
    finally:
        if location:
            # Switch to the old location
            try:
                # NOTE(review): the g.mapset call earlier in this function
                # passes the database path as "dbase"; confirm which keyword
                # the installed g.mapset expects and use it consistently.
                gscript.run_command("g.mapset", mapset=old_env["MAPSET"],
                                    location=old_env["LOCATION_NAME"],
                                    gisdbase=old_env["GISDBASE"])
            except CalledModuleError:
                # was grass.warning(): "grass" is not defined in this module
                # (the script API is imported as gscript), so the handler
                # itself raised a NameError and masked the real failure
                gscript.warning(_("Switching to original location failed"))

        gscript.set_raise_on_error(old_state)
def main():
    """Drive the v.surf.icw Inverse Cost Weighted interpolation.

    Reads its configuration from the module-level parser results
    (``options``/``flags``): builds a per-site cost surface with r.cost,
    turns each into a decay weight, normalizes by the sum of weights and
    sums the partial products into the output raster. Runs up to
    ``workers`` GRASS commands in parallel.
    """
    pts_input = options["input"]
    output = options["output"]
    cost_map = options["cost_map"]
    post_mask = options["post_mask"]
    column = options["column"]
    friction = float(options["friction"])
    layer = options["layer"]
    where = options["where"]
    workers = int(options["workers"])

    # WORKERS environment variable overrides the default option value only
    if workers == 1 and "WORKERS" in os.environ:
        workers = int(os.environ["WORKERS"])
    if workers < 1:
        workers = 1

    pid = str(os.getpid())
    tmp_base = "tmp_icw_" + pid + "_"

    # do the maps exist?
    if not grass.find_file(pts_input, element="vector")["file"]:
        grass.fatal(_("Vector map <%s> not found") % pts_input)
    if post_mask:
        if grass.find_file("MASK")["file"]:
            grass.fatal(
                _("A MASK already exists; remove it before using the post_mask option.")
            )
        if not grass.find_file(post_mask)["file"]:
            grass.fatal(_("Raster map <%s> not found") % post_mask)

    grass.verbose(_("v.surf.icw -- Inverse Cost Weighted Interpolation"))
    grass.verbose(
        _("Processing %s -> %s, column=%s, Cf=%g")
        % (pts_input, output, column, friction)
    )

    if flags["r"]:
        grass.verbose(_("Using (d^n)*log(d) radial basis function."))

    grass.verbose(
        "------------------------------------------------------------------------"
    )

    # adjust so that tiny numbers don't hog all the FP precision space
    # if friction = 4: divisor ~ 10.0
    # if friction = 5: divisor ~ 100.0
    # if friction = 6: divisor ~ 500.0
    if friction >= 4:
        divisor = 0.01 * pow(friction, 6)
    else:
        divisor = 1

    # Check that we have the column and it is the correct type
    try:
        coltype = grass.vector_columns(pts_input, layer)[column]
    except KeyError:
        grass.fatal(
            _("Data column <%s> not found in vector points map <%s>")
            % (column, pts_input)
        )

    if coltype["type"] not in ("INTEGER", "DOUBLE PRECISION"):
        # (typo fixed: was "numberic")
        grass.fatal(_("Data column must be numeric"))

    # cleanse cost area mask to a flat =1 for my porpoises
    area_mask = tmp_base + "area"
    grass.mapcalc(
        "$result = if($cost_map, 1, null())",
        result=area_mask,
        cost_map=cost_map,
        quiet=True,
    )

    ## done with prep work,
    ########################################################################
    ## Commence crunching ..

    # crop out only points in region
    addl_opts = {}
    if where:
        addl_opts["where"] = "%s" % where

    points_list = grass.read_command(
        "v.out.ascii", input=pts_input, output="-", flags="r", **addl_opts
    ).splitlines()

    # Needed to strip away empty entries from MS Windows newlines
    # list() is needed for Python 3 compatibility
    points_list = list([_f for _f in points_list if _f])

    # convert into a 2D list, drop unneeded cat column
    # to drop cat col, add this to the end of the line [:-1]
    # fixme: how does this all react for 3D starting points?
    for i in range(len(points_list)):
        points_list[i] = points_list[i].split("|")

    # count number of starting points (n). This value will later be decremented
    # if points are found to be off the cost map or out of region.
    n = len(points_list)

    if n > 200:
        grass.warning(
            _(
                "Computation is expensive! Please consider "
                + "fewer points or get ready to wait a while ..."
            )
        )
        import time

        time.sleep(5)

    #### generate cost maps for each site in range
    grass.message(_("Generating cost maps ..."))

    # avoid do-it-yourself brain surgery: iterate over a copy while we
    # delete skipped sites from the live list
    points_list_orig = list(points_list)

    proc = {}
    num = 1
    for i in range(n):
        position = points_list_orig[i]
        easting = position[0]
        northing = position[1]
        cat = int(position[-1])

        # retrieve data value from vector's attribute table:
        data_value = grass.vector_db_select(pts_input, columns=column)["values"][cat][0]

        if not data_value:
            grass.message(
                _("Site %d of %d, e=%.4f n=%.4f cat=%d data=?")
                % (num, n, float(easting), float(northing), cat)
            )
            grass.message(_(" -- Skipping, no data here."))
            del points_list[num - 1]
            n -= 1
            continue
        else:
            grass.message(
                _("Site %d of %d, e=%.4f n=%.4f cat=%d data=%.8g")
                % (num, n, float(easting), float(northing), cat, float(data_value))
            )

        # we know the point is in the region, but is it in a non-null area of the cost surface?
        rast_val = (
            grass.read_command(
                "r.what",
                map=area_mask,
                coordinates="%s,%s" % (position[0], position[1]),
            )
            .strip()
            .split("|")[-1]
        )
        if rast_val == "*":
            grass.message(_(" -- Skipping, point lays outside of cost_map."))
            del points_list[num - 1]
            n -= 1
            continue

        # it's ok to proceed
        try:
            data_value = float(data_value)
        except ValueError:
            grass.fatal("Data value [%s] is non-numeric" % data_value)

        cost_site_name = tmp_base + "cost_site." + "%05d" % num
        proc[num - 1] = grass.start_command(
            "r.cost",
            flags="k",
            input=area_mask,
            output=cost_site_name,
            start_coordinates=easting + "," + northing,
            quiet=True,
        )
        # stall to wait for the nth worker to complete,
        if num % workers == 0:
            proc[num - 1].wait()

        num += 1

    # make sure everyone is finished
    for i in range(n):
        if proc[i].wait() != 0:
            grass.fatal(_("Problem running %s") % "r.cost")

    grass.message(_("Removing anomalies at site positions ..."))

    proc = {}
    for i in range(n):
        cost_site_name = tmp_base + "cost_site." + "%05d" % (i + 1)
        # max_cost="$GIS_OPT_MAX_COST" : commented out until r.null cleansing/continue code is sorted out
        # start_points=tmp_idw_cost_site_$$

        # we do this so the divisor exists and the weighting is huge at the exact sample spots
        # more efficient to reclass to 1?
        proc[i] = grass.mapcalc_start(
            "$cost_n_cleansed = if($cost_n == 0, 0.1, $cost_n)",
            cost_n_cleansed=cost_site_name + ".cleansed",
            cost_n=cost_site_name,
            quiet=True,
        )
        # stall to wait for the nth worker to complete,
        if (i + 1) % workers == 0:
            # print 'stalling ...'
            proc[i].wait()

    # make sure everyone is finished
    for i in range(n):
        if proc[i].wait() != 0:
            grass.fatal(_("Problem running %s") % "r.mapcalc")

    grass.message(_("Applying radial decay ..."))

    proc = {}
    for i in range(n):
        cost_site_name = tmp_base + "cost_site." + "%05d" % (i + 1)
        grass.run_command(
            "g.remove", flags="f", type="raster", name=cost_site_name, quiet=True
        )
        grass.run_command(
            "g.rename",
            raster=cost_site_name + ".cleansed" + "," + cost_site_name,
            quiet=True,
        )

        # r.to.vect then r.patch output
        # v.to.rast in=tmp_idw_cost_site_29978 out=tmp_idw_cost_val_$$ use=val val=10

        if not flags["r"]:
            # exp(3,2) is 3^2 etc. as is pow(3,2)
            # r.mapcalc "1by_cost_site_sqrd.$NUM = 1.0 / exp(cost_site.$NUM , $FRICTION)"
            # EXPRESSION="1.0 / pow(cost_site.$NUM $DIVISOR, $FRICTION )"
            expr = "1.0 / pow($cost_n / " + str(divisor) + ", $friction)"
        else:
            # use log10() or ln() ?
            # EXPRESSION="1.0 / ( pow(cost_site.$NUM, $FRICTION) * log (cost_site.$NUM) )"
            # fixed: a stray trailing double-quote was embedded in this
            # expression string, which r.mapcalc cannot parse
            expr = "1.0 / ( pow($cost_n, $friction) * log($cost_n) )"

        grass.debug("r.mapcalc expression is: [%s]" % expr)

        one_by_cost_site_sq_n = tmp_base + "1by_cost_site_sq." + "%05d" % (i + 1)

        proc[i] = grass.mapcalc_start(
            "$result = " + expr,
            result=one_by_cost_site_sq_n,
            cost_n=cost_site_name,
            friction=friction,
            quiet=True,
        )
        # stall to wait for the nth worker to complete,
        if (i + 1) % workers == 0:
            # print 'stalling ...'
            proc[i].wait()

        # r.patch in=1by_cost_site_sqrd.${NUM},tmp_idw_cost_val_$$ out=1by_cost_site_sqrd.${NUM} --o
        # g.remove type=rast name=cost_site.$NUM -f

    # make sure everyone is finished
    for i in range(n):
        if proc[i].wait() != 0:
            grass.fatal(_("Problem running %s") % "r.mapcalc")

    grass.run_command(
        "g.remove",
        flags="f",
        type="raster",
        pattern=tmp_base + "cost_site.*",
        quiet=True,
    )
    # grass.run_command('g.list', type = 'raster', mapset = '.')

    #######################################################
    #### Step 3) find sum(cost^2)
    grass.verbose("")
    grass.verbose(_("Finding sum of squares ..."))

    # todo: test if MASK exists already, fatal exit if it does?
    if post_mask:
        # fixed: post_mask was passed as grass.message()'s second positional
        # argument (the flag) instead of being %-formatted into the string
        grass.message(_("Setting post_mask <%s>") % post_mask)
        grass.mapcalc("MASK = $maskmap", maskmap=post_mask, overwrite=True)

    grass.message(_("Summation of cost weights ..."))

    global TMP_FILE
    TMP_FILE = grass.tempfile()
    with open(TMP_FILE, "w") as maplist:
        # fixed: the list written for r.series must include map 00001 too;
        # it was dropped when the comma-separated input list was converted
        # to a file list (the loop previously started at 2), so the first
        # site's weight was missing from the normalizing sum
        for i in range(1, n + 1):
            mapname = "%s1by_cost_site_sq.%05d" % (tmp_base, i)
            maplist.write(mapname + "\n")

    # grass.run_command('g.list', type = 'raster', mapset = '.')

    sum_of_1by_cost_sqs = tmp_base + "sum_of_1by_cost_sqs"
    try:
        grass.run_command(
            "r.series", method="sum", file=TMP_FILE, output=sum_of_1by_cost_sqs
        )
    except CalledModuleError:
        grass.fatal(_("Problem running %s") % "r.series")

    if post_mask:
        # fixed: same grass.message() positional-argument misuse as above
        grass.message(_("Removing post_mask <%s>") % post_mask)
        grass.run_command("g.remove", flags="f", name="MASK", quiet=True)

    #######################################################
    #### Step 4) ( 1/di^2 / sum(1/d^2) ) * ai
    grass.verbose("")
    grass.message(_("Creating partial weights ..."))

    proc = {}
    num = 1
    for position in points_list:
        easting = position[0]
        northing = position[1]
        cat = int(position[-1])
        data_value = grass.vector_db_select(pts_input, columns=column)["values"][cat][0]
        data_value = float(data_value)

        # failsafe: at this point the data values should all be valid
        if not data_value:
            grass.message(_("Site %d of %d, cat = %d, data value = ?") % (num, n, cat))
            grass.message(_(" -- Skipping, no data here. [Probably programmer error]"))
            n -= 1
            continue
        else:
            grass.message(
                _("Site %d of %d, cat = %d, data value = %.8g")
                % (num, n, cat, data_value)
            )

        # we know the point is in the region, but is it in a non-null area of the cost surface?
        rast_val = (
            grass.read_command(
                "r.what",
                map=area_mask,
                coordinates="%s,%s" % (position[0], position[1]),
            )
            .strip()
            .split("|")[-1]
        )
        if rast_val == "*":
            grass.message(
                _(
                    " -- Skipping, point lays outside of cost_map. [Probably programmer error]"
                )
            )
            n -= 1
            continue

        partial_n = tmp_base + "partial." + "%05d" % num
        one_by_cost_site_sq = tmp_base + "1by_cost_site_sq." + "%05d" % num

        # "( $DATA_VALUE / $N ) * (1.0 - ( cost_sq_site.$NUM / sum_of_cost_sqs ))"
        # "( cost_sq_site.$NUM / sum_of_cost_sqs ) * ( $DATA_VALUE / $N )"

        proc[num - 1] = grass.mapcalc_start(
            "$partial_n = ($data * $one_by_cost_sq) / $sum_of_1by_cost_sqs",
            partial_n=partial_n,
            data=data_value,
            one_by_cost_sq=one_by_cost_site_sq,
            sum_of_1by_cost_sqs=sum_of_1by_cost_sqs,
            quiet=True,
        )
        # stall to wait for the nth worker to complete,
        if num % workers == 0:
            proc[num - 1].wait()

        # free up disk space ASAP
        # grass.run_command('g.remove', flags = 'f', type = 'raster', name = one_by_cost_site_sq, quiet = True)

        num += 1
        if num > n:
            break

    # make sure everyone is finished
    for i in range(n):
        proc[i].wait()

    # free up disk space ASAP
    grass.run_command(
        "g.remove",
        flags="f",
        type="raster",
        pattern=tmp_base + "1by_cost_site_sq.*",
        quiet=True,
    )
    # grass.run_command('g.list', type = 'raster', mapset = '.')

    #######################################################
    grass.message("")
    grass.message(_("Calculating final values ..."))

    input_maps = tmp_base + "partial.%05d" % 1
    for i in range(2, n + 1):
        input_maps += ",%spartial.%05d" % (tmp_base, i)

    try:
        grass.run_command("r.series", method="sum", input=input_maps, output=output)
    except CalledModuleError:
        grass.fatal(_("Problem running %s") % "r.series")

    # TODO: r.patch in v.to.rast of values at exact seed site locations. currently set to null

    grass.run_command("r.colors", map=output, color="bcyr", quiet=True)
    grass.run_command(
        "r.support", map=output, history="", title="Inverse cost-weighted interpolation"
    )
    grass.run_command("r.support", map=output, history="v.surf.icw interpolation:")
    grass.run_command(
        "r.support",
        map=output,
        history=" input map=" + pts_input + " attribute column=" + column,
    )
    grass.run_command(
        "r.support",
        map=output,
        history=" cost map=" + cost_map + " coefficient of friction=" + str(friction),
    )
    if flags["r"]:
        grass.run_command(
            "r.support", map=output, history=" (d^n)*log(d) as radial basis function"
        )
    if post_mask:
        grass.run_command(
            "r.support", map=output, history=" post-processing mask=" + post_mask
        )
    if where:
        grass.run_command(
            "r.support", map=output, history=" SQL query= WHERE " + where
        )

    # save layer #? to metadata? command line hist?

    #######################################################
    # Step 5) rm cost and cost_sq maps, tmp_icw_points, etc
    cleanup()

    #######################################################
    # Step 6) done!
    grass.message(_("Done! Results written to <%s>." % output))
def landscapeEvol(m, o, p, q, res, s, f): """ Now define "landscapeEvol", our main block of code, here defined because of the way g.parser needs to be called with python codes for grass (see below) m = last iteration number, o = iteration number, p = prefx, q = statsout, res = resolution of input elev map, s = master list of lists of climate data f = name of text file to write stats to """ # Get the process id to tag any temporary maps we make for easy clean up in the loop pid = os.getpid() # Get variables from user input elev = options["elev"] transp_eq = options["transp_eq"] initbdrk = options["initbdrk"] outdem = options["outdem"] outsoil = options["outsoil"] sdensity = options["sdensity"] K = options["k"] P = options["p"] C = options["c"] exp_m = options["exp_m"].split(",") exp_n = options["exp_n"].split(",") flowcontrib = options["flowcontrib"] convergence = options["convergence"] manningn = options["manningn"] p = options["prefx"] # Make some variables for temporary map names aspect = "%saspect%04d" % (p, o) flowacc = "%sflowacc%04d" % (p, o) flowdir = "%sflowdir%04d" % (p, o) flacclargenum = "%sflowacclargenum%04d" % (p, o) pc = "%spc%04d" % (p, o) tc = "%stc%04d" % (p, o) qsx = "%sQsx_%04d" % (p, o) qsy = "%sQsy_%04d" % (p, o) qsxdx = "%sDelta_Qsx_%04d" % (p, o) qsydy = "%sDelta_Qsy_%04d" % (p, o) rainexcess = "%s_rainfall_excess_map_%04d" % (p, o) tmpnetchange = "tmp%s_netchange%04d" % (pid, o) tmp90qle = "tmp%s_netchange_90qle%04d" % (pid, o) tmp10qle = "tmp%s_netchange_10qle%04d" % (pid, o) tmperosion = "tmp%s_erosion%04d" % (pid, o) tmpdep = "tmp%s_deposition%04d" % (pid, o) # List of temp maps to remove unless user wants to keep them all mapstoremove = [ aspect, flowacc, flowdir, flacclargenum, pc, tc, rainexcess, tmpnetchange, tmp10qle, tmp90qle, tmperosion, tmpdep, ] # Variables that come in as a list of lists and can update with each iteration # masterlist = [R2,rain2,stormlength2,storms2,stormi2] R = s[0][m] rain = s[1][m] stormtimet = 
float(s[2][m]) * 3600.00 # Convert storm length to seconds storms = s[3][m] stormi = (float(s[4][m]) * stormtimet ) # Calculate the length of time at peak flow depth # Maps that will update at each iteration to record state of landscape old_dem = "%s%s%04d" % (p, outdem, m) old_soil = "%s%s%04d" % (p, outsoil, m) slope = "%sslope%04d" % (p, o) netchange = "%sED_rate%04d" % (p, o) new_dem = "%s%s%04d" % (p, outdem, o) new_soil = "%s%s%04d" % (p, outsoil, o) # If first iteration, use input maps. Otherwise, use maps generated from # previous iterations if o == 1: grass.run_command("g.copy", raster=elev + "," + old_dem, quiet=True) # Grab the number of cells in the starting DEM numcells = grass.parse_command( "r.univar", flags="g", map=old_dem, )["n"] # Calculate soil as difference between surface and bedrock grass.mapcalc( "${old_soil}=${old_dem}-${initbdrk}", overwrite=True, quiet=True, old_soil=old_soil, old_dem=old_dem, initbdrk=initbdrk, ) grass.message("\n*************************\n" + "Iteration %s -- " % o + "step 1/6: calculating slope\n" + "*************************\n") grass.run_command("r.slope.aspect", quiet=True, elevation=old_dem, aspect=aspect, slope=slope) grass.message("\n*************************\n" + "Iteration %s -- " % o + "step 2/6: calculating accumulated flow depths\n" + "*************************\n") # Make map of rainfall excess (proportion each cell contributes to # downstrem flow) from flowcontrib. Note that if flowcontrib is a map, we # are just making a copy of it. This map is a percentage, but has to be # scaled from 0-100, because r.watershed will only allow values greater # than 1 as input in it's 'flow' variable. This creates a flow accumulation # map with large numbers, which will be divided by 100 after it is # made, bringing the values back down to what they should be. 
grass.mapcalc( "${rainexcess}=int(${flowcontrib})", quiet=True, rainexcess=rainexcess, flowcontrib=flowcontrib, ) grass.run_command( "r.watershed", quiet=True, flags="a", elevation=old_dem, threshold=numcells, flow=rainexcess, accumulation=flacclargenum, drainage=flowdir, convergence=convergence, ) grass.mapcalc( "${flowacc}=${flacclargenum}/100", quiet=True, flowacc=flowacc, flacclargenum=flacclargenum, ) # again, do something different if we are only making an evaluation of cutoffs if flags["p"] is True: samplePoints(old_dem, aspect, slope, pc, tc, flowacc, p) grass.message("\n*************************\n" + "Iteration %s -- " % o + "step 3/6: calculating sediment transport rates \n" + "*************************\n") # Figure out which transport equation to run. All equations estimate transport capacity as kg/m.s. Note that we integrate the step to calculate the Tc in the east and west directions, to simplify the divergence calculations in the next step (i.e., to reduce the overall number of mapcalc statements and intermediate maps) if transp_eq == "StreamPower": # Stream power equation: Tc=Kt*gw*1/N*h^m*B^n # where: h = depth of flow = (i*A)/(0.595*t) # and: B = change in slope # GIS Implementation: # Tc=K*C*P*gw*(1/N)*((i*A)/(0.595*t))^m*(tan(S)^n) # Variables: # Tc=Transport Capacity [kg/meters.second] # K*C*P=Kt=mitigating effects of soil type, vegetation cover, and landuse practices. 
[unitless] # gw=Hydrostatic pressure of water 9810 [kg/m2.second] # N=Manning's coefficient ~0.3-0.6 for different types of stream channesl [unitless] # i=rainfall intentsity [m/rainfall event] # A=uplsope accumulated area per contour (cell) width [m2/m] = [m] # 0.595 = constant for time-lagged peak flow (assumes symmetrical unit hydrograph) # t=length of rainfall event [seconds] # S=topographic slope [degrees] # m = transport coefficient for upslope area [unitless] # n transport coefficient for slope [unitless] # SLOPE VERSISON e1 = """${qsx}=${K}*${C}*${P} * exp(${manningn}, -1) * 9810. * \ exp((((${rain}/1000.)*${flowacc})/(0.595*${stormtimet})), \ graph(${flowacc}, ${exp_m1a},${exp_m1b}, ${exp_m2a},${exp_m2b}) ) * \ exp(tan(${slope}), graph(${slope}, ${exp_n1a},${exp_n1b}, ${exp_n2a},${exp_n2b}))\ * cos(${aspect})""" e2 = """${qsy}=${K}*${C}*${P} * exp(${manningn}, -1) * 9810. * \ exp((((${rain}/1000.)*${flowacc})/(0.595*${stormtimet})), \ graph(${flowacc}, ${exp_m1a},${exp_m1b}, ${exp_m2a},${exp_m2b})) * \ exp(tan(${slope}), graph(${slope}, ${exp_n1a},${exp_n1b}, ${exp_n2a},${exp_n2b}))\ * sin(${aspect})""" elif transp_eq == "ShearStress": # Shear stress equation: Tc=Kt*tau^m (critical shear stress assumed to be 0) # where: tau = shear stress = gw*h*B # and: S = change in slope # and: h = depth of flow = (i*A)/(0.595*t) # GIS Implmentation: # Tc=K*C*P*(gw*((i*A)/(0.595*t)*(tan(S))))^m # Variables: # Tc=Transport Capacity [kg/meters.second] # K*C*P=Kt=mitigating effects of soil type, vegetation cover, and landuse practices. 
[unitless] # gw=Hydrostatic pressure of water 9810 [kg/m2.second] # N=Manning's coefficient ~0.3-0.6 for different types of stream channesl [unitless] # i=rainfall intentsity [m/rainfall event] # A=uplsope accumulated area per contour (cell) width [m2/m] = [m] # 0.595 = constant for time-lagged peak flow (assumes symmetrical unit hydrograph) # t=length of rainfall event [seconds] # B=topographic slope [degrees] # m = transport coefficient (here assumed to be scaled to upslope area) [unitless] e1 = """${qsx}=(${K}*${C}*${P} * \ exp(9810.*(((${rain}/1000)*${flowacc})/(0.595*${stormtimet}))*tan(${slope}), \ graph(${flowacc}, ${exp_n1a},${exp_n1b}, ${exp_n2a},${exp_n2b}))) * \ cos(${aspect})""" e2 = """${qsy}=(${K}*${C}*${P} * \ exp(9810.*(((${rain}/1000)*${flowacc})/(0.595*${stormtimet}))*tan(${slope}), \ graph(${flowacc}, ${exp_n1a},${exp_n1b}, ${exp_n2a},${exp_n2b}) )) * \ sin(${aspect})""" elif transp_eq == "USPED": # USPED equation: Tc=R*K*C*P*A^m*B^n # where: B = change in slope # GIS Implementation: # Tc=R*K*C*P*A^m*tan(S)^n # Variables: # Tc=Transport Capacity [kg/meters.second] # R=Rainfall intensivity factor [MJ.mm/ha.h.yr] # A=uplsope accumulated area per contour (cell) width [m2/m] = [m] # S=topographic slope [degrees] # m = transport coefficient for upslope area [unitless] # n transport coefficient for slope [unitless] e1 = """${qsx}=((${R}*${K}*${C}*${P}*\ exp((${flowacc}*${res}),graph(${flowacc}, ${exp_m1a},${exp_m1b}, ${exp_m2a},${exp_m2b}))*\ exp(sin(${slope}), graph(${slope}, ${exp_n1a},${exp_n1b}, ${exp_n2a},${exp_n2b})))\ * cos(${aspect}))""" e2 = """${qsy}=((${R}*${K}*${C}*${P}*\ exp((${flowacc}*${res}),graph(${flowacc}, ${exp_m1a},${exp_m1b}, ${exp_m2a},${exp_m2b}))*\ exp(sin(${slope}), graph(${slope}, ${exp_n1a},${exp_n1b}, ${exp_n2a},${exp_n2b})))\ * sin(${aspect}))""" else: grass.fatal( 'You have entered a non-viable tranport equation name. 
Please ensure option "transp_eq" is one of "StreamPower," "ShearStress," or "USPED."' ) # Actually do the mapcalc statement for chosen transport equation x = grass.mapcalc_start( e1, quiet=True, qsx=qsx, slope=slope, aspect=aspect, R=R, K=K, C=C, P=P, res=res, flowacc=flowacc, rain=rain, stormtimet=stormtimet, stormi=stormi, exp_m1a=exp_m[0], exp_m1b=exp_m[1], exp_m2a=exp_m[2], exp_m2b=exp_m[3], exp_n1a=exp_n[0], exp_n1b=exp_n[1], exp_n2a=exp_n[2], exp_n2b=exp_n[3], manningn=manningn, ) y = grass.mapcalc_start( e2, quiet=True, qsy=qsy, slope=slope, aspect=aspect, R=R, K=K, C=C, P=P, res=res, flowacc=flowacc, rain=rain, stormtimet=stormtimet, stormi=stormi, exp_m1a=exp_m[0], exp_m1b=exp_m[1], exp_m2a=exp_m[2], exp_m2b=exp_m[3], exp_n1a=exp_n[0], exp_n1b=exp_n[1], exp_n2a=exp_n[2], exp_n2b=exp_n[3], manningn=manningn, ) x.wait() y.wait() grass.message( "\n*************************\n" + "Iteration %s -- " % o + "step 4/6: calculating divergence/difference of sediment transport and the actual amount of erosion or deposition in vertical meters/cell/year\n" + "*************************\n") # Taking divergence of transport capacity Tc converts kg/m.s to kg/m2.s sax = grass.start_command("r.slope.aspect", quiet=True, elevation=qsx, dx=qsxdx) say = grass.start_command("r.slope.aspect", quiet=True, elevation=qsy, dy=qsydy) sax.wait() say.wait() # Now convert output of divergence to calculated erosion and deposition in # vertical meters of elevation change. Add back the divergence in EW and NS # directions. Units are in kg/m2.s, so start by dividing by soil density # [kg/m3] to get m/s elevation change (for USPED that is m/year already, # but not for the shear stress or stream power). # For shear stress and stream power, also multiply by the number # of seconds at peak flow depth (stormi) and then by the number of erosive # storms per year to get m/year elevation change. 
if transp_eq == "USPED": ed = """${netchange}=((${qsxdx}+${qsydy})/${sdensity})""" grass.mapcalc( ed, quiet=True, netchange=tmpnetchange, qsxdx=qsxdx, qsydy=qsydy, sdensity=sdensity, ) else: ed = """${netchange}=((${qsxdx}+${qsydy})/${sdensity})*${stormi}*${storms}""" grass.mapcalc( ed, quiet=True, netchange=tmpnetchange, qsxdx=qsxdx, qsydy=qsydy, sdensity=sdensity, stormi=stormi, storms=storms, ) # Apply smoothing to the output to remove some spikes. Map will only be smoothed for values above the 90th quantile and below the 10th quantile (i.e., only extreme values will be smoothed) if flags["m"] is True: a = grass.start_command( "r.neighbors", quiet=True, input=tmpnetchange, output=tmp10qle, method="quantile", size=5, quantile=0.1, ) b = grass.start_command( "r.neighbors", quiet=True, input=tmpnetchange, output=tmp90qle, method="quantile", size=5, quantile=0.9, ) a.wait() b.wait() smoother = """${netchange}=if(${tmpnetchange}<${tmp10qle}, ${tmp10qle}, if(${tmpnetchange}>${tmp90qle}, ${tmp90qle}, ${tmpnetchange}))""" grass.mapcalc( smoother, quiet=True, netchange=netchange, tmpnetchange=tmpnetchange, tmp90qle=tmp90qle, tmp10qle=tmp10qle, ) else: grass.run_command("g.rename", quiet=True, raster=tmpnetchange + "," + netchange) grass.message( "\n*************************\n" + "Iteration %s -- " % o + "step 5/6: calculating terrain evolution and new soil depths\n" + " *************************\n") # Compute elevation changes: addition of ED change to old DEM. # This mapcalc statement first checks the amount of erodable soil in a given # cell against the amount of erosion calculated, and keeps the cell from # eroding past this amount (if there is soil, then if the amount of erosion # is more than the amount of soil, just remove all the soil and stop, else # remove the amount of caclulated erosion. It also runs an error catch that # checks to make sure that soil depth is not negative (could happen, I # suppose), and if it is, corrects it). 
Finally, do patch-job to catch the # shrinking edge problem (the edge cells have no upstream cell, so get # turned null in the calculations in step 4) e = """${new_dem} = eval(x=if(${old_soil} > 0.0 && (-1*${netchange}) <= ${old_soil}, ${netchange}, \ if((-1*${netchange}) > ${old_soil}, (-1*${old_soil}), 0)), \ y=(${old_dem} + x), if(isnull(y), ${old_dem}, y))""" grass.mapcalc( e, quiet=True, new_dem=new_dem, old_soil=old_soil, old_dem=old_dem, netchange=netchange, ) # Calculate new soil depths by subtracting initial bedrock elevations from # the new DEM. e = """${new_soil} = if((${new_dem} - ${initbdrk}) < 0, 0, (${new_dem} - ${initbdrk}))""" grass.mapcalc(e, quiet=True, new_soil=new_soil, new_dem=new_dem, initbdrk=initbdrk) # Set colors for elevation, soil, and ED maps grass.run_command("r.colors", quiet=True, map=new_dem, color="srtm") sdcolors = [ "100% 0:249:47", "20% 78:151:211", "6% 194:84:171", "0% 227:174:217" ] sdc = grass.feed_command("r.colors", quiet=True, map=new_soil, rules="-") sdc.stdin.write("\n".join(sdcolors)) sdc.stdin.close() nccolors = [ "100 127:0:255", "1 0:0:255", ".1 0:255:0", "0.001 152:251:152", "0 250:250:250", "-0.001 255:255:50", "-.1 255:127:0", "-1 255:0:0", "-100 127:0:255", ] ncc = grass.feed_command("r.colors", quiet=True, map=netchange, rules="-") ncc.stdin.write("\n".join(nccolors)) ncc.stdin.close() sdc.wait() ncc.wait() grass.message("\n*************************\n" + "Iteration %s -- " % o + "step 6/6: writing stats to output file\n" + "*************************\n") # Make some temp maps of just erosion rate and just deposition rates e = """${tmperosion}=if(${netchange} < -0, ${netchange}, null())""" ero1 = grass.mapcalc_start(e, quiet=True, tmperosion=tmperosion, netchange=netchange) e = """${tmpdep}=if(${netchange} > 0, ${netchange}, null())""" dep1 = grass.mapcalc_start(e, quiet=True, tmpdep=tmpdep, netchange=netchange) ero1.wait() dep1.wait() # Grab the stats from these temp files and save them to dictionaries erosstats 
= grass.parse_command("r.univar", flags="ge", percentile="1", map=tmperosion) depostats = grass.parse_command("r.univar", flags="ge", percentile="99", map=tmpdep) # Finish gathering stats (just need the soil depth stats now) soilstats = grass.parse_command("r.univar", flags="ge", map=new_soil, percentile="99") # Write stats to a new line in the stats file # HEADER of the file should be: ',,Mean Values,,,,Standard Deviations,,,,Totals,,,Additional Stats\nIteration,,Mean Erosion,Mean Deposition,Mean Soil Depth,,Standard Deviation Erosion,Standard Deviation Deposition,Standard Deviation Soil Depth,,Total Sediment Eroded,Total Sediment Deposited,,Minimum Erosion,First Quartile Erosion,Median Erosion,Third Quartile Erosion,Maximum Erosion,Original Un-smoothed Maximum Erosion,,Minimum Deposition,First Quartile Deposition,Median Deposition,Third Quartile Deposition,Maximum Deposition,Original Un-smoothed Maximum Deposition,,Minimum Soil Depth,First Quartile Soil Depth,Median Soil Depth,Third Quartile Soil Depth,Maximum Soil Depth' f.write("\n%s" % o + ",," + erosstats["mean"] + "," + depostats["mean"] + "," + soilstats["mean"] + ",," + erosstats["stddev"] + "," + depostats["stddev"] + "," + soilstats["stddev"] + ",," + erosstats["sum"] + "," + depostats["sum"] + ",," + erosstats["max"] + "," + erosstats["third_quartile"] + "," + erosstats["median"] + "," + erosstats["first_quartile"] + "," + erosstats["min"] + "," + depostats["min"] + "," + depostats["first_quartile"] + "," + depostats["median"] + "," + depostats["third_quartile"] + "," + depostats["max"] + "," + soilstats["min"] + "," + soilstats["first_quartile"] + "," + soilstats["median"] + "," + soilstats["third_quartile"] + "," + soilstats["max"]) # Cleanup temporary files if flags["k"] is True: grass.message("\nTemporary maps will NOT be deleted!!!!\n") else: grass.message("\nCleaning up temporary maps...\n\n") # Check all the flag options, and add to list of maps to delete if flags["s"] is True: 
grass.message("Keeping Slope map.") else: mapstoremove.append(slope) if flags["d"] is True: grass.message("Not keeping Soil Depth map.") mapstoremove.append(old_soil) # Check if this is the last year and remove the "new-soil" map too if o == int(options["number"]): mapstoremove.append(new_soil) else: # Check if this is the first year, and if so, remove the temporary initial soil depths map if o <= 1: grass.message(("%s%s%04d" % (p, outsoil, m))) mapstoremove.append("%s%s%04d" % (p, outsoil, m)) if flags["e"] is True: grass.message( "Keeping delta Transport Capacity (divergence) maps.") else: mapstoremove.extend([qsxdx, qsydy]) if flags["t"] is True: grass.message("Keeping Transport Capacity maps.") else: mapstoremove.extend([qsx, qsy]) if flags["r"] is True: grass.message("Not keeping an Erosion and Deposition rate map.") mapstoremove.append(netchange) if len(mapstoremove) == 0: pass else: grass.run_command( "g.remove", quiet=True, flags="f", type="rast", name=",".join(mapstoremove), ) grass.message("\n*************************\n" + "Done with Iteration %s " % o + "\n*************************\n") return 0
def main():
    """Build a cumulative "total cost" raster from a friction surface.

    For every non-null cell of the input friction raster, an r.cost cost
    surface (clamped to a normalized max_cost of 1) is computed; the
    presence/absence of each surface is summed into the output raster,
    four r.cost processes at a time.  Also reports the min/max cost
    distance ("edge effect") implied by the friction extremes.

    Reads ``options``/``flags`` (GRASS parser globals) and appends
    temporary map names to the module-level ``tmp_layers`` list.
    """
    # Get user inputs
    friction_original = options['friction']  # Input friction map
    out = options['out']  # Output totalcost raster
    maxcost = options['maxcost']  # Max cost distance in cost units
    knight = "k" if flags["k"] else ""  # Use Knight's move in r.cost instead Queen's move (a bit slower, but more accurate)
    mempercent = int(options['mempercent'])  # Percent of map to keep in memory in r.cost calculation

    # Error if no valid friction surface is given
    if not grass.find_file(friction_original)['name']:
        grass.message(_("Friction surface <%s> not found") % friction_original)
        sys.exit()

    # Calculate cost distances / edge effect distances from the friction map. Result is in map units
    info = grass.raster_info(friction_original)  # Read and get raster info
    edgeeffect_min = float(maxcost) / float(info['max'])  # Minimum cost distance / edge effect distance
    edgeeffect_max = float(maxcost) / float(info['min'])  # Maximum cost distance / edge effect distance

    # If "Only calculate edge effect" is selected
    if flags['e']:
        grass.message("Minimum distance / edge effect: " + str(edgeeffect_min))
        grass.message("Maximum distance / edge effect: " + str(edgeeffect_max))
        sys.exit()

    # If output file exists, but overwrite option isn't selected
    if not grass.overwrite():
        if grass.find_file(out)['name']:
            grass.message(_("Output raster map <%s> already exists") % out)
            sys.exit()

    # Get raster calculation region information
    regiondata = grass.read_command("g.region", flags = 'p')
    regvalues = grass.parse_key_val(regiondata, sep= ':')
    # Assign variables for necessary region info bits
    nsres = float(regvalues['nsres'])
    ewres = float(regvalues['ewres'])
    # Calculate the mean resolution
    meanres = (nsres + ewres) / 2.0

    # Create a list holding cell coordinates
    coordinatelist = []  # An empty list that will be populated with coordinates
    rasterdata = grass.read_command('r.stats', flags="1gn", input = friction_original)  # Read input raster coordinates
    rastervalues = rasterdata.split()  # Split the values from r.stats into list entries
    # rastervalues list is structured like that: [x1, y1, rastervalue1, x2, y2, rastervalue2 ... xn, yn, rastervaluen],
    # so iterate through that list with step of 3 and write a new list that has coordinates
    # in a string: ["x1,y1", "x2,y2" ... "xn,yn"]
    # NOTE(review): xrange is Python 2 only — this script will not run on Python 3 as-is.
    for val in xrange(0,len(rastervalues),3):
        coordinatelist.append(rastervalues[val] + "," + rastervalues[val+1])

    # This is the number of cells (and hence cost surfaces) to be used
    n_coords = len(coordinatelist)

    # Create temporary filenames with unique process id in their name. Add each name to the tmp_layers list.
    pid = os.getpid()
    cost1 = str("tmp_totalcost_cost1_%d" % pid)
    tmp_layers.append(cost1)
    cost2 = str("tmp_totalcost_cost2_%d" % pid)
    tmp_layers.append(cost2)
    cost3 = str("tmp_totalcost_cost3_%d" % pid)
    tmp_layers.append(cost3)
    cost4 = str("tmp_totalcost_cost4_%d" % pid)
    tmp_layers.append(cost4)
    friction = str("tmp_friction_%d" % pid)
    tmp_layers.append(friction)
    calctemp = str("tmp_calctemp_%d" % pid)
    tmp_layers.append(calctemp)

    # Assuming the friction values are per map unit (not per cell), the raster should be multiplied
    # with region resolution. This is because r.cost just uses cell values and adds them - slightly
    # different approach compared to ArcGIS which compensates for the resolution automatically.
    # The result is then divided by maxcost so that r.cost max_cost value can be fixed to 1
    # (it doesn't accept floating point values, hence the workaround).
    grass.mapcalc("$outmap = $inmap * $res / $mcost", outmap = friction, inmap = friction_original, res = meanres, mcost = maxcost)

    # Do the main loop
    for c in xrange(0, n_coords, 4):  # Iterate through the numbers of cells with the step of 4
        # Start four r.cost processes with different coordinates. The first process (costproc1) is
        # always made, but the other 3 have the condition that there exists a successive coordinate
        # in the list. This is because the used step of 4 in the loop. In case there are no
        # coordinates left, assign the redundant cost outputs null-values so they wont be included
        # in the map calc.
        try:
            costproc1 = grass.start_command('r.cost', overwrite = True, flags = knight, input = friction, output = cost1, start_coordinates = coordinatelist[c], max_cost = 1, percent_memory = mempercent)
            if c+1 < n_coords:
                costproc2 = grass.start_command('r.cost', overwrite = True, flags = knight, input = friction, output = cost2, start_coordinates = coordinatelist[c+1], max_cost = 1, percent_memory = mempercent)
            else:
                cost2 = "null()"
            if c+2 < n_coords:
                costproc3 = grass.start_command('r.cost', overwrite = True, flags = knight, input = friction, output = cost3, start_coordinates = coordinatelist[c+2], max_cost = 1, percent_memory = mempercent)
            else:
                cost3 = "null()"
            if c+3 < n_coords:
                costproc4 = grass.start_command('r.cost', overwrite = True, flags = knight, input = friction, output = cost4, start_coordinates = coordinatelist[c+3], max_cost = 1, percent_memory = mempercent)
            else:
                cost4 = "null()"
        # NOTE(review): bare except hides the real error class (and would even catch
        # KeyboardInterrupt); a narrower `except OSError` (or CalledModuleError) would be safer.
        except:
            grass.message("Error with r.cost: " + str(sys.exc_info()[0]))
            sys.exit()

        # For the very first iteration just add those first r.cost results together
        if c == 0:
            # Wait for the r.cost processes to stop before moving on
            # NOTE(review): costproc2/3/4 are only assigned when enough coordinates exist;
            # if n_coords < 4 these .wait() calls raise NameError — confirm intended input size.
            costproc1.wait()
            costproc2.wait()
            costproc3.wait()
            costproc4.wait()
            # Do the map algebra: merge the cost surfaces
            try:
                grass.mapcalc("$outmap = if(isnull($tempmap1),0,1) + if(isnull($tempmap2),0,1) + if(isnull($tempmap3),0,1) + if(isnull($tempmap4),0,1)", outmap = out, tempmap1 = cost1, tempmap2 = cost2, tempmap3 = cost3, tempmap4 = cost4, overwrite=True)
            except:
                grass.message("Error with mapcalc: " + str(sys.exc_info()[0]))
                sys.exit()
        # If it's not the first iteration...
        else:
            # Rename the output of previous mapcalc iteration so that it can be used in the mapcalc
            # expression (x = x + y logic doesn't work apparently)
            try:
                # If pygrass gets fixed, replace g.rename with those commented out pygrass-based
                # lines as they seem to be a bit faster (are they really?)
                #map = pygrass.raster.RasterRow(out)
                #map.name = calctemp
                grass.run_command('g.rename', overwrite = True, rast = out + "," + calctemp)
            except:
                grass.message("Error: " + str(sys.exc_info()[0]))
                sys.exit()
            # Wait for the r.cost processes to stop before moving on
            costproc1.wait()
            costproc2.wait()
            costproc3.wait()
            costproc4.wait()
            # Merge the r.cost results and the cumulative map from previous iteration
            try:
                grass.mapcalc("$outmap = if(isnull($inmap),0,$inmap) + if(isnull($tempmap1),0,1) + if(isnull($tempmap2),0,1) + if(isnull($tempmap3),0,1) + if(isnull($tempmap4),0,1)", inmap = calctemp, outmap = out, tempmap1 = cost1, tempmap2 = cost2, tempmap3 = cost3, tempmap4 = cost4, overwrite=True)
            except:
                grass.message("Error with mapcalc: " + str(sys.exc_info()[0]))
                sys.exit()

    # Finally print the edge effect values
    grass.message("---------------------------------------------")
    grass.message("Minimum distance / edge effect: " + str(edgeeffect_min))
    grass.message("Maximum distance / edge effect: " + str(edgeeffect_max))
def main():
    """Create a cumulative least-cost-path raster between input points.

    Processes the input point layer two points at a time: for each pair,
    two parallel r.cost surfaces are built, r.drain paths are traced from
    the selected drain points to each center point, and the presence of
    each path is accumulated into the output raster via mapcalc.

    Reads ``options`` (GRASS parser globals); relies on the project-local
    PointLayer class and cleanUp() helper defined elsewhere in this file.
    """
    # User inputs
    friction = options['friction']  # Input friction raster
    points = options['points']  # Input point layer
    output = options['output']  # Output least cost path raster
    radius = int(options['radius'])  # Point search radius
    n_closepoints = int(options['closepoints'])  # Number of closest points
    netout = options['netout']  # Network output

    # Initiate PontLayer() object
    input_points = PointLayer(points)

    # Get process id (pid) and create temporary layer names
    pid = os.getpid()  # Process ID, used for making (more or less) unique temporary filenames
    costmap1 = "tmp_cost_%d" % pid  # Cost surface from onepoint1
    costmap2 = "tmp_cost_%d_%i" % (pid, 2)  # Cost surface from onepoint2; parallel process
    lcpmap1 = "tmp_lcp_%d" % pid  # Least cost path map from costmap1
    lcpmap2 = "tmp_lcp_%d_%i" % (pid, 2)  # Least cost path map from costmap2; parallel process
    lcptemp = "tmp_lcptemp_%d" % pid  # Temporary file for mapcalc

    # Create a a long string of all temporary layernames for easy deletion them later on
    tmpvars = costmap1 + "," + costmap2 + "," + lcpmap1 + "," + lcpmap2 + "," + lcptemp

    # Get coordinates of input point layer and also the total number of point features in the layer
    #all_coords, n_feats = pointCoords(points)
    #distdict = pointDistances(all_coords, n_feats)
    n_feats = input_points.featCount()

    # Initiate new Popen() object for multiprocessing mapcalc
    # NOTE(review): grass.Popen("") spawns a dummy process purely so that .wait()
    # can be called unconditionally in the first loop iteration — confirm this is intended.
    mapcalcproc = grass.Popen("")

    # Main loop that creates least cost paths for all points
    for feat in range(1, n_feats +1, 2):
        # Initiate new PointLayer() objects
        layer_point1 = PointLayer(points, feat)
        # NOTE(review): for the last odd feature, feat+1 exceeds the feature count —
        # presumably PointLayer tolerates a missing cat; verify.
        layer_point2 = PointLayer(points, feat+1)
        # Choose how drain targets are specified, depending on radius / closepoints options:
        # either explicit coordinate strings or the whole point layer.
        if radius > 0 and n_closepoints <= 0:
            drainpointlayer1 = None
            drainpointlayer2 = None
            drainpoints1 = layer_point1.pointsInRadius(radius, stringoutput=True)
            drainpoints2 = layer_point2.pointsInRadius(radius, stringoutput=True)
        elif radius > 0 and n_closepoints > 0:
            drainpointlayer1 = None
            drainpointlayer2 = None
            drainpoints1 = layer_point1.closePointsInRadius(n_closepoints, radius)
            drainpoints2 = layer_point2.closePointsInRadius(n_closepoints, radius)
        elif radius == 0 and n_closepoints > 0:
            drainpointlayer1 = None
            drainpointlayer2 = None
            drainpoints1 = layer_point1.closePoints(n_closepoints)
            drainpoints2 = layer_point2.closePoints(n_closepoints)
        else:
            drainpointlayer1 = points
            drainpointlayer2 = points
            drainpoints1 = None
            drainpoints2 = None

        try:
            # Two r.cost surfaces in parallel, one per point of the pair
            lcpproc1 = grass.start_command('r.cost', flags="k", overwrite=True, input=friction, output=costmap1, start_coordinates=layer_point1.oneCoord())
            lcpproc2 = grass.start_command('r.cost', flags="k", overwrite=True, input=friction, output=costmap2, start_coordinates=layer_point2.oneCoord())
            # Least-cost paths from every other point to the current point
            lcpproc1.wait()
            lcpproc1 = grass.start_command('r.drain', overwrite=True, input=costmap1, output=lcpmap1, start_coordinates=drainpoints1, start_points=drainpointlayer1)
            lcpproc2.wait()
            lcpproc2 = grass.start_command('r.drain', overwrite=True, input=costmap2, output=lcpmap2, start_coordinates=drainpoints2, start_points=drainpointlayer2)
            lcpproc1.wait()
            lcpproc2.wait()
        # NOTE(review): bare except — masks the actual failure; narrow it if possible.
        except:
            cleanUp(tmpvars)
            grass.fatal("Problem with lcp creation")

        try:
            if feat == 1:
                # First pair: seed the cumulative output with the two fresh paths
                mapcalcproc = grass.mapcalc_start("$outmap = if(isnull($tempmap),0,1) + if(isnull($tempmap2),0,1)", outmap = output, tempmap = lcpmap1, tempmap2 = lcpmap2, overwrite=True)
            else:
                # Wait for the mapcalc operation from previous iteration to finish
                mapcalcproc.wait()
                # Rename the cumulative lcp map from previous iteration so that mapcalc can use it
                # (x=x+y doesn't work with mapcalc)
                grass.run_command('g.rename', rast = output + ',' + lcptemp, overwrite=True)
                # output = Previous LCP + Current LCP
                mapcalcproc = grass.start_command('r.mapcalc', "$outmap = $inmap + if(isnull($tempmap),0,1) + if(isnull($tempmap2),0,1)", inmap = lcptemp, outmap = output, tempmap = lcpmap1, tempmap2 = lcpmap2)
        except:
            cleanUp(tmpvars)
            grass.fatal("Problem with mapcalc")

    # Wait for last mapcalc to finish
    mapcalcproc.wait()
    # Make 0 values into NULLs
    nullproc = grass.start_command('r.null', map = output, setnull = "0")
    cleanUp(tmpvars)
    nullproc.wait()
    grass.message("All done")
flags='c', quiet=True).splitlines(): print nace2 pin = gscript.pipe_command( 'v.db.select', map=firms_map, column="x,y,%s" % turnover_column, where="substr(%s, 1, 2) = '%s' AND %s >= 0" % (nace_column, nace2, turnover_column), flags='c', quiet=True) total_turnover_map = 'turnover_%d_%s' % (annee, nace2) p = gscript.start_command('r.in.xyz', input_='-', stdin=pin.stdout, method='sum', type_='DCELL', output=total_turnover_map, quiet=True, overwrite=True) if p.wait() is not 0: gscript.fatal("Error in r.in.xyz with nace %s" % nace2) stats = gscript.parse_command('r.univar', map_=total_turnover_map, flags='g', quiet=True) relative_turnover_map = 'turnover_rel_%d_%s' % (annee, nace2) mapcalc_expression = "%s = " % relative_turnover_map
def main():
    """Create least-cost paths between points as raster and/or vector output.

    For each pair of input points (processed two at a time for parallelism)
    an r.cost surface + direction raster is built, r.drain traces paths to
    the selected drain points, and results are accumulated into a raster
    (mapcalc sum) and/or patched into a vector layer, optionally with
    per-path cost attributes written to the vector attribute table.

    Reads ``options``/``flags`` (GRASS parser globals) and appends
    temporary names to the module-level ``tmp_rlayers``/``tmp_vlayers``
    lists; relies on the project-local PointLayerInfo class.
    """
    # User inputs
    friction = options['friction']  # Input friction raster
    inpoints = options['points']  # Input point layer
    rastout = options['rastout']  # Output least cost path raster
    radius = int(options['radius'])  # Point search radius
    n_closepoints = int(options['nearpoints'])  # Number of closest points
    vectout = options['vectout']  # Vector layer output
    knight = "k" if flags['k'] else ""  # Knight's move flag
    costatt = "e" if flags['c'] else ""  # Calculate total cost values for paths and add them to attribute table

    # Check no vector or raster output is chosen, raise an error
    if (not vectout) and (not rastout):
        grass.message("No output chosen!")
        sys.exit()

    # Check overwrite settings
    # If output raster file exists, but overwrite option isn't selected
    if not grass.overwrite():
        if grass.find_file(rastout)['name']:
            grass.message(_("Output raster map <%s> already exists") % rastout)
            sys.exit()
    # If output vector file exists, but overwrite option isn't selected
    if not grass.overwrite():
        if grass.find_file(vectout, element = 'vector')['name']:
            grass.message(_("Output vector map <%s> already exists") % vectout)
            sys.exit()
    # If overwrite is chosen, remove the previous layers before any action (to lessen the
    # probability of some random errors)
    if grass.overwrite():
        grass.run_command("g.remove", rast = rastout, vect = vectout, quiet = True)

    # Get a region resolution to be used in cost attribute calculation, because the default
    # will be in map units
    # NOTE(review): rescoefficient is only defined inside this branch but is used later
    # whenever costatt == "e" — the guard conditions match today; keep them in sync.
    if vectout and (costatt == "e"):
        # Get raster calculation region information
        regiondata = grass.read_command("g.region", flags = 'p')
        regvalues = grass.parse_key_val(regiondata, sep= ':')
        # Assign variables for necessary region info bits
        nsres = float(regvalues['nsres'])
        ewres = float(regvalues['ewres'])
        regionres = (nsres + ewres) / 2.0
        rescoefficient = regionres

    # Get process id (pid) and create temporary layer names which are also added to
    # tmp_rlayers list
    pid = os.getpid()  # Process ID, used for getting unique temporary filenames
    costmap1 = "tmp_cost_%d" % pid  # Cost surface for point 1
    tmp_rlayers.append(costmap1)
    costmap2 = "tmp_cost_%d_%i" % (pid, 2)  # Cost surface from point 2 (parallel process)
    tmp_rlayers.append(costmap2)
    costdir1 = "tmp_costdir_%d" % pid  # Temporary cost direction raster 1
    tmp_rlayers.append(costdir1)
    costdir2 = "tmp_costdir_%d_%i" % (pid, 2)  # Temporary cost direction raster 2
    tmp_rlayers.append(costdir2)
    lcpmap1 = "tmp_lcp_%d" % pid  # Least cost path map from costmap1
    tmp_rlayers.append(lcpmap1)
    lcpmap2 = "tmp_lcp_%d_%i" % (pid, 2)  # Least cost path map from costmap2 (parallel process)
    tmp_rlayers.append(lcpmap2)
    lcptemp = "tmp_lcptemp_%d" % pid  # Temporary file for mapcalc
    tmp_rlayers.append(lcptemp)
    region = "tmp_region_%d" % pid  # Temporary vector layer of computational region
    tmp_vlayers.append(region)
    points = "tmp_points_%d" % pid  # Temporary point layer which holds points only inside the region
    tmp_vlayers.append(points)
    if vectout:  # if vector output is needed, create the temporary vectorlayers too
        vectdrain1 = "tmp_vectdrain_%d" % pid
        tmp_vlayers.append(vectdrain1)
        vectdrain2 = "tmp_vectdrain2_%d" % pid
        tmp_vlayers.append(vectdrain2)

    # Make sure input data points are inside raster computational region: create a region
    # polygon and select points that are inside it
    grass.run_command('v.in.region', overwrite = True, output = region)
    grass.run_command('v.select', overwrite = True, flags = "tc", ainput = inpoints, atype = 'point', binput = region, btype = 'area', output = points , operator = 'within')

    # Create a new PointLayerInfo class instance using input point layer and get the categories
    # list as well as total feature count of the layer
    pointlayer = PointLayerInfo(points)
    points_cats = pointlayer.featcats  # A list() of layer feature categories
    points_featcount = pointlayer.featcount  # integer of feature count in point layer
    points_coordsdict = pointlayer.coordsdict  # dict() of point coordinates as tuple (x,y)

    # Create an empty dictionaries for storing cost distances between points
    costdict1 = dict()
    costdict2 = dict()

    # Create the first mapcalc process, so that it can be checked and stopped in the loop
    # without using more complicated ways
    mapcalc = grass.Popen("")
    lcp1 = grass.Popen("")
    lcp2 = grass.Popen("")

    # The main loop for least cost path creation. For each point a cost surface is created,
    # least cost paths created and then added to the general output file. Loop uses a range
    # which has as many items as there are points in the input point layer. To make use of
    # parallel processing, the step is 2, although the "item" is always the first of the
    # selected pair.
    for item in range(0,points_featcount,2):
        # Get category number of the point from the point_cats list
        cat1 = points_cats[item]

        # Set p2 (i.e. using second or parallel process) to be False by default and make it
        # True if there are enough points left to do so. In that case set it to true and also
        # get the category number of the point from the point_cats list
        p2 = False
        if item+1 < points_featcount:
            p2 = True
            cat2 = points_cats[item+1]

        # Create a new PointLayerInfo object from input point layer with centerpoint (from
        # which distances area measured in the class) feature as currently selected point cat
        point1 = PointLayerInfo(points, cat1)
        if p2:  # The same for p2 if needed
            point2 = PointLayerInfo(points, cat2)

        # begin cost surface process with the start coordinate of currently selected point.
        # Do the same for second process
        costsurf1 = grass.start_command('r.cost', flags=knight, overwrite=True, input=friction, output=costmap1, outdir=costdir1, start_coordinates=point1.centercoord())
        if p2:
            costsurf2 = grass.start_command('r.cost', flags=knight, overwrite=True, input=friction, output=costmap2, outdir=costdir2, start_coordinates=point2.centercoord())

        # Create the drainlist (list of feature coordinates where lcp from current point is
        # made to) depending on whether radius and/or n_closepoints are used. Drainlist point
        # coordinates will be used for r.drain. See PointLayerInfo class below for explanation
        # of the process.
        if radius and n_closepoints:  # If radius and n_closepoints are used
            drainlist1 = point1.near_points_in_radius(n_closepoints, radius)
            if p2:
                drainlist2 = point2.near_points_in_radius(n_closepoints, radius)
        elif radius:  # If radius is used
            drainlist1 = point1.points_in_radius(radius)
            if p2:
                drainlist2 = point2.points_in_radius(radius)
        elif n_closepoints:  # If n_closepoints is used
            drainlist1 = point1.near_points(n_closepoints)
            if p2:
                drainlist2 = point2.near_points(n_closepoints)
        else:  # If neither radius or n_closepoints are used
            drainlist1 = point1.cats_without_centerpoint()
            if p2:
                drainlist2 = point2.cats_without_centerpoint()

        # Do the least cost path calculation procedures
        drain_coords1 = ""  # An empty string that will be populated with point coordinates which in turn will be used for r.drain start coordinates
        for drainpoint in drainlist1:  # Iterate through all points in drainlist
            drain_x, drain_y = point1.coordsdict[drainpoint]  # variables are assigned coordinate values from the coordinate dictionary
            drain_coords1 = drain_coords1 + str(drain_x) + "," + str(drain_y) + ","  # Add those coordinates to the string that is usable by r.drain
        if p2:  # The same thing for second process, see previous section for comments
            drain_coords2 = ""
            for drainpoint in drainlist2:
                drain_x, drain_y = point2.coordsdict[drainpoint]
                drain_coords2 = drain_coords2 + str(drain_x) + "," + str(drain_y) + ","

        # Wait for the previous processes to finish their processing
        # NOTE(review): costsurf2 is only assigned when p2 is True; on a final odd-sized
        # iteration this .wait() would raise NameError (it reuses the previous iteration's
        # handle at best) — verify against the upstream script.
        costsurf1.wait()
        costsurf2.wait()
        mapcalc.wait()

        # If vector output is needed, do the r.drain for each point in the drainlist
        # separately to get the cost values
        if vectout:
            if costatt == "e":
                for drainpoint in drainlist1:  # Each point cat in the drainlist is being iterated
                    drain_x, drain_y = point1.coordsdict[drainpoint]  # Currently selected point's coordinates
                    drain_onecoord = str(str(drain_x) + "," + str(drain_y))  # The coordinate to be used in r.drain on the next line
                    grass.run_command('r.drain', overwrite=True, flags="ad", input=costmap1, indir=costdir1, output = lcpmap1, start_coordinates = drain_onecoord)
                    # Get raster max value (=total cost value for one path) and store it in
                    # dictionary with point cat being its key
                    rastinfo = grass.raster_info(lcpmap1)
                    costdict1[drainpoint] = rescoefficient * rastinfo['min']
                if p2:  # Same procedure as in the previous section for parallel process
                    for drainpoint in drainlist2:
                        drain_x, drain_y = point2.coordsdict[drainpoint]
                        drain_onecoord = str(str(drain_x) + "," + str(drain_y))
                        grass.run_command('r.drain', overwrite=True, flags="ad", input=costmap2, indir=costdir2, output = lcpmap2, start_coordinates = drain_onecoord)
                        rastinfo = grass.raster_info(lcpmap2)
                        costdict2[drainpoint] = rescoefficient * rastinfo['min']
            # Finally create the vector layer with all paths from the current point. It also
            # (whether we want it or not) creates a raster output
            if len(drainlist1) > 0:
                lcp1 = grass.start_command('r.drain', overwrite=True, flags="d", input=costmap1, indir=costdir1, output = lcpmap1, vector_output = vectdrain1,start_coordinates=drain_coords1)
            if p2 and (len(drainlist2) > 0):
                lcp2 = grass.start_command('r.drain', overwrite=True, flags="d", input=costmap2, indir=costdir2, output = lcpmap2, vector_output = vectdrain2,start_coordinates=drain_coords2)

        # If raster output is needed, but path maps have not been made yet (i.e. vectout must
        # be False) then make those
        if not vectout and (len(drainlist1) > 0):
            lcp1 = grass.start_command('r.drain', overwrite=True, flags="d", input=costmap1, indir=costdir1, output = lcpmap1, start_coordinates=drain_coords1)
            if p2 and (len(drainlist2) > 0):
                lcp2 = grass.start_command('r.drain', overwrite=True, flags="d", input=costmap2, indir=costdir2, output = lcpmap2, start_coordinates=drain_coords2)

        # Wait for the lcp processes to finish
        lcp1.wait()
        lcp2.wait()

        # If raster output is needed, do the mapcalc stuff: merge the path rasters
        if rastout:
            if len(drainlist1) == 0:
                lcpmap1 = 0
            # NOTE(review): drainlist2 is undefined when p2 is False — this test can raise
            # NameError on the final odd iteration; confirm against upstream.
            if len(drainlist2) == 0:
                lcpmap2 = 0
            if cat1 == points_cats[0]:  # If it's the very first iteration
                if p2:  # Technically this should not be False in any situation, but let it be here for additional safety
                    # Add lcpmap1 and lcpmap2 together
                    mapcalc = grass.mapcalc_start("$outmap = if(isnull($tempmap1),0,1) + if(isnull($tempmap2),0,1)", outmap = rastout, tempmap1 = lcpmap1, tempmap2 = lcpmap2, overwrite=True)
                else:  # Just in case
                    mapcalc = grass.mapcalc_start("$outmap = if(isnull($tempmap1),0,1)", outmap = rastout, tempmap1 = lcpmap1, overwrite=True)
            else:
                # Rename the cumulative lcp map from previous iteration so that mapcalc can
                # use it (x=x+y logic doesn't work with mapcalc)
                grass.run_command('g.rename', rast = rastout + ',' + lcptemp, overwrite=True)
                # rastout = Previous LCP + Current LCP
                if p2:
                    mapcalc = grass.mapcalc_start("$outmap = $inmap + if(isnull($tempmap1),0,1) + if(isnull($tempmap2),0,1)", inmap = lcptemp, outmap = rastout, tempmap1 = lcpmap1, tempmap2 = lcpmap2)
                else:
                    mapcalc = grass.mapcalc_start("$outmap = $inmap + if(isnull($tempmap1),0,1)", inmap = lcptemp, outmap = rastout, tempmap1 = lcpmap1)

        # If vector output is needed, do all necessary things like merging the vectors and
        # getting values for attribute table (if specified)
        if vectout:
            if costatt == "e":  # Only if cost attributes are needed
                if len(drainlist1) > 0:  # Process 1
                    # Add attribute table to the vector path layer
                    grass.run_command('v.db.addtable', map = vectdrain1)
                    # Get path Euclidean distances and add them to the new column in attribute
                    # table. Also add the current point cat to the attribute "from_point"
                    grass.run_command('v.db.addcolumn', map = vectdrain1, columns = "length double precision, from_point int, to_point int, cost double precision")
                    grass.run_command('v.to.db', map = vectdrain1, type = "line", option = "length", columns = "length")
                    grass.run_command('v.db.update', map = vectdrain1, column = "from_point", value = str(cat1))
                # Same as previous section but for process 2
                if p2 and (len(drainlist2) > 0):
                    grass.run_command('v.db.addtable', map = vectdrain2)
                    grass.run_command('v.db.addcolumn', map = vectdrain2, columns = "length double precision, from_point int, to_point int, cost double precision")
                    grass.run_command('v.to.db', map = vectdrain2, type = "line", option = "length", columns = "length")
                    grass.run_command('v.db.update', map = vectdrain2, column = "from_point", value = str(cat2))
                # A loop to update the path attribute values to the attribute table
                if len(drainlist1) > 0:
                    drainseq = 1  # This is just a helper counter because for newly created vector layer the cats start from 1 and just go successively, so no need to introduce any unnecessary catlist
                    for drainpoint in drainlist1:
                        # Update to_point column with values from drainlist
                        grass.run_command('v.db.update', map = vectdrain1, column = "to_point", value = str(drainpoint), where = "cat = " + str(drainseq))
                        # Update the cost column using costdict created earlier
                        grass.run_command('v.db.update', map = vectdrain1, column = "cost", value = costdict1[drainpoint], where = "cat = " + str(drainseq))
                        drainseq += 1
                # The same for process 2
                if p2 and (len(drainlist2) > 0):
                    drainseq = 1  # Reset the counter
                    for drainpoint in drainlist2:
                        grass.run_command('v.db.update', map = vectdrain2, column = "to_point", value = str(drainpoint), where = "cat = " + str(drainseq))
                        grass.run_command('v.db.update', map = vectdrain2, column = "cost", value = costdict2[drainpoint], where = "cat = " + str(drainseq))
                        drainseq += 1

            # Patch vector layers
            # For both processes, first make sure that drainlists for current iteration are
            # not empty. If they are not (i.e. the drainlist for current iteration > 0), then
            # drain vectors will be used in v.patch, otherwise empty strings will be used in
            # patching. This is to make sure that vectors from previous iterations are not used.
            if len(drainlist1) > 0:
                vect1 = vectdrain1
            else:
                vect1 = ""
            if len(drainlist2) > 0:
                vect2 = vectdrain2
            else:
                vect2 = ""
            # If BOTH drain processes resulted in vectors, create a comma character to be used
            # in v.patch (input parameter must be a string type and layers should be separated
            # by comma)
            if (len(drainlist1) > 0) and (len(drainlist2) > 0):
                comma = ","
            else:
                comma = ""
            # Finally do the patching
            if cat1 == points_cats[0]:  # If it's the very first iteration
                if p2:  # If iteration has 2 points
                    grass.run_command('v.patch', overwrite = True, flags=costatt, input = vect1 + comma + vect2, output = vectout)
                else:  # Technically this should never be called (because not having 2 points per iteration can happen only for the very last iteration), but I'll leave it here just in case or for future reference
                    grass.run_command('g.rename', overwrite = True, vect = vect1 + "," + vectout)
            else:
                if grass.find_file(vectout, element='vector')['name']:  # Check whether vectout exists or not (this can happen when the first iteration did not produce any vectors, i.e. search radius was too small). If it does exist, add "a" (append) flag to v.patch, otherwise omit it.
                    append = costatt + "a"
                else:
                    append = costatt
                # Choose between two patching scenarios: 1 or 2 process versions.
                if p2:
                    grass.run_command('v.patch', overwrite = True, flags=append, input = vect1 + comma + vect2, output = vectout)
                else:
                    grass.run_command('v.patch', overwrite = True, flags=append, input = vect1, output = vectout)

    # Make 0 values of raster into NULLs
    if rastout:
        mapcalc.wait()
        nullproc = grass.run_command('r.null', map = rastout, setnull = "0")

    grass.message("All done!")
def _download(self):
    """!Download data from a WCS server using the GDAL WCS driver.

    Builds the WCS service-description XML and a VRT wrapper, then
    imports the VRT with r.import (when the 'rimport' param is set) or
    r.in.gdal (optionally into another location), relaying percentage
    progress parsed from the child's stderr.

    @return ret exit code of the import module (0 on success)
    """
    self._debug("_download", "started")

    self.xml_file = self._createXML()
    self.vrt_file = self._createVRT()

    # Resolve the import module up front so that all user-facing
    # messages name the module that actually runs (previously the
    # messages always said "r.in.gdal", even on the r.import path).
    module = "r.import" if self.params["rimport"] else "r.in.gdal"
    grass.message("Starting module %s ..." % module)

    env = os.environ.copy()
    env["GRASS_MESSAGE_FORMAT"] = "gui"  # emit machine-readable GRASS_INFO_PERCENT lines
    if self.params["rimport"]:
        p = grass.start_command(
            "r.import",
            input=self.vrt_file,
            output=self.params["output"],
            stdout=grass.PIPE,
            stderr=grass.PIPE,
            env=env,
        )
    elif self.params["location"] == "":
        # Import into the current location
        p = grass.start_command(
            "r.in.gdal",
            input=self.vrt_file,
            output=self.params["output"],
            stdout=grass.PIPE,
            stderr=grass.PIPE,
            env=env,
        )
    else:
        # Import into (and create) a different location
        p = grass.start_command(
            "r.in.gdal",
            input=self.vrt_file,
            output=self.params["output"],
            location=self.params["location"],
            stdout=grass.PIPE,
            stderr=grass.PIPE,
            env=env,
        )

    # Relay progress; stderr delivers bytes under Python 3, hence the
    # byte-literal prefix strip before the isdigit() test.
    while p.poll() is None:
        line = p.stderr.readline()
        linepercent = line.replace(b"GRASS_INFO_PERCENT:", b"").strip()
        if linepercent.isdigit():
            grass.percent(int(linepercent), 100, 1)

    grass.percent(100, 100, 5)

    ret = p.wait()
    if ret != 0:
        grass.fatal("%s for %s failed." % (module, self.vrt_file))
    else:
        grass.message("%s was successful for new raster map %s " % (module, self.params["output"]))

    # Best-effort cleanup of the temporary service/VRT files
    grass.try_remove(self.vrt_file)
    grass.try_remove(self.xml_file)
    self._debug("_download", "finished")
    return ret
def main():
    """Compute a cumulative cost-distance ("totalcost") raster.

    For every non-null cell of the friction map an individual r.cost
    surface is computed (friction rescaled so max_cost can be fixed to
    the integer 1) and the per-cell surfaces are merged into a single
    count raster.  Up to four r.cost processes run in parallel per loop
    iteration.  Reads module ``options``/``flags`` and appends temporary
    map names to the module-level ``tmp_layers`` list.
    """
    # Get user inputs
    friction_original = options['friction']  # Input friction map
    out = options['out']  # Output totalcost raster
    maxcost = options['maxcost']  # Max cost distance in cost units
    # Use Knight's move in r.cost instead of Queen's move (a bit slower, but more accurate)
    knight = "k" if flags["k"] else ""
    # Percent of map to keep in memory in r.cost calculation
    mempercent = int(options['mempercent'])

    # Error if no valid friction surface is given
    if not grass.find_file(friction_original)['name']:
        grass.message(_("Friction surface <%s> not found") % friction_original)
        sys.exit()

    # Calculate cost distances / edge effect distances from the friction
    # map.  Result is in map units.
    info = grass.raster_info(friction_original)
    edgeeffect_min = float(maxcost) / float(info['max'])  # Minimum cost distance / edge effect
    edgeeffect_max = float(maxcost) / float(info['min'])  # Maximum cost distance / edge effect

    # If "Only calculate edge effect" is selected
    if flags['e']:
        grass.message("Minimum distance / edge effect: " + str(edgeeffect_min))
        grass.message("Maximum distance / edge effect: " + str(edgeeffect_max))
        sys.exit()

    # If output file exists, but overwrite option isn't selected
    if not grass.overwrite():
        if grass.find_file(out)['name']:
            grass.message(_("Output raster map <%s> already exists") % out)
            sys.exit()

    # Get raster calculation region information and the mean resolution
    regiondata = grass.read_command("g.region", flags='p')
    regvalues = grass.parse_key_val(regiondata, sep=':')
    nsres = float(regvalues['nsres'])
    ewres = float(regvalues['ewres'])
    meanres = (nsres + ewres) / 2.0

    # Build "x,y" coordinate strings for every non-null friction cell.
    # r.stats -1gn emits flat "x y value" triplets, so step through the
    # token list three at a time.  NOTE: range (not Python-2-only
    # xrange) so the script also runs under Python 3.
    coordinatelist = []
    rasterdata = grass.read_command('r.stats', flags="1gn", input=friction_original)
    rastervalues = rasterdata.split()
    for val in range(0, len(rastervalues), 3):
        coordinatelist.append(rastervalues[val] + "," + rastervalues[val + 1])

    # This is the number of cells (and hence cost surfaces) to be used
    n_coords = len(coordinatelist)

    # Create temporary map names with the unique process id in their
    # name; register each in tmp_layers for cleanup.
    pid = os.getpid()
    cost1 = "tmp_totalcost_cost1_%d" % pid
    tmp_layers.append(cost1)
    cost2 = "tmp_totalcost_cost2_%d" % pid
    tmp_layers.append(cost2)
    cost3 = "tmp_totalcost_cost3_%d" % pid
    tmp_layers.append(cost3)
    cost4 = "tmp_totalcost_cost4_%d" % pid
    tmp_layers.append(cost4)
    friction = "tmp_friction_%d" % pid
    tmp_layers.append(friction)
    calctemp = "tmp_calctemp_%d" % pid
    tmp_layers.append(calctemp)

    # Friction values are per map unit, so multiply by the region
    # resolution and divide by maxcost; this lets r.cost's integer-only
    # max_cost be fixed to 1.
    grass.mapcalc("$outmap = $inmap * $res / $mcost", outmap=friction,
                  inmap=friction_original, res=meanres, mcost=maxcost)

    # Main loop: iterate through the cells with a step of 4 and run up
    # to four parallel r.cost processes per iteration.
    for c in range(0, n_coords, 4):
        # When fewer than four coordinates remain, substitute "null()"
        # for the unused slots so they contribute nothing to the merge.
        procs = []  # r.cost processes actually started this iteration
        try:
            procs.append(grass.start_command(
                'r.cost', overwrite=True, flags=knight, input=friction,
                output=cost1, start_coordinates=coordinatelist[c],
                max_cost=1, percent_memory=mempercent))
            if c + 1 < n_coords:
                procs.append(grass.start_command(
                    'r.cost', overwrite=True, flags=knight, input=friction,
                    output=cost2, start_coordinates=coordinatelist[c + 1],
                    max_cost=1, percent_memory=mempercent))
            else:
                cost2 = "null()"
            if c + 2 < n_coords:
                procs.append(grass.start_command(
                    'r.cost', overwrite=True, flags=knight, input=friction,
                    output=cost3, start_coordinates=coordinatelist[c + 2],
                    max_cost=1, percent_memory=mempercent))
            else:
                cost3 = "null()"
            if c + 3 < n_coords:
                procs.append(grass.start_command(
                    'r.cost', overwrite=True, flags=knight, input=friction,
                    output=cost4, start_coordinates=coordinatelist[c + 3],
                    max_cost=1, percent_memory=mempercent))
            else:
                cost4 = "null()"
        except Exception:
            grass.message("Error with r.cost: " + str(sys.exc_info()[0]))
            sys.exit()

        # Wait only for the processes that were actually started.
        # (Previously all four were waited on unconditionally, which
        # raised NameError whenever fewer than four coordinates were
        # left in the final iteration.)
        for proc in procs:
            proc.wait()

        if c == 0:
            # Very first iteration: just merge the first cost surfaces
            try:
                grass.mapcalc(
                    "$outmap = if(isnull($tempmap1),0,1) + if(isnull($tempmap2),0,1) + if(isnull($tempmap3),0,1) + if(isnull($tempmap4),0,1)",
                    outmap=out, tempmap1=cost1, tempmap2=cost2,
                    tempmap3=cost3, tempmap4=cost4, overwrite=True)
            except Exception:
                grass.message("Error with mapcalc: " + str(sys.exc_info()[0]))
                sys.exit()
        else:
            # Rename the output of the previous iteration so it can be
            # read by mapcalc (x = x + y logic doesn't work in r.mapcalc)
            try:
                grass.run_command('g.rename', overwrite=True,
                                  rast=out + "," + calctemp)
            except Exception:
                grass.message("Error: " + str(sys.exc_info()[0]))
                sys.exit()
            # Merge the new r.cost results with the cumulative map
            try:
                grass.mapcalc(
                    "$outmap = if(isnull($inmap),0,$inmap) + if(isnull($tempmap1),0,1) + if(isnull($tempmap2),0,1) + if(isnull($tempmap3),0,1) + if(isnull($tempmap4),0,1)",
                    inmap=calctemp, outmap=out, tempmap1=cost1,
                    tempmap2=cost2, tempmap3=cost3, tempmap4=cost4,
                    overwrite=True)
            except Exception:
                grass.message("Error with mapcalc: " + str(sys.exc_info()[0]))
                sys.exit()

    # Finally print the edge effect values
    grass.message("---------------------------------------------")
    grass.message("Minimum distance / edge effect: " + str(edgeeffect_min))
    grass.message("Maximum distance / edge effect: " + str(edgeeffect_max))
def main():
    """Create least-cost paths between input points over a friction surface.

    For each input point (processed two per iteration, the second in a
    parallel subprocess) a cost surface is built with r.cost, least-cost
    paths to selected neighbouring points are traced with r.drain, and
    the results are accumulated into an optional raster output (cell
    value = number of paths crossing it) and/or an optional vector
    output (optionally with per-path cost attributes).

    Reads module-level ``options``/``flags``, appends temporary map
    names to ``tmp_rlayers``/``tmp_vlayers``, and relies on the
    ``PointLayerInfo`` class defined elsewhere in this file.
    """
    # User inputs
    friction = options['friction']  # Input friction raster
    inpoints = options['points']  # Input point layer
    rastout = options['rastout']  # Output least cost path raster
    radius = int(options['radius'])  # Point search radius
    n_closepoints = int(options['nearpoints'])  # Number of closest points
    vectout = options['vectout']  # Vector layer output
    knight = "k" if flags['k'] else ""  # Knight's move flag
    costatt = "e" if flags['c'] else ""  # Calculate total cost values for paths and add them to attribute table

    # Check no vector or raster output is chosen, raise an error
    if (not vectout) and (not rastout):
        grass.message("No output chosen!")
        sys.exit()

    # Check overwrite settings
    # If output raster file exists, but overwrite option isn't selected
    if not grass.overwrite():
        if grass.find_file(rastout)['name']:
            grass.message(_("Output raster map <%s> already exists") % rastout)
            sys.exit()

    # If output vector file exists, but overwrite option isn't selected
    if not grass.overwrite():
        if grass.find_file(vectout, element='vector')['name']:
            grass.message(_("Output vector map <%s> already exists") % vectout)
            sys.exit()

    # If overwrite is chosen, remove the previous layers before any
    # action (to lessen the probability of some random errors)
    if grass.overwrite():
        grass.run_command("g.remove", rast=rastout, vect=vectout, quiet=True)

    # Get a region resolution to be used in cost attribute calculation,
    # because the default will be in map units
    if vectout and (costatt == "e"):
        # Get raster calculation region information
        regiondata = grass.read_command("g.region", flags='p')
        regvalues = grass.parse_key_val(regiondata, sep=':')
        # Assign variables for necessary region info bits
        nsres = float(regvalues['nsres'])
        ewres = float(regvalues['ewres'])
        regionres = (nsres + ewres) / 2.0
        rescoefficient = regionres

    # Get process id (pid) and create temporary layer names which are
    # also added to the tmp_rlayers / tmp_vlayers cleanup lists
    pid = os.getpid()  # Process ID, used for getting unique temporary filenames
    costmap1 = "tmp_cost_%d" % pid  # Cost surface for point 1
    tmp_rlayers.append(costmap1)
    costmap2 = "tmp_cost_%d_%i" % (pid, 2)  # Cost surface from point 2 (parallel process)
    tmp_rlayers.append(costmap2)
    costdir1 = "tmp_costdir_%d" % pid  # Temporary cost direction raster 1
    tmp_rlayers.append(costdir1)
    costdir2 = "tmp_costdir_%d_%i" % (pid, 2)  # Temporary cost direction raster 2
    tmp_rlayers.append(costdir2)
    lcpmap1 = "tmp_lcp_%d" % pid  # Least cost path map from costmap1
    tmp_rlayers.append(lcpmap1)
    lcpmap2 = "tmp_lcp_%d_%i" % (pid, 2)  # Least cost path map from costmap2 (parallel process)
    tmp_rlayers.append(lcpmap2)
    lcptemp = "tmp_lcptemp_%d" % pid  # Temporary file for mapcalc
    tmp_rlayers.append(lcptemp)
    region = "tmp_region_%d" % pid  # Temporary vector layer of computational region
    tmp_vlayers.append(region)
    points = "tmp_points_%d" % pid  # Temporary point layer which holds points only inside the region
    tmp_vlayers.append(points)

    if vectout:  # if vector output is needed, create the temporary vector layers too
        vectdrain1 = "tmp_vectdrain_%d" % pid
        tmp_vlayers.append(vectdrain1)
        vectdrain2 = "tmp_vectdrain2_%d" % pid
        tmp_vlayers.append(vectdrain2)

    # Make sure input data points are inside raster computational region:
    # create a region polygon and select points that are inside it
    grass.run_command('v.in.region', overwrite=True, output=region)
    grass.run_command('v.select', overwrite=True, flags="tc",
                      ainput=inpoints, atype='point', binput=region,
                      btype='area', output=points, operator='within')

    # Create a new PointLayerInfo class instance using the input point
    # layer and get the categories list as well as total feature count
    pointlayer = PointLayerInfo(points)
    points_cats = pointlayer.featcats  # A list() of layer feature categories
    points_featcount = pointlayer.featcount  # integer of feature count in point layer
    points_coordsdict = pointlayer.coordsdict  # dict() of point coordinates as tuple (x,y)

    # Create empty dictionaries for storing cost distances between points
    costdict1 = dict()
    costdict2 = dict()

    # Create placeholder processes, so that they can be wait()-ed on in
    # the loop's first pass without more complicated checks
    mapcalc = grass.Popen("", shell=True)
    lcp1 = grass.Popen("", shell=True)
    lcp2 = grass.Popen("", shell=True)

    # The main loop for least cost path creation.  For each point a cost
    # surface is created, least cost paths created and then added to the
    # general output file.  To make use of parallel processing, the step
    # is 2; "item" is always the first of the selected pair.
    for item in range(0, points_featcount, 2):
        # Get category number of the point from the points_cats list
        cat1 = points_cats[item]

        # p2 (second/parallel process) is False by default; set True if
        # there is another point left, and remember its category
        p2 = False
        if item + 1 < points_featcount:
            p2 = True
            cat2 = points_cats[item + 1]

        # PointLayerInfo with the current point as centerpoint (from
        # which distances are measured in the class)
        point1 = PointLayerInfo(points, cat1)
        if p2:  # The same for p2 if needed
            point2 = PointLayerInfo(points, cat2)

        # Begin the cost surface process with the start coordinate of the
        # currently selected point.  Do the same for the second process.
        costsurf1 = grass.start_command('r.cost', flags=knight,
                                        overwrite=True, input=friction,
                                        output=costmap1, outdir=costdir1,
                                        start_coordinates=point1.centercoord())
        if p2:
            costsurf2 = grass.start_command('r.cost', flags=knight,
                                            overwrite=True, input=friction,
                                            output=costmap2, outdir=costdir2,
                                            start_coordinates=point2.centercoord())

        # Create the drainlist (list of feature cats that lcp from the
        # current point is drawn to) depending on whether radius and/or
        # n_closepoints are used.  See PointLayerInfo class for details.
        if radius and n_closepoints:  # If radius and n_closepoints are used
            drainlist1 = point1.near_points_in_radius(n_closepoints, radius)
            if p2:
                drainlist2 = point2.near_points_in_radius(n_closepoints, radius)
        elif radius:  # If radius is used
            drainlist1 = point1.points_in_radius(radius)
            if p2:
                drainlist2 = point2.points_in_radius(radius)
        elif n_closepoints:  # If n_closepoints is used
            drainlist1 = point1.near_points(n_closepoints)
            if p2:
                drainlist2 = point2.near_points(n_closepoints)
        else:  # If neither radius nor n_closepoints are used
            drainlist1 = point1.cats_without_centerpoint()
            if p2:
                drainlist2 = point2.cats_without_centerpoint()

        # Build the comma-separated coordinate string for r.drain
        drain_coords1 = ""
        for drainpoint in drainlist1:
            drain_x, drain_y = point1.coordsdict[drainpoint]
            drain_coords1 = drain_coords1 + str(drain_x) + "," + str(drain_y) + ","

        if p2:  # The same thing for the second process
            drain_coords2 = ""
            for drainpoint in drainlist2:
                drain_x, drain_y = point2.coordsdict[drainpoint]
                drain_coords2 = drain_coords2 + str(drain_x) + "," + str(drain_y) + ","

        # Wait for the previous processes to finish their processing.
        # NOTE(review): costsurf2 is waited on unconditionally — if p2 is
        # False on the very first iteration (single-point input) this
        # raises NameError; confirm intended input constraints.
        costsurf1.wait()
        costsurf2.wait()
        mapcalc.wait()

        # If vector output is needed, do the r.drain for each point in
        # the drainlist separately to get the cost values
        if vectout:
            if costatt == "e":
                for drainpoint in drainlist1:  # Each point cat in the drainlist is iterated
                    drain_x, drain_y = point1.coordsdict[drainpoint]  # Currently selected point's coordinates
                    drain_onecoord = str(str(drain_x) + "," + str(drain_y))  # Coordinate used by r.drain below
                    grass.run_command('r.drain', overwrite=True,
                                      flags="ad", input=costmap1,
                                      indir=costdir1, output=lcpmap1,
                                      start_coordinates=drain_onecoord)
                    # Get raster extreme value (total cost for one path,
                    # scaled by region resolution) keyed by point cat
                    rastinfo = grass.raster_info(lcpmap1)
                    costdict1[drainpoint] = rescoefficient * rastinfo['min']
                if p2:  # Same procedure for the parallel process
                    for drainpoint in drainlist2:
                        drain_x, drain_y = point2.coordsdict[drainpoint]
                        drain_onecoord = str(str(drain_x) + "," + str(drain_y))
                        grass.run_command('r.drain', overwrite=True,
                                          flags="ad", input=costmap2,
                                          indir=costdir2, output=lcpmap2,
                                          start_coordinates=drain_onecoord)
                        rastinfo = grass.raster_info(lcpmap2)
                        costdict2[drainpoint] = rescoefficient * rastinfo['min']

            # Finally create the vector layer with all paths from the
            # current point (a raster output is created as a side effect)
            if len(drainlist1) > 0:
                lcp1 = grass.start_command('r.drain', overwrite=True,
                                           flags="d", input=costmap1,
                                           indir=costdir1, output=lcpmap1,
                                           vector_output=vectdrain1,
                                           start_coordinates=drain_coords1)
            if p2 and (len(drainlist2) > 0):
                lcp2 = grass.start_command('r.drain', overwrite=True,
                                           flags="d", input=costmap2,
                                           indir=costdir2, output=lcpmap2,
                                           vector_output=vectdrain2,
                                           start_coordinates=drain_coords2)

        # If raster output is needed, but path maps have not been made
        # yet (i.e. vectout must be False) then make those
        if not vectout and (len(drainlist1) > 0):
            lcp1 = grass.start_command('r.drain', overwrite=True,
                                       flags="d", input=costmap1,
                                       indir=costdir1, output=lcpmap1,
                                       start_coordinates=drain_coords1)
            # NOTE(review): nesting of the second r.drain under the first
            # reconstructed from collapsed formatting — placing it at the
            # outer level would clobber the vector-path run when vectout
            # is set; confirm against upstream source.
            if p2 and (len(drainlist2) > 0):
                lcp2 = grass.start_command('r.drain', overwrite=True,
                                           flags="d", input=costmap2,
                                           indir=costdir2, output=lcpmap2,
                                           start_coordinates=drain_coords2)

        # Wait for the lcp processes to finish
        lcp1.wait()
        lcp2.wait()

        # If raster output is needed, do the mapcalc stuff: merge the
        # path rasters
        if rastout:
            # Empty drainlists mean no raster was produced; substitute a
            # constant 0 so the map algebra below stays valid
            if len(drainlist1) == 0:
                lcpmap1 = 0
            if len(drainlist2) == 0:
                lcpmap2 = 0
            if cat1 == points_cats[0]:  # If it's the very first iteration
                if p2:
                    # Technically this should not be False in any
                    # situation, but kept for additional safety.
                    # Add lcpmap1 and lcpmap2 together
                    mapcalc = grass.mapcalc_start(
                        "$outmap = if(isnull($tempmap1),0,1) + if(isnull($tempmap2),0,1)",
                        outmap=rastout, tempmap1=lcpmap1,
                        tempmap2=lcpmap2, overwrite=True)
                else:  # Just in case
                    mapcalc = grass.mapcalc_start(
                        "$outmap = if(isnull($tempmap1),0,1)",
                        outmap=rastout, tempmap1=lcpmap1, overwrite=True)
            else:
                # Rename the cumulative lcp map from the previous
                # iteration so that mapcalc can use it (x=x+y logic
                # doesn't work with mapcalc)
                grass.run_command('g.rename',
                                  rast=rastout + ',' + lcptemp,
                                  overwrite=True)
                # rastout = Previous LCP + Current LCP
                if p2:
                    mapcalc = grass.mapcalc_start(
                        "$outmap = $inmap + if(isnull($tempmap1),0,1) + if(isnull($tempmap2),0,1)",
                        inmap=lcptemp, outmap=rastout,
                        tempmap1=lcpmap1, tempmap2=lcpmap2)
                else:
                    mapcalc = grass.mapcalc_start(
                        "$outmap = $inmap + if(isnull($tempmap1),0,1)",
                        inmap=lcptemp, outmap=rastout, tempmap1=lcpmap1)

        # If vector output is needed, do all necessary things like
        # merging the vectors and getting values for the attribute table
        # (if specified)
        if vectout:
            if costatt == "e":  # Only if cost attributes are needed
                if len(drainlist1) > 0:  # Process 1
                    # Add attribute table to the vector path layer
                    grass.run_command('v.db.addtable', map=vectdrain1)
                    # Add columns, fill in path lengths and the current
                    # point cat as "from_point"
                    grass.run_command('v.db.addcolumn', map=vectdrain1,
                                      columns="length double precision, from_point int, to_point int, cost double precision")
                    grass.run_command('v.to.db', map=vectdrain1,
                                      type="line", option="length",
                                      columns="length")
                    grass.run_command('v.db.update', map=vectdrain1,
                                      column="from_point",
                                      value=str(cat1))
                # Same as previous section but for process 2
                if p2 and (len(drainlist2) > 0):
                    grass.run_command('v.db.addtable', map=vectdrain2)
                    grass.run_command('v.db.addcolumn', map=vectdrain2,
                                      columns="length double precision, from_point int, to_point int, cost double precision")
                    grass.run_command('v.to.db', map=vectdrain2,
                                      type="line", option="length",
                                      columns="length")
                    grass.run_command('v.db.update', map=vectdrain2,
                                      column="from_point",
                                      value=str(cat2))

                # A loop to update the path attribute values in the table
                if len(drainlist1) > 0:
                    # Helper counter: cats of a newly created vector
                    # start from 1 and run successively, so no catlist
                    # is needed
                    drainseq = 1
                    for drainpoint in drainlist1:
                        # Update to_point column with values from drainlist
                        grass.run_command('v.db.update', map=vectdrain1,
                                          column="to_point",
                                          value=str(drainpoint),
                                          where="cat = " + str(drainseq))
                        # Update the cost column using costdict created earlier
                        grass.run_command('v.db.update', map=vectdrain1,
                                          column="cost",
                                          value=costdict1[drainpoint],
                                          where="cat = " + str(drainseq))
                        drainseq += 1
                # The same for process 2
                if p2 and (len(drainlist2) > 0):
                    drainseq = 1  # Reset the counter
                    for drainpoint in drainlist2:
                        grass.run_command('v.db.update', map=vectdrain2,
                                          column="to_point",
                                          value=str(drainpoint),
                                          where="cat = " + str(drainseq))
                        grass.run_command('v.db.update', map=vectdrain2,
                                          column="cost",
                                          value=costdict2[drainpoint],
                                          where="cat = " + str(drainseq))
                        drainseq += 1

            # Patch vector layers.
            # For both processes, first make sure that drainlists for the
            # current iteration are not empty; use drain vectors in
            # v.patch only if they are non-empty, otherwise empty strings,
            # so vectors from previous iterations are not reused.
            # NOTE(review): drainlist2 is referenced unconditionally — if
            # p2 is False on the first iteration it is unbound; confirm
            # intended input constraints.
            if len(drainlist1) > 0:
                vect1 = vectdrain1
            else:
                vect1 = ""
            if len(drainlist2) > 0:
                vect2 = vectdrain2
            else:
                vect2 = ""
            # If BOTH drain processes resulted in vectors, create a comma
            # separator for v.patch's input string
            if (len(drainlist1) > 0) and (len(drainlist2) > 0):
                comma = ","
            else:
                comma = ""

            # Finally do the patching
            if cat1 == points_cats[0]:  # If it's the very first iteration
                if p2:  # If iteration has 2 points
                    grass.run_command('v.patch', overwrite=True,
                                      flags=costatt,
                                      input=vect1 + comma + vect2,
                                      output=vectout)
                else:
                    # Technically this should never be called (fewer than
                    # 2 points per iteration can happen only on the very
                    # last iteration), but kept just in case
                    grass.run_command('g.rename', overwrite=True,
                                      vect=vect1 + "," + vectout)
            else:
                # Check whether vectout exists (it may not, e.g. when the
                # first iteration produced no vectors because the search
                # radius was too small).  If it exists, add the "a"
                # (append) flag to v.patch, otherwise omit it.
                if grass.find_file(vectout, element='vector')['name']:
                    append = costatt + "a"
                else:
                    append = costatt
                # Choose between two patching scenarios: 1 or 2 process versions.
                if p2:
                    grass.run_command('v.patch', overwrite=True,
                                      flags=append,
                                      input=vect1 + comma + vect2,
                                      output=vectout)
                else:
                    grass.run_command('v.patch', overwrite=True,
                                      flags=append, input=vect1,
                                      output=vectout)

    # Make 0 values of the raster into NULLs
    if rastout:
        mapcalc.wait()
        nullproc = grass.run_command('r.null', map=rastout, setnull="0")

    grass.message("All done!")