def test(self):
    # Report the active GRASS session, then list every raster and
    # vector map visible from it, indented under a heading.
    print('Current GRASS GIS 7 environment:')
    print(gscript.gisenv())
    listing = (('Available raster maps:', 'rast'),
               ('Available vector maps:', 'vect'))
    for heading, element in listing:
        print(heading)
        for name in gscript.list_strings(type=element):
            print('{}{}'.format(' ' * 4, name))
def grass_setup_00(gisdb=r'C:\Users\betha\Work\grassdata',
                   location='script_testing_chown05',
                   mapset='mapset_01'):
    """Locate a GRASS 7 installation, start a session in the given
    mapset and print the maps visible from it.

    :param gisdb: path to the GRASS database directory
    :param location: name of the GRASS location inside *gisdb*
    :param mapset: name of the mapset inside *location*
    """
    import os
    import sys
    import subprocess

    # Location of GRASS binary
    grass7bin = r'C:\OSGeo4W64\bin\grass78.bat'

    # Query GRASS GIS itself for its GISBASE.
    startcmd = [grass7bin, '--config', 'path']
    try:
        p = subprocess.Popen(startcmd, shell=False,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
    except OSError as error:
        sys.exit("ERROR: Cannot find GRASS GIS start script"
                 " {cmd}: {error}".format(cmd=startcmd[0], error=error))
    if p.returncode != 0:
        sys.exit("ERROR: Issues running GRASS GIS start script"
                 " {cmd}: {error}"
                 .format(cmd=' '.join(startcmd), error=err))
    # BUG FIX: Popen returns *bytes* on Python 3, so the original
    # out.strip(os.linesep) raised TypeError there (and str.strip with a
    # two-char os.linesep strips those characters individually, not the
    # trailing newline). Decode first, then strip whitespace.
    if isinstance(out, bytes):
        out = out.decode(sys.getfilesystemencoding() or 'utf-8')
    gisbase = out.strip()

    # set GISBASE environment variable
    os.environ['GISBASE'] = gisbase
    # define GRASS-Python environment
    grass_pydir = os.path.join(gisbase, "etc", "python")
    sys.path.append(grass_pydir)

    # Import GRASS Python bindings; grass.script.setup must be imported
    # explicitly for script.setup.init to be available.
    import grass.script as script
    import grass.script.setup

    # Launch session
    rcfile = script.setup.init(gisbase, gisdb, location, mapset)

    # Example calls
    script.message('Current GRASS GIS 7 environment:')
    print(script.gisenv())
    script.message('Available raster maps:')
    for rast in script.list_strings(type='raster'):
        print(rast)
    script.message('Available vector maps:')
    for vect in script.list_strings(type='vector'):
        print(vect)
def test_a(self):
    """Run r3.to.rast with default coefficients and verify the slices."""
    self.assertModule('r3.to.rast', input=self.rast3d, output=self.rast2d)
    rasts = list_strings('raster', mapset=".",
                         pattern="%s_*" % self.rast2d,
                         exclude="%s_*" % self.rast2d_ref)
    # assertEquals is a deprecated alias of assertEqual (removed in
    # newer Python); use the canonical name.
    self.assertEqual(len(rasts), 4,
                     msg="Wrong number of 2D rasters present"
                         " in the mapset")
    ref_info = dict(cells=9)
    ref_univar = dict(cells=9, null_cells=0)
    for rast in rasts:
        self.assertRasterExists(rast)
        # the following doesn't make much sense because we just listed them
        self.to_remove_2d.append(rast)
        self.assertRasterFitsInfo(raster=rast, reference=ref_info,
                                  precision=0)
        self.assertRasterFitsUnivar(raster=rast, reference=ref_univar,
                                    precision=0)
    # check the actual values
    for rast_ref, rast in zip(self.rast2d_refs, rasts):
        self.assertRastersNoDifference(actual=rast, reference=rast_ref,
                                       precision=0.1)
def test_a_b_coeff(self):
    """Run r3.to.rast with multiply/add coefficients and verify slices."""
    self.assertModule('r3.to.rast', input=self.rast3d, output=self.rast2d,
                      multiply=2, add=0.5)
    rasts = list_strings('raster', mapset=".",
                         pattern="%s_*" % self.rast2d,
                         exclude="%s_*" % self.rast2d_ref)
    # assertEquals is a deprecated alias of assertEqual (removed in
    # newer Python); use the canonical name.
    self.assertEqual(len(rasts), 4,
                     msg="Wrong number of 2D rasters present"
                         " in the mapset")
    ref_info = dict(cells=9)
    ref_univar = dict(cells=9, null_cells=0)
    for rast in rasts:
        self.assertRasterExists(rast)
        # the following doesn't make much sense because we just listed them
        self.to_remove_2d.append(rast)
        self.assertRasterFitsInfo(raster=rast, reference=ref_info,
                                  precision=0)
        self.assertRasterFitsUnivar(raster=rast, reference=ref_univar,
                                    precision=0)
    # check the actual values
    for rast_ref, rast in zip(self.rast2d_refs, rasts):
        self.assertRastersNoDifference(actual=rast, reference=rast_ref,
                                       precision=0.1)
def test_output(self): self.assertModule("v.what.strds", input="points", strds="A", output="what_strds", overwrite=True) maps = gscript.list_strings('vector') self.assertIn('what_strds@{ma}'.format(ma=gscript.gisenv()['MAPSET']), maps)
def parsevectorlayer(self, layername, grassmapname, value=1, force=False):
    """
    Take point information from a vector layer, mark the points on
    the layer specified and return them as a list

    @param string name of the layer to be exported
    @param string name of the GRASS map file to be created
    @param int value to be set
    @param boolean optional, whether an existing file may be overwritten
           (NOTE(review): not used anywhere in this body)
    @return list of [row, col] cell positions that were marked
    """
    vectors = []
    # silently returns [] when the map does not exist in the mapset
    if grassmapname in grass.list_strings("vect"):
        layer = grass.vector_db_select(grassmapname)["values"]
        # TODO only points are supported, ask some expert how to test this
        # TODO indexing seems to start at "1".. verify!
        for v in layer.values():
            # TODO do they all look like this??
            # assumes a 4-field db row whose first and last field match
            # (cat == value?) -- TODO confirm against the db schema
            if len(v) == 4 and v[0] == v[3]:
                p = self.stringcoordinate(v[1], v[2])
                # TODO - as with grass numpy array it seems that
                # [0,0] is north-most west-most..
                # convert map coordinates to [row, col] cell indices
                p[0] = int(
                    round(
                        (self.region["n"] - p[0]) / self.region["nsres"]))
                p[1] = int(
                    round(
                        (p[1] - self.region["w"]) / self.region["ewres"]))
                vectors.append(p)
                self.layers[layername][p[0]][p[1]] = value
    return vectors
def test_b(self): self.assertModule("r3.to.rast", input=self.rast3d, output=self.rast2d) rasts = list_strings( "raster", mapset=".", pattern="%s_*" % self.rast2d, exclude="%s_*" % self.rast2d_ref, ) self.assertEquals(len(rasts), 4, msg="Wrong number of 2D rasters present" " in the mapset") ref_info = dict(cells=9) # only this tests the presence of nulls ref_univar = dict(cells=9, null_cells=2) for rast in rasts: self.assertRasterExists(rast) # the following doesn't make much sense because we just listed them self.to_remove_2d.append(rast) self.assertRasterFitsInfo(raster=rast, reference=ref_info, precision=0) self.assertRasterFitsUnivar(raster=rast, reference=ref_univar, precision=0) # check the actual values # TODO: this does not check the position of nulls # (it ignores nulls) for rast_ref, rast in zip(self.rast2d_refs, rasts): self.assertRastersNoDifference(actual=rast, reference=rast_ref, precision=0.1)
def setUp(self): # TODO check if there is a nicer way to do this.. self.rastlayername = None # "r_agent_rast_testmap@"+grass.gisenv()['MAPSET'] self.vectlayername = None # "r_agent_vect_testmap@"+grass.gisenv()['MAPSET'] if self.rastlayername: for m in grass.list_strings("rast"): if self.rastlayername == m: print "We need a raster map to play with in this test," + " but it seems to exist already: '" + self.rastlayername + "'" self.assertTrue(False) if self.vectlayername: for m in grass.list_strings("vect"): if self.vectlayername == m: print "We need a vector map to play with in this test," + " but it seems to exist already: '" + self.vectlayername + "'" self.assertTrue(False) self.pg = grassland.Grassland()
def test_output(self):
    """v.what.strds must create the what_strds vector in this mapset."""
    self.assertModule(
        "v.what.strds",
        input="points",
        strds="A",
        output="what_strds",
        overwrite=True,
    )
    mapset = gscript.gisenv()["MAPSET"]
    self.assertIn("what_strds@{ma}".format(ma=mapset),
                  gscript.list_strings("vector"))
def setgrasslayer(self, layername, grassmapname, force=False):
    """ Put an existing map from GRASS to the layer collection
        @param string name of the layer
        @param string name of an existing GRASS map layer
        @param boolean optional, whether to overwrite values if key exists
    """
    # the GRASS map must already exist, otherwise we bail out
    if grassmapname not in grass.list_strings("rast"):
        raise error.DataError(Grassland.ME,
                              "Grass Map was missing: " + grassmapname)
    # fill the new grass array with the contents from the map (must exist)
    raster = garray.array(grassmapname)
    self.grassmapnames[layername] = grassmapname
    self.setlayer(layername, raster, force)
def main():
    # Drop an attribute table from the current mapset's database.
    # Refuses to act unless the -f (force) flag is given, and warns when
    # the table is still attached to one or more vector maps.
    table = options['table']
    force = flags['f']

    # check if DB parameters are set, and if not set them.
    grass.run_command('db.connect', flags = 'c')

    kv = grass.db_connection()
    database = kv['database']
    driver = kv['driver']
    # schema needed for PG?

    if force:
        grass.message(_("Forcing ..."))

    # check if table exists
    # NOTE: file() and dict.itervalues() below are Python-2-only.
    nuldev = file(os.devnull, 'w')
    if not grass.db_table_exist(table, stdout = nuldev, stderr = nuldev):
        grass.fatal(_("Table <%s> not found in current mapset") % table)

    # check if table is used somewhere (connected to vector map)
    used = []
    vects = grass.list_strings('vect')
    for vect in vects:
        for f in grass.vector_db(vect, stderr = nuldev).itervalues():
            if not f:
                continue
            if f['table'] == table:
                used.append(vect)
                break
    if used:
        grass.warning(_("Deleting table <%s> which is attached to following map(s):") % table)
        for vect in used:
            grass.message(vect)

    # without -f only report what would happen and exit cleanly
    if not force:
        grass.message(_("The table <%s> would be deleted.") % table)
        grass.message("")
        grass.message(_("You must use the force flag to actually remove it. Exiting."))
        sys.exit(0)

    # pipe a DROP TABLE statement into db.execute
    p = grass.feed_command('db.execute', input = '-', database = database, driver = driver)
    p.stdin.write("DROP TABLE " + table)
    p.stdin.close()
    p.wait()
    if p.returncode != 0:
        grass.fatal(_("Cannot continue (problem deleting table)."))
def grassViewshed(lat, lng, pointNum, userid,
                  outputDir='/'.join([script_dir, '..', viewshedDir])):
    # Compute a binary viewshed from (lat, lng) on this user's DEM and
    # fold it into the running "combined" visibility map.
    # NOTE(review): outputDir is accepted but never used in this body.

    # redudant conections to grass
    r, g, gscript = connect2grass(userid)
    # project WGS84 lon/lat into web mercator, then snap the viewpoint
    # to the highest neighbouring cell
    outProj = Proj(init='epsg:3857')
    inProj = Proj(init='epsg:4326')
    x, y = transform(inProj, outProj, lng, lat)
    x, y = highestNeighbor(x, y, gscript, userid)
    print "found higher neighbor at", x, y
    rasters = gscript.list_strings(type='rast')
    print rasters
    # first raster whose name contains both the DEM name and the user id
    srtm = [
        raster for raster in rasters
        if options['demname'] in raster and userid in raster
    ][0]
    viewName = options['viewshedname'] + str(pointNum)
    print viewName
    r.viewshed(
        flags='b',  #binary visible/invisible viewshed
        input=srtm,
        output=viewName,
        coordinates=(x, y),
        max_distance=-1,
        overwrite=True)
    combinedName = options['combinedname']
    if pointNum == 1:
        print "Overwriting existing map with copy"
        # copy existing viewshed to new combined map
        expression = combinedName + ' = {1}@{0}'.format(userid, viewName)
        print "r.mapcalc", expression
        gscript.raster.mapcalc(expression, overwrite=True)
    else:
        # screen existing viewshed with new viewshed
        expression = '{0} = {0}@{1} * {2}@{1}'.format(combinedName, userid, viewName)
        print "r.mapcalc", expression
        gscript.raster.mapcalc(expression, overwrite=True)
def writelayer(self, layername, grassmapname=False, force=False):
    """ Write out a given layer to a GRASS map file
        @param string name of the layer to be exported
        @param string optional name of the GRASS map file to be written
        @param boolean optional, whether an existing file may be overwritten
    """
    # fall back to the registered map name when none was given
    if not grassmapname:
        try:
            grassmapname = self.grassmapnames[layername]
        except KeyError:
            raise error.DataError(Grassland.ME, "Grass Map name is empty.")
    if layername not in self.layers:
        raise error.DataError(Grassland.ME, "Layer is not in list.")
    # an existing map may only be replaced when force was requested;
    # garray expects the string "force" as its overwrite flag
    if grassmapname in grass.list_strings("rast"):
        if not force:
            raise error.DataError(Grassland.ME, "Grass map already exists.")
        force = "force"
    self.layers[layername].write(grassmapname, overwrite=force)
#full_location = os.path.join(gisdb, location, mapset) grass7bin = grass7bin_lin # Do not change this part gisbase = '/lib64/grass72' os.environ['GISBASE'] = gisbase os.environ['PATH'] += os.pathsep + os.path.join(gisbase, 'extrabin') home = os.path.expanduser("~") os.environ['PATH'] += os.pathsep + os.path.join(home, '.grass7', 'addons', 'scripts') os.environ['GISDBASE'] = gisdb gsetup.init(gisbase, gisdb, location, mapset) maps = gscript.list_strings(type='rast') # Loading map X = garray.array(maps[0]) X = np.asarray(X) region = gscript.region() print 'Map Loaded' X0 = X dx = 10 N = region['rows'] / dx Nxy = N * N X = X[::dx, :] X = X[:, ::dx]
def main():
    # Build an HTML report (plus two CSS files) containing univariate
    # statistics for every raster whose name starts with "fusion".

    # set rendering directory
    directory = os.path.normpath("C:/Users/Brendan/Documents/grassdata/rendering/fusion")

    # files
    html_file = "report.html"
    style = "style.css"
    layout = "layout.css"
    fullpath_html = os.path.join(directory,html_file)
    fullpath_style = os.path.join(directory,style)
    fullpath_layout = os.path.join(directory,layout)

    # initialize a dictionary for each category of raster
    cats = defaultdict(list)

    # loop through the elevation, slope, aspect, pca, depth, stddev, variance, and coeff maps
    categories = ["fusion"]
    for category in categories:
        # variables
        pattern = category+"*"
        # get list of rasters
        rasters = gscript.list_strings('raster', pattern=pattern)
        # iterate through the list of rasters
        for raster in rasters:
            # add values to dictionary
            cats[category].append(raster)

    # template variables
    title = "lidar-uav fusion"

    # write to a css file using the style template
    with open(fullpath_style, 'w') as output:
        output.write(style_template.replace("background_image",background_image))

    # write to a css file using the layout template
    with open(fullpath_layout, 'w') as output:
        output.write(layout_template)

    # write to an html file using templates
    with open(fullpath_html, 'w') as output:
        output.write(start_template.format(title=title))
        for category in categories:
            for raster in cats[category]:
                # compute univariate statistics
                stat = gscript.parse_command('r.univar', map=raster, flags='g')
                # partition raster name
                name, separator, mapset = raster.partition('@')
                # write html
                output.write(raster_template.format(
                    raster_title=raster,
                    name=name,
                    min=stat['min'],
                    max=stat['max'],
                    mean=stat['mean'],
                    var=stat['variance']))
        output.write(end_template)
# initialise the monthly accumulator maps to zero
grass.mapcalc('jul = 0.0')
grass.mapcalc('aug = 0.0')
grass.mapcalc('sep = 0.0')
grass.mapcalc('oct = 0.0')
grass.mapcalc('nov = 0.0')
grass.mapcalc('dec = 0.0')

it = 0  #iterator
allrast = 0  # all rasters conatining tas
years = 0
index = [None] * 1200  # Create list of 1200 (12x100) 'None's or 1800 (12x150)
search = var + "_"
grass.message('Raster maps:')
# Slot each matching raster into index: the number between the first '.'
# and the '@' in its name is used as a 1-based position.
for raster in grass.list_strings(type='rast'):
    if search in raster:
        print raster
        split_name = raster.split(".")
        split2 = split_name[1].split("@")
        print split2[0]
        index[int(split2[0]) - 1] = raster
# Accumulate each raster into its month map.
# NOTE(review): this chunk appears truncated -- only months 0-3 are
# visible and the increments of 'it'/'allrast' are not shown here.
for rast in index:
    #print rast
    print index[allrast]
    if it == 0:
        grass.mapcalc('jan = jan + ' + index[allrast])
    elif it == 1:
        grass.mapcalc('feb = feb + ' + index[allrast])
    elif it == 2:
        grass.mapcalc('mar = mar + ' + index[allrast])
    elif it == 3:
        grass.mapcalc('apr = apr + ' + index[allrast])
def SC(jobid, xlon, ylat, prj, damh, interval, output_lake=False):
    """Compute a stage-storage curve for a prospective dam site.

    Delineates the watershed above the outlet (xlon, ylat), then for a
    series of water levels between the outlet elevation and
    outlet + damh computes the reservoir volume with r.lake.

    @param jobid unique id used to name the log file and temp maps
    @param xlon outlet x coordinate (str or float)
    @param ylat outlet y coordinate (str or float)
    @param prj "native" keeps the coordinates as given, anything else
           triggers a reprojection via m.proj
    @param damh dam height above the outlet elevation
    @param interval number of elevation steps up to the dam top
    @param output_lake if True also export each lake as GeoJSON and KML
    @return dict with keys jobid, status, storage_list,
            lake_output_list, msg
    """
    logger.info("run SC")
    dem_full_path = DEM_FULL_PATH
    dem = DEM_NAME
    drainage_full_path = DRAINAGE_FULL_PATH
    drainage = DRAINAGE_NAME
    gisbase = GISBASE
    grass7bin = GRASS7BIN
    gisdb = GISDB
    # Define grass data folder, location, mapset
    if not os.path.exists(gisdb):
        os.mkdir(gisdb)
    location = "location_{0}".format(dem)
    mapset = "PERMANENT"
    keep_intermediate = False
    msg = ""
    # Create log file for each job
    logs_path = LOGS_PATH
    if not os.path.exists(logs_path):
        os.mkdir(logs_path)
    log_name = 'log_{0}.log'.format(jobid)
    # NOTE: open(..., 'w', 0) (unbuffered text mode) is Python-2-only
    f = open(os.path.join(logs_path, log_name), 'w', 0)
    # Create output_data folder path
    output_data_path = OUTPUT_DATA_PATH
    if not os.path.exists(output_data_path):
        os.mkdir(output_data_path)
    temp_files_list = []
    result = {}
    result['jobid'] = jobid
    try:
        # Create location by letting the grass binary import the DEM
        location_path = os.path.join(gisdb, location)
        if not os.path.exists(location_path):
            f.write('\n---------Create Location from DEM--------------------\n')
            f.write('{0}\n'.format(location_path))
            startcmd = grass7bin + ' -c ' + dem_full_path + ' -e ' + location_path
            print startcmd
            p = subprocess.Popen(startcmd, shell=True,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            out, err = p.communicate()
            if p.returncode != 0:
                print >>sys.stderr, 'ERROR: %s' % err
                print >>sys.stderr, 'ERROR: Cannot generate location (%s)' % startcmd
                f.write('\n---------Create Location failed--------------------\n')
                sys.exit(-1)
            else:
                f.write('\n---------Create Location done--------------------\n')
                print 'Created location %s' % location_path
        xlon = float(xlon)
        ylat = float(ylat)
        outlet = (xlon, ylat)
        dam_h = float(damh)
        elev_interval = dam_h/interval
        # Set GISBASE environment variable
        os.environ['GISBASE'] = gisbase
        # the following not needed with trunk
        os.environ['PATH'] += os.pathsep + os.path.join(gisbase, 'extrabin')
        # Set GISDBASE environment variable
        os.environ['GISDBASE'] = gisdb
        # define GRASS-Python environment
        gpydir = os.path.join(gisbase, "etc", "python")
        sys.path.append(gpydir)
        f.write('\n---------sys.path--------------------\n')
        f.write('\n'.join(sys.path))
        f.write('\n----------sys.version-------------------\n')
        f.write(sys.version)
        f.write('\n----------os.environ-----------------\n')
        f.write(str(os.environ))
        # import GRASS Python bindings (see also pygrass)
        import grass.script as gscript
        import grass.script.setup as gsetup
        gscript.core.set_raise_on_error(True)
        # launch session
        gsetup.init(gisbase, gisdb, location, mapset)
        f.write(str(gscript.gisenv()))
        # Check dem file, import if not exist
        # NOTE(review): both assignments below also rebind location_path;
        # presumably unintentional, but harmless after this point.
        dem_in_mapset_path = location_path = os.path.join(gisdb, location, mapset, "cell", dem)
        if not os.path.exists(dem_in_mapset_path):
            f.write("\n ---------- import DEM file ------------- \n")
            stats = gscript.read_command('r.in.gdal', input=dem_full_path, output=dem)
        #import drainage
        drainage_mapset_path = location_path = os.path.join(gisdb, location, mapset, "cell", drainage)
        if not os.path.exists(drainage_mapset_path):
            f.write("\n ---------- import Drainage file ------------- \n")
            stats = gscript.read_command('r.in.gdal', input=drainage_full_path, output=drainage)
        # List all files in location to check if the DEM file imported successfully
        f.write("\n ---------- raster ------------- \n")
        for rast in gscript.list_strings(type='rast'):
            f.write(str(rast))
        f.write("\n ---------- vector ------------- \n")
        for vect in gscript.list_strings(type='vect'):
            f.write(str(vect))
        f.write("\n ---------------------------JOB START-------------------------------- \n")
        f.write(str(datetime.now()))
        # Project xlon, ylat wgs84 into current
        # NOTE(review): the second clause ('or ... == "wgs84"') is
        # redundant -- "wgs84" already satisfies != "native".
        if prj.lower() != "native" or prj.lower() == "wgs84":
            f.write("\n ---------- Reproject xlon and ylat into native dem projection ------------- \n")
            stats = gscript.read_command('m.proj', coordinates=(xlon, ylat), flags='i')
            coor_list = stats.split("|")
            xlon = float(coor_list[0])
            ylat = float(coor_list[1])
            outlet = (xlon, ylat)
        # Define region
        f.write("\n ---------- Define region ------------- \n")
        stats = gscript.parse_command('g.region', raster=dem, flags='p')
        f.write(str(stats))
        # Read extent of the dem file from the parsed g.region lines
        for key in stats:
            if "north:" in key:
                north = float(key.split(":")[1])
            elif "south:" in key:
                south = float(key.split(":")[1])
            elif "west:" in key:
                west = float(key.split(":")[1])
            elif "east:" in key:
                east = float(key.split(":")[1])
            elif "nsres:" in key:
                nsres = float(key.split(":")[1])
            elif "ewres:" in key:
                ewres = float(key.split(":")[1])
        # check if xlon, ylat is within the extent of dem
        if xlon < west or xlon > east:
            f.write("\n ERROR: xlon is out of dem region. \n")
            raise Exception("(xlon, ylat) is out of dem region.")
        elif ylat < south or ylat > north:
            f.write("\n ERROR: ylat is out of dem region. \n")
            raise Exception("(xlon, ylat) is out of dem region.")
        # Calculate cell area
        cell_area = nsres * ewres
        # Flow accumulation analysis (only when drainage was not imported)
        f.write("\n ---------- Flow accumulation analysis ------------- \n")
        if not os.path.exists(drainage_mapset_path):
            stats = gscript.read_command('r.watershed', elevation=dem, threshold='10000', drainage=drainage, flags='s', overwrite=True)
        # Delineate watershed
        f.write("\n ---------- Delineate watershed ------------- \n")
        basin = "{0}_{1}_basin".format(dem, jobid)
        temp_files_list.append(basin)
        stats = gscript.read_command('r.water.outlet', input=drainage, output=basin, coordinates=outlet, overwrite=True)
        # Cut dem with watershed
        f.write("\n -------------- Cut dem ----------------- \n")
        dem_cropped = "{0}_{1}_cropped".format(dem, jobid)
        mapcalc_cmd = '{0} = if({1}, {2})'.format(dem_cropped, basin, dem)
        temp_files_list.append(dem_cropped)
        gscript.mapcalc(mapcalc_cmd, overwrite=True, quiet=True)
        # Read outlet elevation
        f.write("\n ---------- Read outlet elevation ------------- \n")
        outlet_info = gscript.read_command('r.what', map=dem, coordinates=outlet)
        f.write("\n{0} \n".format(outlet_info))
        outlet_elev = outlet_info.split('||')[1]
        try:
            outlet_elev = float(outlet_elev)
        except Exception as e:
            f.write("{0} \n".format(e.message))
            raise Exception("This point has no data.")
        f.write("------------Outlet elevation--{0} ---------------- \n".format(outlet_elev))
        # Create a list including elevations of all interval points
        dam_elev = outlet_elev + dam_h
        elev_list = []
        elev = outlet_elev + elev_interval
        while elev < dam_elev:
            elev_list.append(elev)
            elev += elev_interval
        elev_list.append(dam_elev)
        f.write("\n----------Elevation list----------\n")
        f.write(str(elev_list))
        #For each interval point, calculate reservior volume
        f.write("\n ------------- Reservoir volume calculation ---------------- \n")
        storage_list = []
        lake_output_list = []
        count = 0
        for elev in elev_list:
            count += 1
            f.write(str(elev))
            f.write(", ")
            # Generate reservoir raster file
            f.write("\n-----------Generate lake file ---------- No.{}\n".format(count))
            lake_rast = '{0}_{1}_lake_{2}'.format(dem, jobid, str(int(elev)))
            temp_files_list.append(lake_rast)
            stats = gscript.read_command('r.lake', elevation=dem_cropped, coordinates=outlet, waterlevel=elev, lake=lake_rast, overwrite=True)
            #Calculate reservoir volume: sum of water depths times cell area
            f.write("\n-----------Calculate lake volume --------- No.{}\n".format(count))
            stats = gscript.parse_command('r.univar', map=lake_rast, flags='g')
            f.write("\n{0}\n".format(str(stats)))
            sum_height = float(stats['sum'])
            f.write("\n-------Cell area--{0}----------\n".format(str(cell_area)))
            volume = sum_height * cell_area
            storage = (volume, elev)
            print("\nNo. {0}--------> sc is {1} \n".format(count, str(storage)))
            storage_list.append(storage)
            if output_lake:
                # output lake
                # r.mapcalc expression="lake_285.7846_all_0 = if( lake_285.7846, 0)" --o
                f.write("\n -------------- Set all values of raster lake to 0 ----------------- \n")
                lake_rast_all_0 = "{0}_all_0".format(lake_rast)
                mapcalc_cmd = '{0} = if({1}, 0)'.format(lake_rast_all_0, lake_rast)
                gscript.mapcalc(mapcalc_cmd, overwrite=True, quiet=True)
                # covert raster lake_rast_all_0 into vector
                # r.to.vect input='lake_285.7846_all_0@drew' output='lake_285_all_0_vec' type=area --o
                f.write("\n -------------- convert raster lake_rast_all_0 into vector ----------------- \n")
                lake_rast_all_0_vec = "{0}_all_0_vect".format(lake_rast)
                lake_rast_all_0_vec = lake_rast_all_0_vec.replace(".", "_")
                f.write("\n -------------- {0} ----------------- \n".format(lake_rast_all_0_vec))
                stats = gscript.parse_command('r.to.vect', input=lake_rast_all_0, output=lake_rast_all_0_vec, type="area", overwrite=True)
                # output GeoJSON
                # v.out.ogr -c input='lake_285_all_0_vec' output='/tmp/lake_285_all_0_vec.geojson' format=GeoJSON type=area --overwrite
                geojson_f_name = "{0}.GEOJSON".format(lake_rast.replace(".", "_"))
                lake_rast_all_0_vec_GEOJSON = os.path.join(output_data_path, geojson_f_name)
                stats = gscript.parse_command('v.out.ogr', input=lake_rast_all_0_vec, output=lake_rast_all_0_vec_GEOJSON, \
                                              format="GeoJSON", type="area", overwrite=True, flags="c")
                # output KML
                # v.out.ogr -c input='lake_285_all_0_vec' output='/tmp/lake_285_all_0_vec.KML' format=KML type=area --overwrite
                kml_f_name = "{0}.KML".format(lake_rast.replace(".", "_"))
                lake_rast_all_0_vec_KML = os.path.join(output_data_path, kml_f_name)
                stats = gscript.parse_command('v.out.ogr', input=lake_rast_all_0_vec, output=lake_rast_all_0_vec_KML, \
                                              format="KML", type="area", overwrite=True, flags="c")
                output_tuple = (str(elev), geojson_f_name, kml_f_name)
                lake_output_list.append(output_tuple)
        # prepend the zero-storage point at the outlet elevation
        zero_point = (0, outlet_elev)
        storage_list.insert(0, zero_point)
        for sc in storage_list:
            f.write(str(sc))
            f.write("\n")
        f.write("\n-------------------------END--------------------------\n")
        f.write(str(datetime.now()))
        f.close()
        # NOTE(review): keep_intermediate is set True on both success and
        # error, so the finally-block cleanup below never runs -- confirm
        # whether temp maps are meant to be kept.
        keep_intermediate = True
        result['status'] = 'success'
        result['storage_list'] = storage_list
        result['lake_output_list'] = lake_output_list
        result['msg'] = msg
        return result
    except Exception as e:
        keep_intermediate = True
        print e.message
        msg = e.message
        if f is not None:
            f.write("\n-------------!!!!!! ERROR !!!!!!--------------\n")
            f.write(e.message)
            f.close()
        result['status'] = 'error'
        result['storage_list'] = None
        result['lake_output_list'] = None
        result['msg'] = msg
        return result
    finally:
        save_result_to_db(jobid, result)
        # Remove all temp files
        if not keep_intermediate:
            for f in temp_files_list:
                f_fullpath = "{0}/{1}/{2}/cell/{3}".format(gisdb, location, mapset, f)
                if os.path.exists(f_fullpath):
                    os.remove(f_fullpath)
def main(input_image):
    # Segment an RGB image with i.segment.uspo and write a composite PNG
    # whose colours are the per-segment mean of each band.

    # Initiate a list for temp layers and groups
    TMP = []
    TMP_file = []
    TMP_region = []
    TMP_group = []
    # Tranform image in .png if needed
    filepath, extension = os.path.splitext(input_image)
    if extension != ".png":
        im = Image.open(input_image)
        im.save(filepath + ".png")
        input_image = filepath + ".png"
        TMP_file.append(input_image)
    # Import .png image in GRASS
    tmp_image = os.path.split(
        gscript.tempfile())[1]  # Generate a name for temporary layer
    TMP.append(tmp_image)
    gscript.run_command('r.in.png', overwrite=True, input=input_image,
                        output=tmp_image)
    # Create a list of layers corresponding to RGB of the image
    # (r.in.png produces one band layer per channel matching the pattern)
    list_layers = gscript.list_strings("rast", pattern=tmp_image, flag="r")
    [TMP.append(a) for a in list_layers]
    # Set computional region and save it with name
    tmp_region = 'region_' + tmp_image
    gscript.run_command('g.region', raster=list_layers[0], save=tmp_region)
    TMP_region.append(tmp_region)
    # Create a image group
    tmp_group = os.path.split(
        gscript.tempfile())[1]  # Generate a name for temporary group
    TMP_group.append(tmp_group)
    gscript.run_command('i.group', group=tmp_group,
                        input=",".join(list_layers))
    # Unsupervised segmentation parameters optimization
    gscript.run_command('i.segment.uspo', overwrite=True, group=tmp_group,
                        output='-', segment_map='best',
                        regions=tmp_region,
                        segmentation_method='region_growing',
                        threshold_start='0.005', threshold_stop='0.4',
                        threshold_step='0.01', minsizes='50',
                        memory='5000', processes='15')
    TMP.append('best_' + tmp_region + '_rank1')
    # Compute mean spectral value per segment
    rbg_layer = {}
    rbg_layer_mean = {}
    for layer in list_layers:
        outputname = layer.split("@")[0] + '_avg'
        gscript.run_command('r.stats.zonal', overwrite=True,
                            base='best_' + tmp_region + '_rank1',
                            cover=layer, method='average',
                            output=outputname)
        TMP.append(outputname)
        # Create a dictionary containing name of RBG layer, keyed by the
        # channel suffix (.r / .g / .b) of the band layer name
        if outputname.split("_")[0].split(".")[-1] == "r":
            rbg_layer['r'] = layer
            rbg_layer_mean['r'] = outputname
        elif outputname.split("_")[0].split(".")[-1] == "g":
            rbg_layer['g'] = layer
            rbg_layer_mean['g'] = outputname
        elif outputname.split("_")[0].split(".")[-1] == "b":
            rbg_layer['b'] = layer
            rbg_layer_mean['b'] = outputname
    # Create a RGB composite as a new layer
    composite = os.path.split(filepath)[-1]
    composite_final = composite + "_final"
    gscript.run_command('r.composite', overwrite=True,
                        red=rbg_layer['r'], green=rbg_layer['g'],
                        blue=rbg_layer['b'], output=composite)
    gscript.run_command('r.composite', overwrite=True,
                        red=rbg_layer_mean['r'],
                        green=rbg_layer_mean['g'],
                        blue=rbg_layer_mean['b'],
                        output=composite_final)
    # Output layer as .png file
    gscript.run_command('r.out.png', overwrite=True,
                        input=composite_final,
                        output=filepath + "_uspo.png")
    # Cleanup
    gscript.run_command('g.remove', flags='f', type='raster',
                        name=','.join(TMP))
    gscript.run_command('g.remove', flags='f', type='region',
                        name=','.join(TMP_region))
    gscript.run_command('g.remove', flags='f', type='group',
                        name=','.join(TMP_group))
    for f in TMP_file:
        os.remove(f)
from grass.script import vector as grass # take directory from command line input and change path path = sys.argv[1] if len(sys.argv) < 1: print "You should provide the directory path as a parameter" sys.exit(1) # now change the path os.chdir(path) # print message gscript.message('Hello GRASS user!') # *** use GRASS list_strings to find specified rasters using a regular expression, change as file names change rasters = list_strings(type='raster', pattern='^elev_r_.*_2mrst_t1500$', flag='r') for raster in rasters: short = raster.split('@')[0] # print message gscript.message( 'We are now using map calculator to eliminate water features.') #expression using mapcalc to save only elevations above .36 and rename with '_higher_036' mapcalc("{short}_higher_036 = if({rast} > 0.36, 1, null())".format( rast=raster, short=short), overwrite=True) newmap = short + '_higher_036'
rel_mapsets = []
vcols = []
rmaps = []
# Collect, for every predictor entry in the metadata dict, the attribute
# column name, the fully qualified raster name, and its mapset.
for dtype in data_dict["predictor"].values():
    for dkey, ddict in dtype.items():
        vcols.append(dkey)
        rmaps.append(ddict["mapname"] + "@" + ddict["mapset"])
        rel_mapsets.append(ddict["mapset"])

# Make all referenced mapsets visible in the search path (deduplicated).
gs.run_command("g.mapsets", operation="add", mapset=list(set(rel_mapsets)))

for idx, col in enumerate(vcols):
    print(col)
    # only (re)compute columns missing from the vector's attribute table
    if not col in gs.vector.vector_columns(maps, layer=layer):
        print("Update attributes for {}".format(rmaps[idx]))
        all_maps = gs.list_strings(type="raster")
        # NOTE(review): a missing raster is only reported, not skipped --
        # update_attributes still runs; confirm this is intended.
        if rmaps[idx] not in all_maps:
            print("Error raster map <{}> not found!".format(rmaps[idx]))
        update_attributes(maps, layer=layer, meta_data_dict=data_dict, varname=col)

#with Session(gisdb=gisdb, location=location, mapset=mapset):
#    import grass.script as gs
#    update_attributes(maps, layer=layer, meta_data_dict=data_dict, varname='v_wind_speed_at_100m')

# NOTE(review): the body of this Session block continues beyond this chunk.
with Session(gisdb=gisdb, location=location, mapset=mapset, create_opts=""):
    # from grass.pygrass import raster as r
    # from grass.pygrass.utils import getenv
    import grass.script as gs
def __init__(self, **optionsandflags):
    '''Process all arguments and prepare processing.

    @param optionsandflags raw module options/flags; nonempty ones are
           attached to self as attributes via interpret_options()
    '''
    # add all options and flags as attributes (only nonempty ones)
    self.options = interpret_options(optionsandflags)
    self.__dict__.update(self.options)
    # save region for convenience, plus km2<->cells conversion factors
    self.region = grass.region()
    self.region['kmtocell'] = 10**6 / (self.region['ewres'] * self.region['nsres'])
    self.region['celltokm'] = self.region['ewres'] * self.region['nsres'] * 1e-6
    # check if DEM to processed or if all inputs set
    if not self.is_set('accumulation', 'drainage', 'streams'):
        grass.fatal('Either of these not set: accumulation, drainage, streams.')
    # lothresh default
    if 'lothresh' not in self.options:
        self.lothresh = self.upthresh * 0.05
    # what to do with upthresh
    if self.is_set('upthreshcolumn'):
        gm('Will look for upper thresholds in the %s column.' % self.upthreshcolumn)
        # get thresholds from column in station vect
        try:
            threshs = grass.vector_db_select(
                self.stations, columns=self.upthreshcolumn)['values']
            self.upthresh = OrderedDict([(k, float(v[0]))
                                         for k, v in sorted(threshs.items())])
        # narrowed from a bare except: so SystemExit/KeyboardInterrupt
        # are not swallowed
        except Exception:
            grass.fatal('Cant read the upper threshold from the column %s'
                        % self.upthreshcolumn)
    # streamthresh
    if 'streamthresh' in self.options:
        # convert to cells
        self.streamthresh = self.region['kmtocell'] * self.streamthresh
        # check if reasonable
        fract = float(self.streamthresh) / self.region['cells']
        if fract > 0.5 or fract < 0.01:
            gwarn('streamthresh is %s percent of the region size!' % (fract*100))
    else:
        self.streamthresh = int(self.region['cells'] * 0.02)
    # if no r.watershed flags given
    if 'rwatershedflags' not in self.options:
        self.rwatershedflags = 's'
    if 'rwatershedmemory' in self.options:
        self.rwatershedflags += 'm'
    else:
        # default value/not used
        self.rwatershedmemory = 300
    # check input for stats print
    # NOTE(review): indentation below reconstructed from a collapsed
    # source -- the catchment listing is grouped under `if self.s` since
    # it relies on catchmentprefix validated just above; confirm.
    if self.s:
        for o in ['streams', 'stations', 'catchmentprefix']:
            if not self.is_set(o):
                # BUG FIX: the option name was never interpolated into
                # the original message ('%s needs to be set!')
                grass.fatal('%s needs to be set!' % o)
        # get all catchments, ordered by the integer id embedded in the
        # raster name after the prefix (double-underscore names skipped)
        rst = grass.list_strings('rast', self.catchmentprefix+'*')
        rolist = [(int(r.split('@')[0].replace(self.catchmentprefix, '')), r)
                  for r in sorted(rst) if '__' not in r]
        self.catchment_rasters = OrderedDict(rolist)
        gm('Found these catchments %s' % self.catchment_rasters)
        # calculate station topology
        self.snap_stations()
        self.get_stations_topology()
    # initialise subbasinsdone
    self.subbasinsdone = {}
    return
qgispath = r'I:\DOCUMENTS\WEGC\02_PhD_research\04_Programming\QGIS'
grasspth = r'I:\DOCUMENTS\WEGC\02_PhD_research\04_Programming\GRASS'

# GISBASE SE Alpine Region (bounding Box of ZAMG stations), coordinate System UTM Z33N
gisbase = os.environ['GISBASE']
gisdbase = r'I:\DOCUMENTS\WEGC\02_PhD_research\04_Programming\GRASS\Austria'
location = "SE_Alpine"
mapset = "ZAMG_SE_Alpine"
gsetup.init(gisbase, gisdbase, location, mapset)
print grass.gisenv()

# report session and the maps visible from it (Python 2 prints)
grass.message('Current GRASS GIS 7 environment:')
print grass.gisenv()
grass.message('Available raster maps:')
for rast in grass.list_strings(type = 'rast'):
    print rast
grass.message('Available vector maps:')
for vect in grass.list_strings(type = 'vect'):
    print vect

AttrTable = range(12,440)  # fields in attribute table containing precip [mm]
for eachField in AttrTable:
    # IDW (6 nearest points) for each column in attribute table
    # NOTE(review): 'sagapath' is not defined in this chunk -- presumably
    # set earlier in the file; confirm before running.
    cmd = 'saga_cmd grid_gridding 1 -SHAPES:%s -FIELD:%s -SEARCH_RANGE:%s -SEARCH_RADIUS:%s -SEARCH_POINTS_MAX:%s -USER_SIZE:%s -USER_GRID:%s' \
        % (qgispath + '\ZAMG_UTM33_98daily.shp', eachField, 1, 10000, 6, 100,
           sagapath + '/IDW_' + str(eachField))
    os.system(cmd)
import grass.script as gscript
import grass.script.setup as gsetup
import os

###########
# Walk every location under the GRASS database and list the raster and
# vector maps found in its PERMANENT mapset.
gisbase = r"C:\OSGEO4~1\apps\grass\grass-7.0.4"
gisdb = r"C:\grassdb"
mapset = "PERMANENT"

for i, location in enumerate(sorted(os.listdir(gisdb))):
    print("\n\n" + location)
    # BUG FIX: a session must be (re-)initialised for each iterated location;
    # the original imported gsetup but never called init, so gisenv()/list
    # never pointed at the location being printed.
    gsetup.init(gisbase, gisdb, location, mapset)
    gscript.message('Current GRASS GIS 7 environment:')
    print(gscript.gisenv())
    gscript.message('Available raster maps:')
    for rast in gscript.list_strings(type='rast'):
        print(rast)
    gscript.message('Available vector maps:')
    for vect in gscript.list_strings(type='vect'):
        print(vect)
print("done")
#!/usr/bin/env python3
import grass.script as gscript

# Text appended to the mapcalc expression: a constant offset added to the
# halved stream raster values.
ofs = '+ 1200300000'

list1 = gscript.list_strings('raster')
x = '2_swa_gr78_Win10_4326__1arc_22200c_streams'
b = '2_swa_gr78_Win10_4326__1arc_22200c_basins'

# For each raster whose name contains the streams pattern: build an offset
# raster, vectorise it, then export it as ASCII grid and as shapefile.
for rast_name in list1:
    if x not in rast_name:
        continue
    out_name = rast_name[0:65] + '_ofs'
    gscript.run_command('r.mapcalc',
                        expression=out_name + '=' + '(' + rast_name + ')/2' + ofs)
    gscript.run_command('r.to.vect', input=out_name, output=out_name,
                        type='area', flags='v')
    gscript.run_command('r.out.ascii', input=out_name,
                        output='E:\\HRC_Water\\Python\\Test_Outputs' + '\\' + out_name,
                        flags='i')
    gscript.run_command('v.out.ogr', input=out_name,
                        output='E:\\HRC_Water\\Python\\Test_Outputs' + '\\' + out_name,
                        format='ESRI_shapefile')
def SC(jobid, xlon, ylat, prj):
    """Delineate the watershed draining to (xlon, ylat) and export it as GeoJSON.

    Creates a throw-away GRASS location from the DEM, imports DEM/drainage,
    runs r.water.outlet at the outlet point and exports the basin polygon.
    Returns (geojson_path, msg) on success, (None, msg) on failure.
    NOTE(review): relies on module-level constants (DEM_FULL_PATH, GISBASE, ...)
    and on os/sys/subprocess/tempfile/datetime imported elsewhere in the file.
    Python 2 only (print statements, unbuffered text open()).
    """
    dem_full_path = DEM_FULL_PATH
    dem = DEM_NAME
    drainage_full_path = DRAINAGE_FULL_PATH
    drainage = DRAINAGE_NAME
    gisbase = GISBASE
    grass7bin = GRASS7BIN
    # Define grass data folder, location, mapset
    gisdb = os.path.join(tempfile.gettempdir(), 'grassdata')
    if not os.path.exists(gisdb):
        os.mkdir(gisdb)
    location = "location_{0}".format(dem)
    mapset = "PERMANENT"
    msg = ""
    # Create log file for each job (buffering=0: unbuffered, Python 2 only)
    log_name = 'log_{0}.log'.format(jobid)
    log_path = os.path.join(gisdb, log_name)
    f = open(log_path, 'w', 0)
    # Create output_data folder path
    output_data_path = OUTPUT_DATA_PATH
    if not os.path.exists(output_data_path):
        os.mkdir(output_data_path)
    try:
        # Create location from the DEM (only once; reused on later jobs)
        location_path = os.path.join(gisdb, location)
        if not os.path.exists(location_path):
            f.write('\n---------Create Location from DEM--------------------\n')
            f.write('{0}\n'.format(location_path))
            startcmd = grass7bin + ' -c ' + dem_full_path + ' -e ' + location_path
            print startcmd
            p = subprocess.Popen(startcmd, shell=True,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            if p.returncode != 0:
                print >>sys.stderr, 'ERROR: %s' % err
                print >>sys.stderr, 'ERROR: Cannot generate location (%s)' % startcmd
                f.write('\n---------Create Location failed--------------------\n')
                sys.exit(-1)
            else:
                f.write('\n---------Create Location done--------------------\n')
                print 'Created location %s' % location_path
        xlon = float(xlon)
        ylat = float(ylat)
        outlet = (xlon, ylat)
        # Set GISBASE environment variable
        os.environ['GISBASE'] = gisbase
        # the following not needed with trunk
        os.environ['PATH'] += os.pathsep + os.path.join(gisbase, 'extrabin')
        # Set GISDBASE environment variable
        os.environ['GISDBASE'] = gisdb
        # define GRASS-Python environment
        gpydir = os.path.join(gisbase, "etc", "python")
        sys.path.append(gpydir)
        f.write('\n---------sys.path--------------------\n')
        f.write('\n'.join(sys.path))
        f.write('\n----------sys.version-------------------\n')
        f.write(sys.version)
        f.write('\n----------os.environ-----------------\n')
        f.write(str(os.environ))
        # import GRASS Python bindings (see also pygrass); must happen after
        # sys.path was extended above
        import grass.script as gscript
        import grass.script.setup as gsetup
        gscript.core.set_raise_on_error(True)
        # launch session
        gsetup.init(gisbase, gisdb, location, mapset)
        f.write(str(gscript.gisenv()))
        # Check the dem file, import if not exist
        # NOTE(review): the chained assignment also clobbers location_path
        dem_mapset_path = location_path = os.path.join(gisdb, location, mapset, "cell", dem)
        if not os.path.exists(dem_mapset_path):
            f.write("\n ---------- import DEM file ------------- \n")
            stats = gscript.read_command('r.in.gdal', input=dem_full_path, output=dem)
        # import drainage
        drainage_mapset_path = location_path = os.path.join(gisdb, location, mapset, "cell", drainage)
        if not os.path.exists(drainage_mapset_path):
            f.write("\n ---------- import Drainage file ------------- \n")
            stats = gscript.read_command('r.in.gdal', input=drainage_full_path, output=drainage)
        # List all files in location to check if the DEM file imported successfully
        f.write("\n ---------- List raster ------------- \n")
        for rast in gscript.list_strings(type='rast'):
            f.write(str(rast))
        f.write("\n ---------- List vector ------------- \n")
        for vect in gscript.list_strings(type='vect'):
            f.write(str(vect))
        f.write("\n ---------------------------JOB START-------------------------------- \n")
        f.write(str(datetime.now()))
        # Project xlon, ylat wgs84 into current projection
        # NOTE(review): the condition is true for every prj except "native"
        if prj.lower() != "native" or prj.lower() == "wgs84":
            f.write("\n ---------- Reproject xlon and ylat into native dem projection ------------- \n")
            stats = gscript.read_command('m.proj', coordinates=(xlon, ylat), flags='i')
            coor_list = stats.split("|")
            xlon = float(coor_list[0])
            ylat = float(coor_list[1])
            outlet = (xlon, ylat)
        # Define region
        f.write("\n ---------- Define region ------------- \n")
        stats = gscript.parse_command('g.region', raster=dem, flags='p')
        f.write(str(stats))
        # Read extent of the dem file from the parsed g.region output
        for key in stats:
            if "north:" in key:
                north = float(key.split(":")[1])
            elif "south:" in key:
                south = float(key.split(":")[1])
            elif "west:" in key:
                west = float(key.split(":")[1])
            elif "east:" in key:
                east = float(key.split(":")[1])
            elif "nsres:" in key:
                nsres = float(key.split(":")[1])
            elif "ewres:" in key:
                ewres = float(key.split(":")[1])
        # check if xlon, ylat is within the extent of dem
        if xlon < west or xlon > east:
            f.write("\n ERROR: xlon is out of dem region. \n")
            raise Exception("(xlon, ylat) is out of dem region.")
        elif ylat < south or ylat > north:
            f.write("\n ERROR: ylat is out of dem region. \n")
            raise Exception("(xlon, ylat) is out of dem region.")
        # Flow accumulation analysis (skip if a drainage raster was imported)
        f.write("\n ---------- Flow accumulation analysis ------------- \n")
        if not os.path.exists(drainage_mapset_path):
            stats = gscript.read_command('r.watershed', elevation=dem,
                                         threshold='10000', drainage=drainage,
                                         flags='s', overwrite=True)
        # Delineate watershed
        f.write("\n ---------- Delineate watershed ------------- \n")
        basin = "{0}_basin_{1}".format(dem, jobid)
        stats = gscript.read_command('r.water.outlet', input=drainage,
                                     output=basin, coordinates=outlet,
                                     overwrite=True)
        # output lake
        # r.mapcalc expression="lake_285.7846_all_0 = if( lake_285.7846, 0)" --o
        f.write("\n -------------- Set all values of raster basin to 0 ----------------- \n")
        basin_all_0 = "{0}_all_0".format(basin)
        mapcalc_cmd = '{0} = if({1}, 0)'.format(basin_all_0, basin)
        gscript.mapcalc(mapcalc_cmd, overwrite=True, quiet=True)
        # covert raster lake_rast_all_0 into vector
        # r.to.vect input='lake_285.7846_all_0@drew' output='lake_285_all_0_vec' type=area --o
        f.write("\n -------------- convert raster lake_rast_all_0 into vector ----------------- \n")
        basin_all_0_vect = "{0}_all_0_vect".format(basin)
        f.write("\n -------------- {0} ----------------- \n".format(basin_all_0_vect))
        stats = gscript.parse_command('r.to.vect', input=basin_all_0,
                                      output=basin_all_0_vect, type="area",
                                      overwrite=True)
        # output GeoJSON
        # v.out.ogr -c input='lake_285_alll_0_vec' output='/tmp/lake_285_all_0_vec.geojson' format=GeoJSON type=area --overwrite
        geojson_f_name = "{0}.GEOJSON".format(basin)
        basin_GEOJSON = os.path.join(output_data_path, geojson_f_name)
        stats = gscript.parse_command('v.out.ogr', input=basin_all_0_vect,
                                      output=basin_GEOJSON,
                                      format="GeoJSON", type="area",
                                      overwrite=True, flags="c")
        f.write("\n-------------------------END--------------------------\n")
        f.write(str(datetime.now()))
        f.close()
        return basin_GEOJSON, msg
    except Exception as e:
        print e.message
        msg = e.message
        if f is not None:
            f.write("\n-------------!!!!!! ERROR !!!!!!--------------\n")
            f.write(e.message)
            f.close()
        return None, msg
gisbase = out.strip(os.linesep) # set GISBASE environment variable os.environ['GISBASE'] = gisbase # define GRASS-Python environment grass_pydir = os.path.join(gisbase, "etc", "python") sys.path.append(grass_pydir) # import (some) GRASS Python bindings import grass.script as gscript import grass.script.setup as gsetup # launch session rcfile = gsetup.init(gisbase, gisdb, location, mapset) # example calls gscript.message('Current GRASS GIS 7 environment:') print gscript.gisenv() gscript.message('Available raster maps:') for rast in gscript.list_strings(type='raster'): print rast gscript.message('Available vector maps:') for vect in gscript.list_strings(type='vector'): print vect # delete the rcfile os.remove(rcfile)
import grass.script as gscript from grass.script import vector as grass # take directory from command line input and change path path = sys.argv[1] if len(sys.argv) <1: print "You should provide the directory path as a parameter" sys.exit(1) # now change the path os.chdir( path ) # print message gscript.message('Hello GRASS user!') # *** use GRASS list_strings to find specified rasters using a regular expression, change as file names change rasters = list_strings(type='raster', pattern='^elev_r_.*_2mrst_t1500$', flag='r') for raster in rasters: short = raster.split('@')[0] # print message gscript.message('We are now using map calculator to eliminate water features.') #expression using mapcalc to save only elevations above .36 and rename with '_higher_036' mapcalc("{short}_higher_036 = if({rast} > 0.36, 1, null())".format(rast=raster, short=short), overwrite=True) newmap = short + '_higher_036' # create a vector for a report run_command('r.to.vect', input=newmap, output=newmap, type='area', overwrite=True) # Find the unique category of the largest area
"""
# NOTE: the quotes above close a module docstring that begins before this chunk.
import grass.script as gscript
from collections import defaultdict

# Three-level lookup: d[name][number][category] -> fully qualified raster name.
d = defaultdict(lambda: defaultdict(lambda: defaultdict(str)))
# initialize a list for participants' names
name_list = []
# initialize a list of categories
categories = ["dem"]
# get list of rasters
rasters = gscript.list_strings('raster', pattern="*")
for raster in rasters:
    # raster names appear to follow '<name>_<cat>_<number>@<mapset>' -- TODO confirm
    raster_name, separator, mapset = raster.partition('@')
    name, separator, cat_number = raster_name.partition('_')
    cat, separator, number = cat_number.partition('_')
    name_list.append(name)
    # add values to dictionary
    d[name][number][cat] = raster
# remove duplicates
def main(): #mapset latlong - tutto il path locale llmset=options['llmset'] #matricione in csv matrix=options['matrix'] #rete delle stazioni in csv stations=options['stations'] # matclean=options['matclean'] # mattot=options['mattot'] # matmatching=options['matmatching'] # matinteger=options['matinteger'] #llMset= '/home/annalisa/DATAGRASS/LL_WGS84/ismar72' #matrix= '/home/annalisa/Documenti/Venezia/datiMauroFabrizio/dataset_adriatico/versioniMatrice/matrice3D.csv' #stations= '/home/annalisa/Documenti/Venezia/datiMauroFabrizio/dataset_adriatico/stationsAll.csv' grass.run_command('g.gisenv', set='"OVERWRITE=1"') #importiamo la matrice come punti 3D os.system("grass72 "+llMset+" --exec v.in.ascii --overwrite input="+stations+" output=stationsAll_2 skip=1") os.system("grass72 "+llMset+" --exec v.in.ascii -z --overwrite input="+matrix+" output=matrice_3D skip=2 z=3") grass.run_command('v.proj', location=llMset.split('/')[-2], mapset=llMset.split('/')[-1], input='matrice_3D') grass.run_command('v.proj', location=llMset.split('/')[-2], mapset=llMset.split('/')[-1], input='stationsAll_2') #aggiorno le coordinate nel sistema metrico UTM grass.run_command('v.to.db', map='matrice_3D', option='coor', columns='dbl_1,dbl_2,dbl_3') grass.run_command('v.to.db', map='stationsAll_2', option='coor', columns='dbl_1,dbl_2') #faccio una copia del file vettoriale dei dati importato in UTM grass.run_command('g.copy', vector='matrice_3D,matrice_3D_copyOld') #grass.run_command('g.copy', vector='matrice_3D_copyOld,matrice_3D') #spostamento geografico dei punti su terra grass.run_command('g.region', raster='italy_cleaned2', res='100') #facciamo un buffer negativo di 10km per evitare i campionamenti in zone costiere (o estuarine) #verifico prima che io non abbia gia il buf neg da precedenti elaborazioni (operazione time consuming!) 
r=str(grass.list_strings(type='raster')) if re.search('italy_cleaned3',r): pass; else: grass.run_command('r.grow', input='italy_cleaned2', output='italy_cleaned3', radius='-100', new='1') a=grass.read_command('v.what.rast', flags='p', map='matrice_3D', raster='italy_cleaned3').split('\n')[:-1] cats=[] for i in a: if i.split('|')[1]=='1': cats.append(int(i.split('|')[0])) #se ci sono punti su terra, segue una procedura di spostamento dei punti if len(cats) != 0: from_coords=(grass.read_command('v.to.db', flags='p', map='matrice_3D', option='coor', units='meters')[:-1]).split('\n')[1:] for i in cats: print 'moving category nr. '+str(i) station=grass.read_command('v.db.select', flags='c', map='matrice_3D', columns='str_3', where='cat='+str(i))[:-1] #coordinate di partenza for g in from_coords: if g.startswith(str(i)+'|'): east_from,north_from,z_from=g.split('|')[1],g.split('|')[2],g.split('|')[3] #coordinate di arrivo grass.run_command('v.db.addcolumn', map='stationsAll_2', columns="east double precision, north double precision") grass.run_command('v.to.db', map='stationsAll_2', type='point', option='coor', columns='east,north', units='meters') east_to,north_to,z_to=grass.read_command('v.db.select', flags='c', map='stationsAll_2', columns='east,north', where='str_1 LIKE "%'+str(station)+'%"')[:-1].split('|')[0],grass.read_command('v.db.select', flags='c', map='stationsAll_2', columns='east,north', where='str_1 LIKE "%'+str(station)+'%"')[:-1].split('|')[1],z_from #sposto i punti piazzati male grass.run_command('v.edit', tool='move', map='matrice_3D', cat=i, move=str(float(east_to)-float(east_from))+','+str(float(north_to)-float(north_from))+',0') grass.run_command('v.to.db', map='matrice_3D', option='coor', columns='dbl_1,dbl_2,dbl_3') #creazione dei voronoi e matching della stazione indicata coi voronoi #mi metto intorno alle stazioni e allargo la bbox di 30km per prendere piu punti possibile grass.run_command('g.region', vector='stationsAll_2', n='n+30000', 
s='s-30000', e='e+30000', w='w-30000') grass.run_command('v.voronoi', input='stationsAll_2', output='stationsAllVoronoi_2') grass.run_command('v.db.addcolumn',map='matrice_3D',column='voronoiStat char(30)') grass.run_command('v.what.vect',map='matrice_3D',column='voronoiStat',query_map='stationsAllVoronoi_2',query_column='str_1') a=grass.read_command('db.select',table='matrice_3D').split('\n')[1:-1] #individuo i punti con le stazioni scritte bene a meno di un carattere separatore cats_matching=[] cats_integer=[] matching=open('matching.csv','w') integer=open('integer.csv','w') for i in a: if len(i.split('|')[6])>6 : #correggi la tabella del vettoriale in caso di stazione+data i=i.replace('_'+i.split('|')[8],'',1) try: #provo a trasformare i nomi delle stazioni in interi, se questo non avviene, avvio il matching #per evitare che ad es. 183 finisca nella stazione 1/3 int(i.split('|')[6]) except ValueError: #non considero i punti che non ricadono in nessun voronoi if len(i.split('|')[-1])==0: continue; #caso in cui la stazione abbia due nomi (cambio nome nel tempo) if len(i.split('|')[-1])>5: for g in i.split('|')[-1].split(','): if (len(i.split('|')[6])<6 and i.split('|')[6].startswith(g.split('/')[0]) and i.split('|')[6].endswith(g.split('/')[-1])): #correggi la tabella del vettoriale in caso di stazione scritta male i=i.replace(i.split('|')[6],g,1) matching.writelines(i) matching.writelines('\n') cats_matching.append(int(i.split('|')[0])) else: #caso in cui la stazione ha un solo nome if (len(i.split('|')[6])<6 and i.split('|')[6].startswith(i.split('|')[-1].split('/')[0]) and i.split('|')[6].endswith(i.split('|')[-1].split('/')[-1])): #correggi la tabella del vettoriale in caso di stazione scritta male i=i.replace(i.split('|')[6],i.split('|')[-1],1) matching.writelines(i) matching.writelines('\n') cats_matching.append(int(i.split('|')[0])) else: #caso in cui al posto del nome della stazione ci sia un numero intero cats_integer.append(int(i.split('|')[0])) 
integer.writelines(i) integer.writelines('\n') matching.close() integer.close() #faccio una copia di sicurezza prima di modificare i dati di partenza grass.run_command('g.copy', vector='matrice_3D,matrice_3D_copy') #carico i nuovi vettoriali di matching e integer con matrice corretta grass.run_command('v.in.ascii', flags='z', input='matching.csv', output='matrice_3D_matching', x='2', y='3', z='4', cat='1') grass.run_command('v.in.ascii', flags='z', input='integer.csv', output='matrice_3D_integer', x='2', y='3', z='4', cat='1') #salvo i risultati, una tabella dei dati totali e tre shapes dei vettoriali totale, matching e integer grass.run_command('v.out.ogr', input='matrice_3D', output=matclean format='CSV') grass.run_command('v.out.ogr', flags='e', input='matrice_3D', output=mattot) grass.run_command('v.out.ogr', flags='e', input='matrice_3D_integer', output=matinteger) grass.run_command('v.out.ogr', flags='e', input='matrice_3D_matching', output=matmatching) # grass.run_command('v.edit', map='matrice_3D', type='point', tool='delete', cats=str(cats_integer)[1:-1]+','+str(cats_matching)[1:-1]) print "The End";
def geomorphon_00(dem_path, **kwargs):
    """
    Run r.geomorphon on a DEM, reclassify the landforms and export a GeoTIFF.

    Parameters
    ----------
    dem_path: str or Path obj
        Path to the original DEM
    kwargs: dict, optional
        All other arguments.
        search: Outer search radius. Use higher numbers for flatter areas. Default=3
        skip: Inner search radius. Default=0
        flat: Flatness threshold in degrees. Default=1
        dist: Default=0

    Returns
    -------
    Path to the exported GeoTIFF under the Surface_Flow folder.
    """
    from pathlib import Path
    import grass.script as gscript

    dem = Path(str(dem_path))
    name_parts = dem.stem.split('_')
    prj = name_parts[0]
    dem_ras = '_'.join(name_parts[0:2])
    dem_num = dem_ras[-2:]

    # Import the DEM and set region extent and projection to match it.
    # BUG FIX: list_strings() yields fully qualified names ('name@mapset'),
    # so the bare-name membership test was always False and the DEM was
    # re-imported on every call; compare against unqualified names instead.
    map_rasters = [r.split('@')[0] for r in gscript.list_strings(type='raster')]
    if dem_ras not in map_rasters:
        gscript.run_command('r.in.gdal', input=str(dem_path), output=dem_ras,
                            verbose=True)
    gscript.run_command('g.region', raster=dem_ras, verbose=True)

    forms = '{p}_forms{n}'.format(p=prj, n=dem_num)
    gscript.run_command('r.geomorphon', elevation=str(dem_ras) + "@PERMANENT",
                        forms=forms, **kwargs)

    # Reclassify. 10 -> 1:depression, 7 -> 2:hollow, 9 -> 3:valley
    g_ras = forms.replace('forms', 'geom')
    expr = "{o} = if({i}==10, 1, if({i}==7, 2, if({i}==9, 3, 0)))".format(
        o=g_ras, i=forms)
    gscript.raster.mapcalc(expr, overwrite=True)

    # Export the new file into an existing SFW*_DEMxx group folder, or create
    # a default SFW00_DEMxx one.
    cls = dem.parent.parent.parent / "Surface_Flow"
    if cls.exists():
        grps = list(cls.glob('SFW*_DEM' + str(dem_num)))
    else:
        grps = []
    if len(grps) > 0:
        sfw = grps[0]
        sfw_num = sfw.name.split('_')[1][-2:]
    else:
        # BUG FIX: '/' binds tighter than '+', so the original
        # `cls / 'SFW00_DEM' + str(dem_num)` evaluated Path + str and raised
        # TypeError; build the folder name before joining.
        sfw = cls / ('SFW00_DEM' + str(dem_num))
        sfw_num = '00'
        sfw.mkdir(parents=True)
    out_file = sfw / "{p}_GEOM{g}_DEM{n}.tif".format(
        p=prj, g=sfw_num, n=dem_num)
    gscript.run_command('r.out.gdal', input=g_ras, output=str(out_file),
                        format="GTiff", type="Byte",
                        createopt="COMPRESS=DEFLATE,PREDICTOR=2")
    return out_file
def main():
    """Run r.null on raster maps selected by pattern/exclude rules.

    Parses the module options, normalises the pattern/exclude pair for the
    chosen matching syntax (all / wildcards / basic / extended regex), lists
    the matching rasters in the current mapset, and either prints them
    (dry-run flag -d) or applies r.null with the forwarded flags.
    """
    options, flags = gs.parser()
    pattern = options["pattern"]
    exclude = options["exclude"]
    expression_type = options["matching"]
    setnull = options["setnull"]  # value(s) to null
    null = options["null"]  # null to value
    dry_run = flags["d"]

    # Collect the r.null passthrough flags that were set.
    null_flags = ""
    for flag, value in flags.items():
        if value and flag in "fincrz":
            null_flags += flag

    if expression_type == "all" and pattern:
        gs.fatal(_("Option pattern is not allowed with matching=all"))
    if expression_type == "all" and exclude:
        gs.fatal(_("Option exclude is not allowed with matching=all"))

    # we need to set pattern for all
    # and for exclude only input with wildcards
    if not pattern:
        if expression_type == "all":
            pattern = "*"
            exclude = None
            expression_type = "wildcards"
        elif expression_type == "wildcards":
            pattern = "*"
        elif expression_type in ("basic", "extended"):
            pattern = ".*"
    # a proper None is needed
    # ("" and not setting the value is different for g.list)
    if not exclude:
        exclude = None

    if expression_type == "wildcards":
        expression_type_flag = ""
    elif expression_type == "basic":
        expression_type_flag = "r"
    else:
        # expression_type == "extended" case
        expression_type_flag = "e"

    # Renamed locals (were `type` and `map` below) to avoid shadowing builtins.
    element_type = "raster"
    mapset = "."  # current
    try:
        maps = gs.list_strings(type=element_type, mapset=mapset,
                               pattern=pattern, exclude=exclude,
                               flag=expression_type_flag)
    except gs.CalledModuleError:
        # the previous error is appropriate (assuming g.list error)
        import sys
        sys.exit(1)
    if dry_run and maps:
        gs.message(
            _("With inclusion pattern <{pattern}>"
              " and exclusion pattern <{exclude}>"
              " using syntax <{expression_type}>"
              " these raster maps were identified").format(**locals()))
    elif dry_run:
        gs.message(
            _("No raster maps were identified"
              " with inclusion pattern <{pattern}>"
              " and exclusion pattern <{exclude}>"
              " using syntax <{expression_type}>").format(**locals()))
    for raster_map in maps:
        # TODO: option copy with prefix/suffix before setting nulls
        if dry_run:
            # TODO: apply the further selection flags
            # (or add dry run to r.null)
            print(raster_map)
        else:
            gs.run_command("r.null", map=raster_map, setnull=setnull,
                           null=null, flags=null_flags)
os.environ['GISDBASE'] = gisdb # import GRASS Python bindings (see also pygrass) import grass.script as gscript import grass.script.setup as gsetup # launch session gsetup.init(gisbase, gisdb, location, mapset) gscript.message('Current GRASS GIS 7 environment:') print gscript.gisenv() # list rasters in environment gscript.message('Available raster maps:') for rast in gscript.list_strings(type = 'rast'): print rast # CALCULATE TOPOGRAPHIC EXPOSURE import math azimuths = range(0, 360, 30) distance_intervals = range(20, 320, 20) mapcalc_formula = {} def get_position(interval, azimuth): value1 = int((math.cos(math.radians(azimuth)) * interval)/5) value2 = int((math.sin(math.radians(azimuth)) * interval)/5) return value1, value2
# grass.run_command("v.in.ogr", dsn=aralu_utm_vector, output=aralu_utm_vector_title ) # check dam vector aralu_check_dam_file = "/media/kiruba/New Volume/milli_watershed/check_dam/ch_aral_utm.shp" aralu_check_dam = "aralu_check_dam_utm" # remove vector grass.run_command('g.remove', vect=aralu_check_dam, quiet=True) # Input vector into grass grass.run_command("v.in.ogr", dsn=aralu_check_dam_file, output=aralu_check_dam, type='point') # set region settings to match vector grass.run_command('g.region', vect=aralu_utm_vector_title, flags='p') # list all the vectors within mapset grass.message("Vector maps:") for vect in grass.list_strings(type="vect"): print vect """ Clip raster with vector """ aralu_utm_raster = "aralu_utm_rast" # remove raster # grass.run_command('g.remove', rast=aralu_utm_raster, quiet=True) # rasterize vector to raster for raster-raster clipping using r.mapcalc # grass.run_command("v.to.rast", input=aralu_utm_vector_title, output=aralu_utm_raster,type='area', use='val', value=1 ) # grass.run_command('r.info', map=aralu_utm_raster) # raise SystemExit(0) aralu_dem_utm = "aralu_dem_utm" # remove raster grass.run_command('g.remove', rast=aralu_dem_utm, quiet=True)
# --- Script fragment: the matching `if` (mapset exists) precedes this chunk.
else:
    print("'" + mapsetname + "' mapset doesn't exists in " + user["gisdb"])

## Saving current time for processing time management
begintime_optical = time.time()

## Import optical imagery and rename band with color name
print("Importing optical raster imagery at " + time.ctime())
grass.run_command(
    'r.in.gdal',
    input=
    "F:\\Studium_Trier\\Masterarbeit\\Datensaetze\\GeoTiffs\\cloudFree1\\MSI_NDVI_vis_nir_20200807.tif",
    output="optical",
    overwrite=True)
# Rename each imported band (presumably 'optical.1'..'optical.6' -- TODO
# confirm) by the digit found in its name.
# NOTE(review): find() matches the digit anywhere in the map name, so any
# unrelated raster containing '1'..'6' would be renamed as well.
for rast in grass.list_strings("rast"):
    if rast.find("1") != -1:
        grass.run_command("g.rename", overwrite=True, rast=(rast, "MSI"))
    elif rast.find("2") != -1:
        grass.run_command("g.rename", overwrite=True, rast=(rast, "NDVI"))
    elif rast.find("3") != -1:
        grass.run_command("g.rename", overwrite=True, rast=(rast, "opt_blue"))
    elif rast.find("4") != -1:
        grass.run_command("g.rename", overwrite=True, rast=(rast, "opt_green"))
    elif rast.find("5") != -1:
        grass.run_command("g.rename", overwrite=True, rast=(rast, "opt_red"))
    elif rast.find("6") != -1:
        grass.run_command("g.rename", overwrite=True, rast=(rast, "opt_nir"))
print_processing_time(begintime_optical, "Optical imagery has been imported in ")
def run_app(self):
    """List the session maps, import points and fault lines, then compute and
    summarise point-to-fault distances (Python 2; `grass` imported at file level).
    """
    print grass.gisenv()
    grass.message('Raster maps:')
    for rast in grass.list_strings(type = 'rast'):
        print rast
    grass.message('Vector maps:')
    for v in grass.list_strings(type = 'vect'):
        print v
    r = grass.read_command("g.region", flags='p' )
    print r
    # import the BSB point shapefile
    bsb_shp="/Users/pirchiner/Documents/ws_indigo/MMD_TFinal/resources/gis_data/bsb_br.shp"
    bsb_grass="test_in_ogr"
    grass.run_command("v.in.ogr", dsn=bsb_shp,
                      output=bsb_grass,
                      #layer="1",
                      type="point",
                      overwrite=True)
    # import the fault-line shapefile
    faults_shp="/Users/pirchiner/Documents/ws_indigo/MMD_TFinal/resources/gis_data/faults_semNE.shp"
    faults_grass="faults_semNE_grass"
    grass.run_command("v.in.ogr", dsn=faults_shp,
                      output=faults_grass,
                      #layer="1",
                      type="line",
                      overwrite=True)
    # connect each point to the nearest fault (v.distance writes connecting
    # lines to <name>_raw; _from avoids the Python keyword 'from')
    distances_grass = "tmp_distances3"
    grass.run_command("v.distance",
                      _from=bsb_grass,
                      to=faults_grass,
                      output=distances_grass+"_raw",
                      overwrite=True,
                      upload="cat", column="cat")
    # add categories to the connecting lines so a table can be attached
    grass.run_command("v.category",
                      input=distances_grass+"_raw",
                      output=distances_grass,
                      #layer=1,
                      type="line",
                      op="add",
                      overwrite=True)
    grass.run_command("v.db.addtable",
                      map=distances_grass,
                      #table=distances_grass+"_table",
                      column="dist DOUBLE",
                      #layer=1,
                      overwrite=True)
    # grass.run_command("v.db.connect",
    #                   driver="sqlite",
    #                   map=distances_grass,
    #                   )
    print "bla"
    # store each connecting line's length (= point-to-fault distance)
    grass.run_command("v.to.db",
                      map=distances_grass,
                      option="length",
                      column="dist",
                      #layer=1,
                      overwrite=True)
    # print univariate statistics of the distances
    grass.run_command("v.univar",
                      map=distances_grass,
                      column="dist")