def setUpClass(cls):
    """Create a temporary mapset, make it current and remember the original."""
    cls.org_mapset = Mapset()
    name = tempname(10)
    make_mapset(mapset=name)
    cls.tmp_mapset_name = name
    tmp = Mapset(mapset=name)
    tmp.current()
    cls.tmp_mapset = tmp
    cls.tmp_mapset_path = tmp.path()
def cleanup():
    """Restore old mask if it existed"""
    current_mapset = Mapset().name
    if RasterRow("{}_MASK".format(TMP_NAME), current_mapset).exist():
        gscript.verbose("Restoring old mask...")
        # Drop the active mask first so the rename cannot collide.
        if RasterRow("MASK", current_mapset).exist():
            gscript.run_command("r.mask", flags="r")
        gscript.run_command(
            "g.rename", rast="{}_MASK,MASK".format(TMP_NAME), quiet=True
        )
    # No backup existed: just remove any mask that is still active.
    if RasterRow("MASK", Mapset().name).exist():
        gscript.run_command("r.mask", flags="r")
def findmaps(type, pattern=None, mapset="", location="", gisdbase=""):
    """Return a list of tuple contining the names of the:

    * map
    * mapset,
    * location,
    * gisdbase
    """
    from grass.pygrass.gis import Gisdbase, Location, Mapset

    def _maps_of(mset):
        # One (map, mapset, location, gisdbase) tuple per matching map.
        return [
            (m, mset.name, mset.location, mset.gisdbase)
            for m in mset.glist(type, pattern)
        ]

    def find_in_location(type, pattern, location):
        res = []
        for msetname in location.mapsets():
            res.extend(_maps_of(Mapset(msetname, location.name, location.gisdbase)))
        return res

    def find_in_gisdbase(type, pattern, gisdbase):
        res = []
        for loc in gisdbase.locations():
            res.extend(find_in_location(type, pattern, Location(loc, gisdbase.name)))
        return res

    # Dispatch on the most specific scope the caller provided.
    if gisdbase and location and mapset:
        return _maps_of(Mapset(mapset, location, gisdbase))
    if gisdbase and location:
        return find_in_location(type, pattern, Location(location, gisdbase))
    if gisdbase:
        return find_in_gisdbase(type, pattern, Gisdbase(gisdbase))
    if location:
        return find_in_location(type, pattern, Location(location))
    if mapset:
        return _maps_of(Mapset(mapset))
    return find_in_gisdbase(type, pattern, Gisdbase())
def get_path(path, vect_name=None):
    """Return the full path to the database; replacing environment variable
    with real values

    :param path: The path with substitutional parameter
    :param vect_name: The name of the vector map

    >>> from grass.script.core import gisenv
    >>> import os
    >>> path = '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db'
    >>> new_path = get_path(path)
    >>> new_path2 = os.path.join(gisenv()['GISDBASE'], gisenv()['LOCATION_NAME'],
    ...                          gisenv()['MAPSET'], 'sqlite', 'sqlite.db')
    >>> new_path.replace("//","/") == new_path2.replace("//","/")
    True
    >>> path = '$GISDBASE/$LOCATION_NAME/$MAPSET/vector/$MAP/sqlite.db'
    >>> new_path = get_path(path, "test")
    >>> new_path2 = os.path.join(gisenv()['GISDBASE'], gisenv()['LOCATION_NAME'],
    ...                          gisenv()['MAPSET'], 'vector', 'test', 'sqlite.db')
    >>> new_path.replace("//","/") == new_path2.replace("//","/")
    True
    """
    if "$" not in path:
        return path
    mapset = Mapset()
    # NOTE: '$MAPSET' must be substituted before '$MAP', otherwise the
    # '$MAP' prefix of '$MAPSET' would be replaced by the vector name.
    substitutions = [
        ('$GISDBASE', mapset.gisdbase),
        ('$LOCATION_NAME', mapset.location),
        ('$MAPSET', mapset.name),
    ]
    if vect_name is not None:
        substitutions.append(('$MAP', vect_name))
    for var, value in substitutions:
        path = path.replace(var, value)
    return path
def find_in_location(type, pattern, location):
    """Return (map, mapset, location, gisdbase) tuples for every map of
    the given type matching *pattern* in each mapset of *location*."""
    found = []
    for mset_name in location.mapsets():
        mset = Mapset(mset_name, location.name, location.gisdbase)
        found += [
            (m, mset.name, mset.location, mset.gisdbase)
            for m in mset.glist(type, pattern)
        ]
    return found
def main():
    """Build per-scene cloud masks and write a timestamp register file."""
    mapset = Mapset()
    mapset.current()

    with open(options['output'], 'w') as fd:
        for rast in mapset.glist('raster', pattern='*_B04_10m'):
            parts = rast.split('_')
            stamp = datetime.strptime(parts[2], '%Y%m%dT%H%M%S')
            ## workaround
            stamp_end = stamp + timedelta(seconds=1)
            vect = '{}_{}_MSK_CLOUDS'.format(parts[1], parts[2])
            mask_vect = '{}_{}'.format(vect, options['map'].split('@')[0])
            if Vector(vect).exist():
                # Cut the cloud polygons out of the user-supplied map.
                Module('v.overlay', ainput=options['map'], binput=vect,
                       operator='not', output=mask_vect)
            else:
                # No cloud vector for this scene: use the map unchanged.
                copy(options['map'], mask_vect, 'vector')
            Module('r.mask', vector=mask_vect, overwrite=True)
            Module('g.remove', flags='f', type='vector', name=mask_vect)
            Module('g.rename', raster=['MASK', mask_vect])
            fd.write("{0}|{1}|{2}{3}".format(mask_vect,
                                             stamp.strftime('%Y-%m-%d %H:%M:%S'),
                                             stamp_end.strftime('%Y-%m-%d %H:%M:%S'),
                                             os.linesep))
    return 0
def setUpClass(cls):
    """Prepare signature directories and a source mapset with fake signatures.

    Creates sig/sigset directories in the current mapset, then a randomly
    named source mapset containing one fake 'sig' and one fake 'sigset'
    signature file, recording created directories in ``cls.sigdirs``.
    """
    cls.mpath = utils.decode(G_mapset_path())
    cls.mapset_name = Mapset().name
    cls.sigdirs = []
    # As signatures are created directly not via signature creation
    # tools, we must ensure signature directories exist
    os.makedirs(f"{cls.mpath}/signatures/sig/", exist_ok=True)
    os.makedirs(f"{cls.mpath}/signatures/sigset/", exist_ok=True)
    # A mapset with a random name
    cls.src_mapset_name = tempname(10)
    G_make_mapset(None, None, cls.src_mapset_name)
    cls.src_mapset_path = (
        cls.mpath.rsplit("/", maxsplit=1)[0] + "/" + cls.src_mapset_name
    )
    # Create fake signature files
    os.makedirs(f"{cls.src_mapset_path}/signatures/sig/")
    cls.src_sig = tempname(10)
    cls.src_sig_dir = f"{cls.src_mapset_path}/signatures/sig/{cls.src_sig}"
    os.makedirs(cls.src_sig_dir)
    cls.sigdirs.append(cls.src_sig_dir)
    # Use a context manager so the handle is closed even if the write fails
    # (the original left the file object dangling on error).
    with open(f"{cls.src_sig_dir}/sig", "w") as f:
        f.write("A sig file")
    os.makedirs(f"{cls.src_mapset_path}/signatures/sigset/")
    cls.src_sigset = tempname(10)
    cls.src_sigset_dir = f"{cls.src_mapset_path}/signatures/sigset/{cls.src_sigset}"
    os.makedirs(cls.src_sigset_dir)
    cls.sigdirs.append(cls.src_sigset_dir)
    with open(f"{cls.src_sigset_dir}/sig", "w") as f:
        f.write("A sigset file")
def setUpClass(cls):
    """Create a 1x1 test raster filled with 1 in a temporary region."""
    cls.map = tempname(10)
    cls.mapset = Mapset().name
    cls.bandref = "The_Doors"
    cls.use_temp_region()
    cls.runModule("g.region", n=1, s=0, e=1, w=0, res=1)
    cls.runModule("r.mapcalc", expression="{} = 1".format(cls.map))
def __init__(self, cmd, width=None, height=None, overlap=0, processes=None,
             split=False, debug=False, region=None, move=None, log=False,
             start_row=0, start_col=0, out_prefix='', *args, **kargs):
    """Set up a tiled (grid) execution of a GRASS module.

    :param cmd: name of the GRASS module to run on each tile (e.g. 'r.slope.aspect')
    :param width: tile width in cells; passed to split_region_tiles
    :param height: tile height in cells; passed to split_region_tiles
    :param overlap: overlap between tiles, in cells
    :param processes: number of worker processes (consumed elsewhere; stored only)
    :param split: if True, immediately split the input rasters (calls self.split())
    :param debug: debug flag, stored last so a failure above leaves it unset
    :param region: Region to tile; defaults to the current Region()
    :param move: path of a location to copy the mapset to; enables the
        copy-inputs workflow below
    :param log: keep/remove log behaviour flag (stored only)
    :param start_row: first tile row to process
    :param start_col: first tile column to process
    :param out_prefix: prefix for output map names
    :param args, kargs: forwarded to the Module constructor; 'run_' is
        forced to False so the module is configured but not executed here
    """
    # Configure the module without running it; tiles are run later.
    kargs['run_'] = False
    self.mset = Mapset()
    self.module = Module(cmd, *args, **kargs)
    self.width = width
    self.height = height
    self.overlap = overlap
    self.processes = processes
    self.region = region if region else Region()
    self.start_row = start_row
    self.start_col = start_col
    self.out_prefix = out_prefix
    self.log = log
    self.move = move
    # GISRC of the source session; destination is set only when moving.
    self.gisrc_src = os.environ['GISRC']
    self.n_mset, self.gisrc_dst = None, None
    if self.move:
        # Copy the mapset structure to the target location and write a
        # private GISRC pointing at the copy.
        self.n_mset = copy_mapset(self.mset, self.move)
        self.gisrc_dst = write_gisrc(self.n_mset.gisdbase,
                                     self.n_mset.location, self.n_mset.name)
        # Copy every raster/vector/group input of the module into the
        # destination mapset so the tiled runs can see them.
        rasters = [r for r in select(self.module.inputs, 'raster')]
        if rasters:
            copy_rasters(rasters, self.gisrc_src, self.gisrc_dst,
                         region=self.region)
        vectors = [v for v in select(self.module.inputs, 'vector')]
        if vectors:
            copy_vectors(vectors, self.gisrc_src, self.gisrc_dst)
        groups = [g for g in select(self.module.inputs, 'group')]
        if groups:
            copy_groups(groups, self.gisrc_src, self.gisrc_dst,
                        region=self.region)
    # NOTE(review): uses the `region` argument, not self.region — when
    # region is None the tiling is computed from a None region; confirm
    # whether split_region_tiles handles that or self.region was intended.
    self.bboxes = split_region_tiles(region=region, width=width,
                                     height=height, overlap=overlap)
    # Template for per-tile mapset names, e.g. 'rslopeaspect_001_002'.
    self.msetstr = cmd.replace('.', '') + "_%03d_%03d"
    self.inlist = None
    if split:
        self.split()
    self.debug = debug
def setUpClass(cls):
    """Record mapset info and make sure signature directories exist."""
    cls.mpath = utils.decode(G_mapset_path())
    cls.mapset_name = Mapset().name
    cls.sigdirs = []
    # Signatures are written directly rather than through the signature
    # creation tools, so the target directories must be created up front.
    for kind in ("sig", "sigset"):
        os.makedirs(f"{cls.mpath}/signatures/{kind}/", exist_ok=True)
def main():
    """
    Delete all raster files which the name begin by "vshed"
    """
    gscript.run_command('g.region', flags='p')
    viewsheds = Mapset().glist('raster', pattern='vshed*')
    print(viewsheds)
    for layer in viewsheds:
        run_command("g.remove", flags="f", type="raster", name=layer)
def in_mapset(m, dtype, pattern=None):
    """Checks if a map with the same name already exists in the mapset

    :param m: map name to look for
    :param dtype: map type passed to Mapset.glist (e.g. 'raster', 'vector')
    :param pattern: optional glob pattern restricting the listed maps
    :returns: True if the map exists in the current mapset, False otherwise
    """
    # `in` already yields a bool — no need for an if/else returning True/False.
    return m in Mapset().glist(dtype, pattern=pattern)
def set_default(self):
    """Set the Region object to the default GRASS region.
    It works only in PERMANENT mapset

    :raises GrassError: if the current mapset is not PERMANENT, or if
        writing the default region file fails
    """
    from grass.pygrass.gis import Mapset
    mapset = Mapset()
    if mapset.name != 'PERMANENT':
        raise GrassError("ERROR: Unable to change default region. The "
                         "current mapset is not <PERMANENT>.")
    self.adjust()
    if libgis.G_put_window(self.c_region) < 0:
        # The default region is stored in the DEFAULT_WIND file of the
        # PERMANENT mapset (original message misspelled it "DEFAUL_WIND").
        raise GrassError("Cannot change region (DEFAULT_WIND file).")
def get_mapset(gisrc_src, gisrc_dst):
    """Get mapset from a GISRC source to a GISRC destination.

    :param gisrc_src: path to the GISRC source
    :type gisrc_src: str
    :param gisrc_dst: path to the GISRC destination
    :type gisrc_dst: str
    :returns: a tuple with Mapset(src), Mapset(dst)
    """
    msrc, lsrc, gsrc = read_gisrc(gisrc_src)
    mdst, ldst, gdst = read_gisrc(gisrc_dst)
    path_dst = os.path.join(gdst, ldst, mdst)
    if not os.path.isdir(path_dst):
        os.makedirs(path_dst)
    copy_special_mapset_files(os.path.join(gsrc, lsrc, msrc), path_dst)
    src = Mapset(msrc, lsrc, gsrc)
    dst = Mapset(mdst, ldst, gdst)
    # Destination sees everything the source saw, plus the source itself.
    dst.visible.extend(list(src.visible) + [src.name])
    return src, dst
def cleanup():
    """Remove temporary data"""
    # remove temporary region file
    grass.del_temp_region()
    try:
        grass.run_command('g.remove', flags='f', name=TMP_MAPS, quiet=True,
                          type=['vector', 'raster'], stderr=os.devnull,
                          stdout_=os.devnull)
    except Exception:
        # Best-effort removal: never let cleanup itself fail, but a bare
        # `except:` would also have swallowed KeyboardInterrupt/SystemExit.
        pass
    if RasterRow("MASK", Mapset().name).exist():
        grass.run_command("r.mask", flags="r", quiet=True)
    reset_mask()
def setUpClass(cls):
    """Create three 1x1 test rasters and label the first two."""
    cls.libc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("c"))
    cls.mapset = Mapset().name
    cls.map1 = tempname(10)
    cls.semantic_label1 = "The_Doors"
    cls.map2 = tempname(10)
    cls.semantic_label2 = "The_Who"
    cls.map3 = tempname(10)
    cls.use_temp_region()
    cls.runModule("g.region", n=1, s=0, e=1, w=0, res=1)
    for name in (cls.map1, cls.map2, cls.map3):
        cls.runModule("r.mapcalc", expression=f"{name} = 1")
    # The third map deliberately gets no semantic label.
    Rast_write_semantic_label(cls.map1, cls.semantic_label1)
    Rast_write_semantic_label(cls.map2, cls.semantic_label2)
def setUpClass(cls):
    """Prepare signature dirs in the current mapset and a random new mapset."""
    cls.list_ptr = ctypes.POINTER(ctypes.c_char_p)
    cls.mpath = utils.decode(G_mapset_path())
    cls.mapset_name = Mapset().name
    cls.sigdirs = []
    # Signatures are created directly, not via the signature creation
    # tools, so the directories must already exist.
    os.makedirs(f"{cls.mpath}/signatures/sig/", exist_ok=True)
    os.makedirs(f"{cls.mpath}/signatures/sigset/", exist_ok=True)
    # A mapset with a random name
    cls.rnd_mapset_name = tempname(10)
    G_make_mapset(None, None, cls.rnd_mapset_name)
    base = cls.mpath.rsplit("/", maxsplit=1)[0]
    cls.rnd_mapset_path = base + "/" + cls.rnd_mapset_name
    for kind in ("sig", "sigset"):
        os.makedirs(f"{cls.rnd_mapset_path}/signatures/{kind}/")
def clean_location(self, location=None):
    """Remove all created mapsets.

    :param location: a Location instance where we are running the analysis
    :type location: Location object
    """
    if location is None:
        # Switch to the moved mapset (if any) before listing its location.
        if self.n_mset:
            self.n_mset.current()
        location = Location()

    prefix = self.msetstr.split('_')[0] + '_*'
    for name in location.mapsets(prefix):
        Mapset(name).delete()

    # Restore the original mapset as current if we switched away from it.
    if self.n_mset and self.n_mset.is_current():
        self.mset.current()
def main():
    """Write a '<name>|<start>|<end>' timestamp line for every raster."""
    mapset = Mapset()
    mapset.current()

    with open(options['output'], 'w') as fd:
        for rast in mapset.glist('raster'):
            parts = rast.split('_')
            start = datetime.strptime(parts[2], '%Y%m%dT%H%M%S')
            #fd.write("{0}|{1}{2}".format(rast, iso_date, os.linesep))
            ## workaround
            end = start + timedelta(seconds=1)
            fd.write("{0}|{1}|{2}{3}".format(rast,
                                             start.strftime('%Y-%m-%d %H:%M:%S'),
                                             end.strftime('%Y-%m-%d %H:%M:%S'),
                                             os.linesep))
    return 0
def cleanup():
    """Remove raster and vector maps stored in a list"""
    grass.run_command(
        "g.remove",
        flags="f",
        type="raster,vector",
        pattern="{}_*".format(TEMPNAME),
        quiet=True,
        stderr=subprocess.PIPE,
    )
    # Reset mask if user MASK was present
    had_mask = RasterRow("MASK", Mapset().name).exist()
    had_backup = RasterRow("MASK_{}".format(TEMPNAME)).exist()
    if had_mask and had_backup:
        grass.run_command("r.mask", flags="r", quiet=True)
        reset_mask()
def main():
    """Export every viewshed raster (vshed*) of the current mapset to GeoTIFF.

    Output files are written to the directory given by options["output"],
    named '<raster>.tif'.
    """
    import os

    m1 = Mapset()
    # Getting all viewshed created
    viewsheds = m1.glist('raster', pattern='vshed*')
    for rast in viewsheds:
        # os.path.join is portable; the original hard-coded a Windows "\\"
        # separator. Also avoid shadowing the builtin `map`.
        output_string = os.path.join(options["output"], rast + ".tif")
        run_command("r.out.gdal", input=rast, output=output_string,
                    format="GTiff", overviews=0)
    return 0
def setUpClass(cls):
    """Create one empty sigset and one empty sig signature on disk."""
    cls.mpath = utils.decode(G_mapset_path())
    cls.mapset_name = Mapset().name
    cls.sigdirs = []
    # Signatures are written directly rather than via the signature
    # creation tools, so the directories must already exist.
    os.makedirs(f"{cls.mpath}/signatures/sig/", exist_ok=True)
    os.makedirs(f"{cls.mpath}/signatures/sigset/", exist_ok=True)

    cls.sig_name1 = tempname(10)
    cls.sig_dir1 = f"{cls.mpath}/signatures/sigset/{cls.sig_name1}"
    os.makedirs(cls.sig_dir1)
    cls.sigdirs.append(cls.sig_dir1)
    open(f"{cls.sig_dir1}/sig", "a").close()

    cls.sig_name2 = tempname(10)
    cls.sig_dir2 = f"{cls.mpath}/signatures/sig/{cls.sig_name2}"
    os.makedirs(cls.sig_dir2)
    cls.sigdirs.append(cls.sig_dir2)
    open(f"{cls.sig_dir2}/sig", "a").close()
def prepare_horizon(elevation, step, bufferzone, maxdistance, resolution,
                    prefix):
    """Compute horizon maps for '{prefix}_elev' unless a full set exists.

    One horizon raster is expected per angular *step* (360/step in total);
    r.horizon is only run when that count is not already reached.

    NOTE(review): the `elevation` parameter is unused — the elevation map
    name is derived from `prefix` instead; confirm with callers.
    """
    g_region(raster="{}_elev".format(prefix), res=resolution)
    maps = Mapset().glist("raster")
    hors = [h for h in maps if "{}_horizon".format(prefix) in h]
    # Inverted the original `if ...: pass / else:` into a single guard.
    if len(hors) != int(360 / float(step)):
        r_horizon(elevation="{}_elev".format(prefix), step=step,
                  bufferzone=bufferzone, maxdistance=maxdistance,
                  output="{}_horizon".format(prefix), overwrite=True)
def unset_mask():
    """Deactivate user mask"""
    if not RasterRow("MASK", Mapset().name).exist():
        return
    # Keep a backup copy before removing the live mask.
    grass.run_command(
        "g.copy",
        quiet=True,
        raster="MASK,MASK_{}".format(TEMPNAME),
        stderr=subprocess.DEVNULL,
        errors="ignore",
    )
    grass.run_command(
        "g.remove",
        quiet=True,
        type="raster",
        name="MASK",
        stderr=subprocess.DEVNULL,
        flags="f",
        errors="ignore",
    )
def main():
    """Turn each cloud vector into an inverted raster mask and register it."""
    mapset = Mapset()
    mapset.current()

    with open(options['output'], 'w') as fd:
        for vect in mapset.glist('vector', pattern='*MSK_CLOUDS'):
            parts = vect.split('_')
            start = datetime.strptime(parts[1], '%Y%m%dT%H%M%S')
            ## workaround
            end = start + timedelta(seconds=1)
            # Inverted mask (-i): everything outside the clouds.
            Module('r.mask', vector=vect, flags='i')
            Module('g.rename', raster=['MASK', vect])
            fd.write("{0}|{1}|{2}{3}".format(vect,
                                             start.strftime('%Y-%m-%d %H:%M:%S'),
                                             end.strftime('%Y-%m-%d %H:%M:%S'),
                                             os.linesep))
    return 0
def copy_mapset(mapset, path):
    """Copy mapset to another place without copying raster and vector data.

    :param mapset: a Mapset instance to copy
    :type mapset: Mapset object
    :param path: path where the new mapset must be copied
    :type path: str
    :returns: the instance of the new Mapset.

    >>> from grass.script.core import gisenv
    >>> mname = gisenv()['MAPSET']
    >>> mset = Mapset()
    >>> mset.name == mname
    True
    >>> import tempfile as tmp
    >>> import os
    >>> path = os.path.join(tmp.gettempdir(), 'my_loc', 'my_mset')
    >>> copy_mapset(mset, path)                      # doctest: +ELLIPSIS
    Mapset(...)
    >>> sorted(os.listdir(path))                     # doctest: +ELLIPSIS
    [...'PERMANENT'...]
    >>> sorted(os.listdir(os.path.join(path, 'PERMANENT')))
    [u'DEFAULT_WIND', u'PROJ_EPSG', u'PROJ_INFO', u'PROJ_UNITS', u'VAR', u'WIND']
    >>> sorted(os.listdir(os.path.join(path, mname)))   # doctest: +ELLIPSIS
    [...u'SEARCH_PATH',...u'WIND']
    >>> import shutil
    >>> shutil.rmtree(path)
    """
    per_old = os.path.join(mapset.gisdbase, mapset.location, 'PERMANENT')
    per_new = os.path.join(path, 'PERMANENT')
    map_old = mapset.path()
    map_new = os.path.join(path, mapset.name)
    # makedirs also creates `path`; the plain mkdir below can then succeed.
    if not os.path.isdir(per_new):
        os.makedirs(per_new)
    if not os.path.isdir(map_new):
        os.mkdir(map_new)
    # Copy only the special per-mapset files, never raster/vector data.
    for src, dst in ((per_old, per_new), (map_old, map_new)):
        copy_special_mapset_files(src, dst)
    gisdbase, location = os.path.split(path)
    return Mapset(mapset.name, location, gisdbase)
def get_path(path):
    """Return the full path to the database; replacing environment variable
    with real values

    >>> path = '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db'
    >>> new_path = get_path(path)
    >>> from grass.script.core import gisenv
    >>> import os
    >>> new_path2 = os.path.join(gisenv()['GISDBASE'], gisenv()['LOCATION_NAME'],
    ...                          gisenv()['MAPSET'], 'sqlite', 'sqlite.db')
    >>> new_path == new_path2
    True
    """
    if "$" not in path:
        return path
    mset = Mapset()
    for var, value in (('$GISDBASE', mset.gisdbase),
                       ('$LOCATION_NAME', mset.location),
                       ('$MAPSET', mset.name)):
        path = path.replace(var, value)
    return path
def main():
    """Write raster and (cloud-mask) vector timestamp registers."""
    mapset = Mapset()
    mapset.current()  # set mapset to current mapset

    with open(options['output_ras'], 'w') as fd:
        for rast in mapset.glist('raster'):  # get all available raster data
            parts = rast.split('_')
            # retrieve sensing date from file name
            start = datetime.strptime(parts[0], '%Y%m%dT%H%M%S')
            # workaround to create timespan
            end = start + timedelta(seconds=1)
            # write to timestamps text file
            fd.write("{0}|{1}|{2}{3}".format(rast,
                                             start.strftime('%Y-%m-%d %H:%M:%S'),
                                             end.strftime('%Y-%m-%d %H:%M:%S'),
                                             os.linesep))

    with open(options['output_vec'], 'w') as fd:
        # retrieve sensing date only for one band per date
        for rast in mapset.glist('raster', pattern='*_B02.tif'):
            parts = rast.split('_')
            start = datetime.strptime(parts[0], '%Y%m%dT%H%M%S')
            # workaround to create timespan
            end = start + timedelta(seconds=1)
            # pattern of cloud mask file names
            vect = 'cloudmask_{}_mergedvector'.format(parts[0])
            # create a mask out of cloud vector file
            Module('r.mask', vector=vect, overwrite=True)
            # remove original vector data
            Module('g.remove', flags='f', type='vector', name=vect)
            # rename mask to vector file name
            Module('g.rename', raster=['MASK', vect])
            # write to timestamps text file
            fd.write("{0}|{1}|{2}{3}".format(vect,
                                             start.strftime('%Y-%m-%d %H:%M:%S'),
                                             end.strftime('%Y-%m-%d %H:%M:%S'),
                                             os.linesep))
    return 0
# Compute zonal statistics of raster maps within buffers around vector
# geometries (a v.rast.stats-style tool). High-level flow, as visible below:
#   1. Parse options/flags; validate that each input raster and the input
#      vector exist; temporarily rename any user MASK (unset_mask()).
#   2. Parse and validate buffer distances; build `int_dict`, mapping each
#      univariate-statistic name to its r.univar output row, column type and
#      column suffix.
#   3. Build the output column names/types per raster prefix, buffer and
#      method (or per raster category when tabulating); add missing columns
#      to the vector attribute table, or write a header to file/stdout.
#   4. For every selected geometry and every buffer distance: buffer the
#      geometry into a temporary vector, align and set the region, rasterize
#      it as MASK (combined with the saved user MASK when present), then run
#      r.stats (tabulate mode) or r.univar and collect results as SQL UPDATE
#      fragments or delimited output rows.
#   5. Commit/close the DB connection or output file; optionally drop
#      all-NULL columns (the r-flag).
# NOTE(review): this text was reconstructed from mangled one-line source;
# the code below is kept byte-identical, including artifacts (e.g. `unicode`
# suggests Python 2, `tmp_map` is referenced before any visible assignment,
# and several string literals are broken across physical lines). Do not
# reflow without consulting the original file.
def main(): in_vector = options["input"].split("@")[0] if len(options["input"].split("@")) > 1: in_mapset = options["input"].split("@")[1] else: in_mapset = None raster_maps = options["raster"].split( ",") # raster file(s) to extract from output = options["output"] methods = tuple(options["methods"].split(",")) percentile = (None if options["percentile"] == "" else map( float, options["percentile"].split(","))) column_prefix = tuple(options["column_prefix"].split(",")) buffers = options["buffers"].split(",") types = options["type"].split(",") layer = options["layer"] sep = options["separator"] update = flags["u"] tabulate = flags["t"] percent = flags["p"] remove = flags["r"] use_label = flags["l"] empty_buffer_warning = ( "No data in raster map {} within buffer {} around geometry {}") # Do checks using pygrass for rmap in raster_maps: r_map = RasterAbstractBase(rmap) if not r_map.exist(): grass.fatal("Could not find raster map {}.".format(rmap)) user_mask = False m_map = RasterAbstractBase("MASK", Mapset().name) if m_map.exist(): grass.warning("Current MASK is temporarily renamed.") user_mask = True unset_mask() invect = VectorTopo(in_vector) if not invect.exist(): grass.fatal("Vector file {} does not exist".format(in_vector)) if output: if output == "-": out = None else: out = open(output, "w") # Check if input map is in current mapset (and thus editable) if in_mapset and unicode(in_mapset) != unicode(Mapset()): grass.fatal( "Input vector map is not in current mapset and cannot be modified. 
\ Please consider copying it to current mapset.".format( output)) buffers = [] for buf in options["buffers"].split(","): try: b = float(buf) if b.is_integer(): buffers.append(int(b)) else: buffers.append(b) except: grass.fatal("") if b < 0: grass.fatal("Negative buffer distance not supported!") ### Define column types depenting on statistic, map type and ### DB backend (SQLite supports only double and not real) # int: statistic produces allways integer precision # double: statistic produces allways floating point precision # map_type: precision f statistic depends on map type int_dict = { "number": (0, "int", "n"), "number_null": (1, "int", "null_cells"), "minimum": (3, "map_type", "min"), "maximum": (4, "map_type", "max"), "range": (5, "map_type", "range"), "average": (6, "double", "mean"), "average_abs": (7, "double", "mean_of_abs"), "stddev": (8, "double", "stddev"), "variance": (9, "double", "variance"), "coeff_var": (10, "double", "coeff_var"), "sum": (11, "map_type", "sum"), "first_quartile": (12, "map_type", "first_quartile"), "median": (13, "map_type", "median"), "third_quartile": (14, "map_type", "third_quartile"), "percentile": (15, "map_type", "percentile"), } if len(raster_maps) != len(column_prefix): grass.fatal( "Number of maps and number of column prefixes has to be equal!") # Generate list of required column names and types col_names = [] valid_labels = [] col_types = [] for p in column_prefix: rmaptype, val_lab, rcats = raster_type( raster_maps[column_prefix.index(p)], tabulate, use_label) valid_labels.append(val_lab) for b in buffers: b_str = str(b).replace(".", "_") if tabulate: if rmaptype == "double precision": grass.fatal( "{} has floating point precision. 
Can only tabulate integer maps" .format(raster_maps[column_prefix.index(p)])) col_names.append("{}_{}_b{}".format(p, "ncats", b_str)) col_types.append("int") col_names.append("{}_{}_b{}".format(p, "mode", b_str)) col_types.append("int") col_names.append("{}_{}_b{}".format(p, "null", b_str)) col_types.append("double precision") col_names.append("{}_{}_b{}".format(p, "area_tot", b_str)) col_types.append("double precision") for rcat in rcats: if use_label and valid_labels: rcat = rcat[0].replace(" ", "_") else: rcat = rcat[1] col_names.append("{}_{}_b{}".format(p, rcat, b_str)) col_types.append("double precision") else: for m in methods: col_names.append("{}_{}_b{}".format( p, int_dict[m][2], b_str)) col_types.append(rmaptype if int_dict[m][1] == "map_type" else int_dict[m][1]) if percentile: for perc in percentile: col_names.append("{}_percentile_{}_b{}".format( p, int(perc) if (perc).is_integer() else perc, b_str)) col_types.append(rmaptype if int_dict[m][1] == "map_type" else int_dict[m][1]) # Open input vector map in_vect = VectorTopo(in_vector, layer=layer) in_vect.open(mode="r") # Get name for temporary map global TMP_MAPS TMP_MAPS.append(tmp_map) # Setup stats collectors if tabulate: # Collector for raster category statistics stats = Module("r.stats", run_=False, stdout_=PIPE) stats.inputs.sort = "desc" stats.inputs.null_value = "null" stats.flags.quiet = True stats.flags.l = True if percent: stats.flags.p = True stats.flags.n = True else: stats.flags.a = True else: # Collector for univariat statistics univar = Module("r.univar", run_=False, stdout_=PIPE) univar.inputs.separator = sep univar.flags.g = True univar.flags.quiet = True # Add extended statistics if requested if set(methods).intersection( set(["first_quartile", "median", "third_quartile"])): univar.flags.e = True if percentile is not None: univar.flags.e = True univar.inputs.percentile = percentile # Check if attribute table exists if not output: if not in_vect.table: grass.fatal( "No attribute table 
found for vector map {}".format(in_vect)) # Modify table as needed tab = in_vect.table tab_name = tab.name tab_cols = tab.columns # Add required columns existing_cols = list(set(tab_cols.names()).intersection(col_names)) if len(existing_cols) > 0: if not update: in_vect.close() grass.fatal( "Column(s) {} already exist! Please use the u-flag \ if you want to update values in those columns". format(",".join(existing_cols))) else: grass.warning("Column(s) {} already exist!".format( ",".join(existing_cols))) for e in existing_cols: idx = col_names.index(e) del col_names[idx] del col_types[idx] tab_cols.add(col_names, col_types) conn = tab.conn cur = conn.cursor() sql_str_start = "UPDATE {} SET ".format(tab_name) elif output == "-": print("cat{0}raster_map{0}buffer{0}statistic{0}value".format(sep)) else: out.write("cat{0}raster_map{0}buffer{0}statistic{0}value{1}".format( sep, os.linesep)) # Get computational region grass.use_temp_region() r = Region() r.read() # Adjust region extent to buffer around geometry # reg = deepcopy(r) # Create iterator for geometries of all selected types geoms = chain() geoms_n = 0 n_geom = 1 for geom_type in types: geoms_n += in_vect.number_of(geom_type) if in_vect.number_of(geom_type) > 0: geoms = chain(in_vect.viter(geom_type)) # Loop over geometries for geom in geoms: # Get cat cat = geom.cat # Add where clause to UPDATE statement sql_str_end = " WHERE cat = {};".format(cat) # Loop over ser provided buffer distances for buf in buffers: b_str = str(buf).replace(".", "_") # Buffer geometry if buf <= 0: buffer_geom = geom else: buffer_geom = geom.buffer(buf) # Create temporary vector map with buffered geometry tmp_vect = VectorTopo(tmp_map, quiet=True) tmp_vect.open(mode="w") tmp_vect.write(Boundary(points=buffer_geom[0].to_list())) # , c_cats=int(cat), set_cats=True if callable(buffer_geom[1]): tmp_vect.write(Centroid(x=buffer_geom[1]().x, y=buffer_geom[1]().y), cat=int(cat)) else: tmp_vect.write(Centroid(x=buffer_geom[1].x, 
y=buffer_geom[1].y), cat=int(cat)) ################################################# # How to silence VectorTopo??? ################################################# # Save current stdout # original = sys.stdout # f = open(os.devnull, 'w') # with open('output.txt', 'w') as f: # sys.stdout = io.BytesIO() # sys.stdout.fileno() = os.devnull # sys.stderr = f # os.environ.update(dict(GRASS_VERBOSE='0')) tmp_vect.close(build=False) grass.run_command("v.build", map=tmp_map, quiet=True) # os.environ.update(dict(GRASS_VERBOSE='1')) # reg = Region() # reg.read() # r.from_vect(tmp_map) r = align_current(r, buffer_geom[0].bbox()) r.write() # Check if the following is needed # needed specially with r.stats -p # grass.run_command('g.region', vector=tmp_map, flags='a') # Create a MASK from buffered geometry if user_mask: grass.run_command( "v.to.rast", input=tmp_map, output=tmp_map, use="val", value=int(cat), quiet=True, ) mc_expression = ( "MASK=if(!isnull({0}) && !isnull({0}_MASK), {1}, null())". format(tmp_map, cat)) grass.run_command("r.mapcalc", expression=mc_expression, quiet=True) else: grass.run_command( "v.to.rast", input=tmp_map, output="MASK", use="val", value=int(cat), quiet=True, ) # reg.write() updates = [] # Compute statistics for every raster map for rm, rmap in enumerate(raster_maps): # rmap = raster_maps[rm] prefix = column_prefix[rm] if tabulate: # Get statistics on occurrence of raster categories within buffer stats.inputs.input = rmap stats.run() t_stats = (stats.outputs["stdout"].value.rstrip( os.linesep).replace(" ", " ").replace( "no data", "no_data").replace( " ", "_b{} = ".format(b_str)).split(os.linesep)) if t_stats == [""]: grass.warning( empty_buffer_warning.format(rmap, buf, cat)) continue if (t_stats[0].split( "_b{} = ".format(b_str))[0].split("_")[-1] != "null"): mode = (t_stats[0].split( "_b{} = ".format(b_str))[0].split("_")[-1]) elif len(t_stats) == 1: mode = "NULL" else: mode = (t_stats[1].split( "_b{} = ".format(b_str))[0].split("_")[-1]) if 
not output: updates.append("\t{}_{}_b{} = {}".format( prefix, "ncats", b_str, len(t_stats))) updates.append("\t{}_{}_b{} = {}".format( prefix, "mode", b_str, mode)) area_tot = 0 for l in t_stats: # check if raster maps has category or not if len(l.split("=")) == 2: updates.append("\t{}_{}".format( prefix, l.rstrip("%"))) elif not l.startswith("null"): vals = l.split("=") updates.append("\t{}_{} = {}".format( prefix, vals[-2].strip() if valid_labels[rm] else vals[0].strip(), vals[-1].strip().rstrip("%"), )) if not l.startswith("null"): area_tot += float( l.rstrip("%").split("= ")[-1]) if not percent: updates.append("\t{}_{}_b{} = {}".format( prefix, "area_tot", b_str, area_tot)) else: out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format( sep, cat, prefix, buf, "ncats", len(t_stats), os.linesep) out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format( sep, cat, prefix, buf, "mode", mode, os.linesep) area_tot = 0 for l in t_stats: rcat = (l.split("= ")[1].rstrip( "_b{} = ".format(b_str)) if valid_labels[rm] else l.split("_")[0]) area = l.split("= ")[-1] out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format( sep, cat, prefix, buf, "area {}".format(rcat), area, os.linesep, ) if rcat != "null": area_tot = area_tot + float( l.rstrip("%").split("= ")[-1]) if not percent: out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format( sep, cat, prefix, buf, "area total", area_tot, os.linesep, ) if output == "-": print(out_str.rstrip(os.linesep)) else: out.write(out_str) else: # Get univariate statistics within buffer univar.inputs.map = rmap univar.run() u_stats = (univar.outputs["stdout"].value.rstrip( os.linesep).replace( "=", "_b{} = ".format(b_str)).split(os.linesep)) # Test if u_stats is empty and give warning # Needs to be adjusted to number of requested stats? 
if ((percentile and len(u_stats) < 14) or (univar.flags.e and len(u_stats) < 13) or len(u_stats) < 12): grass.warning( empty_buffer_warning.format(rmap, buf, cat)) break # Extract statistics for selected methods for m in methods: if not output: # Add to list of UPDATE statements updates.append("\t{}_{}".format( prefix, u_stats[int_dict[m][0]] if is_number( u_stats[int_dict[m][0]].split(" = ")[1]) else " = ".join([ u_stats[int_dict[m][0]].split(" = ")[0], "NULL", ]), )) else: out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}".format( sep, cat, prefix, buf, m, u_stats[int_dict[m][0]].split("= ")[1], ) if output == "-": print(out_str) else: out.write("{}{}".format(out_str, os.linesep)) if percentile: perc_count = 0 for perc in percentile: if not output: updates.append( "{}_percentile_{}_b{} = {}".format( p, int(perc) if (perc).is_integer() else perc, b_str, u_stats[15 + perc_count].split("= ")[1], )) else: out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}".format( sep, cat, prefix, buf, "percentile_{}".format( int(perc) if ( perc).is_integer() else perc), u_stats[15 + perc_count].split("= ")[1], ) if output == "-": print(out_str) else: out.write(out_str) perc_count = perc_count + 1 if not output and len(updates) > 0: cur.execute("{}{}{}".format(sql_str_start, ",\n".join(updates), sql_str_end)) # Remove temporary maps # , stderr=os.devnull, stdout_=os.devnull) grass.run_command("g.remove", flags="f", type="raster", name="MASK", quiet=True) grass.run_command("g.remove", flags="f", type="vector", name=tmp_map, quiet=True) # Give progress information grass.percent(n_geom, geoms_n, 1) n_geom = n_geom + 1 if not output: conn.commit() # Close cursor and DB connection if not output and not output == "-": cur.close() conn.close() # Update history grass.vector.vector_history(in_vector) elif output != "-": # write results to file out.close() if remove and not output: dropcols = [] selectnum = "select count({}) from {}" for i in col_names: thisrow = grass.read_command("db.select", flags="c", 
sql=selectnum.format(i, in_vector)) if int(thisrow) == 0: dropcols.append(i) grass.debug("Columns to delete: {}".format(", ".join(dropcols)), debug=2) if dropcols: grass.run_command("v.db.dropcolumn", map=in_vector, columns=dropcols)
def unset_mask():
    """Deactivate user mask"""
    # Rename rather than remove, so the mask can be restored afterwards.
    if RasterRow("MASK", Mapset().name).exist():
        grass.run_command("g.rename", quiet=True,
                          raster="MASK,{}_MASK".format(tmp_map))