def __init__(self, cmd, width=None, height=None, overlap=0, processes=None,
             split=False, debug=False, region=None, move=None, log=False,
             start_row=0, start_col=0, out_prefix='', *args, **kargs):
    """Set up a tiled, multiprocess run of a raster GRASS module.

    :param cmd: name of the GRASS module to run (e.g. 'r.slope.aspect')
    :param width: tile width, in pixels
    :param height: tile height, in pixels
    :param overlap: overlap between tiles, in pixels
    :param processes: number of worker processes (None = all cores)
    :param split: if True, pre-split all raster inputs with r.tile
    :param debug: if True, run tiles serially for easier debugging
    :param region: Region to tile; defaults to the current region
    :param move: path where a copy of the current mapset is created and
                 used for the computation (optional)
    :param log: if True, record output names in a temp directory
    :param start_row: row offset used when naming the tile mapsets
    :param start_col: column offset used when naming the tile mapsets
    :param out_prefix: prefix prepended to output map names
    :param args: positional parameters forwarded to the module
    :param kargs: keyword parameters forwarded to the module
    """
    # Instantiate the module but do not execute it yet.
    kargs['run_'] = False
    self.mset = Mapset()
    self.module = Module(cmd, *args, **kargs)
    self.width = width
    self.height = height
    self.overlap = overlap
    self.processes = processes
    self.region = region if region else Region()
    self.start_row = start_row
    self.start_col = start_col
    self.out_prefix = out_prefix
    self.log = log
    self.move = move
    self.gisrc_src = os.environ['GISRC']
    self.n_mset, self.gisrc_dst = None, None
    if self.move:
        # Work in a copy of the current mapset: copy the mapset skeleton,
        # write a new GISRC pointing at it, then copy the input maps over.
        self.n_mset = copy_mapset(self.mset, self.move)
        self.gisrc_dst = write_gisrc(self.n_mset.gisdbase,
                                     self.n_mset.location,
                                     self.n_mset.name)
        rasters = [r for r in select(self.module.inputs, 'raster')]
        if rasters:
            copy_rasters(rasters, self.gisrc_src, self.gisrc_dst,
                         region=self.region)
        vectors = [v for v in select(self.module.inputs, 'vector')]
        if vectors:
            copy_vectors(vectors, self.gisrc_src, self.gisrc_dst)
        groups = [g for g in select(self.module.inputs, 'group')]
        if groups:
            copy_groups(groups, self.gisrc_src, self.gisrc_dst,
                        region=self.region)
    self.bboxes = split_region_tiles(region=region, width=width,
                                     height=height, overlap=overlap)
    # Template for the per-tile mapset names, e.g. 'rslopeaspect_000_001'.
    self.msetstr = cmd.replace('.', '') + "_%03d_%03d"
    self.inlist = None
    if split:
        self.split()
    self.debug = debug
def find_in_location(type, pattern, location):
    """Collect (map, mapset, location, gisdbase) tuples for every map of
    the given *type* matching *pattern* in any mapset of *location*."""
    found = []
    for mset_name in location.mapsets():
        mset = Mapset(mset_name, location.name, location.gisdbase)
        for map_name in mset.glist(type, pattern):
            found.append((map_name, mset.name, mset.location, mset.gisdbase))
    return found
def main():
    """Build per-scene cloud-masked rasters and a timestamp registry.

    For every raster named '*_B04_10m' in the current mapset, derive the
    matching cloud-mask vector, cut it out of the user-supplied area map,
    rasterize the result as a mask, and append a 'name|start|end' line to
    the file given by options['output'].
    """
    mapset = Mapset()
    mapset.current()
    with open(options['output'], 'w') as fd:
        for rast in mapset.glist('raster', pattern='*_B04_10m'):
            items = rast.split('_')
            # Sensing date is encoded in the third component of the name.
            d = datetime.strptime(items[2], '%Y%m%dT%H%M%S')
            ## workaround: temporal registration needs a non-zero interval
            dd = d + timedelta(seconds=1)
            vect = '{}_{}_MSK_CLOUDS'.format(items[1], items[2])
            mask_vect = '{}_{}'.format(vect, options['map'].split('@')[0])
            if Vector(vect).exist():
                # Subtract the cloud polygons from the area of interest.
                Module('v.overlay', ainput=options['map'], binput=vect,
                       operator='not', output=mask_vect)
            else:
                # No cloud mask for this scene: use the full area map.
                copy(options['map'], mask_vect, 'vector')
            Module('r.mask', vector=mask_vect, overwrite=True)
            Module('g.remove', flags='f', type='vector', name=mask_vect)
            # Keep the rasterized mask under the per-scene name.
            Module('g.rename', raster=['MASK', mask_vect])
            fd.write("{0}|{1}|{2}{3}".format(mask_vect,
                                             d.strftime('%Y-%m-%d %H:%M:%S'),
                                             dd.strftime('%Y-%m-%d %H:%M:%S'),
                                             os.linesep))
    return 0
def setUpClass(cls):
    """Create a temporary mapset and switch the session into it."""
    cls.org_mapset = Mapset()            # remember the original mapset
    cls.tmp_mapset_name = tempname(10)   # random mapset name
    make_mapset(mapset=cls.tmp_mapset_name)
    cls.tmp_mapset = Mapset(mapset=cls.tmp_mapset_name)
    cls.tmp_mapset.current()             # make the temporary mapset current
    cls.tmp_mapset_path = cls.tmp_mapset.path()
def main():
    """Delete all raster maps whose name begins with "vshed"."""
    gscript.run_command('g.region', flags='p')
    m = Mapset()
    liste_viewshed = m.glist('raster', pattern='vshed*')
    print(liste_viewshed)
    for viewshed_layer in liste_viewshed:
        # NOTE(review): bare run_command here vs gscript.run_command above --
        # presumably the same function imported two ways; confirm.
        run_command("g.remove", flags="f", type="raster",
                    name=viewshed_layer)
def in_mapset(m, dtype, pattern=None):
    """Return True if a map with the same name already exists in the mapset.

    :param m: map name to look for
    :param dtype: data type passed to Mapset.glist (e.g. 'raster', 'vector')
    :param pattern: optional glob pattern restricting the listing
    :returns: bool -- whether *m* is among the listed maps
    """
    mset = Mapset()
    # Return the membership test directly instead of the original
    # `if ...: return True / else: return False`.
    return m in mset.glist(dtype, pattern=pattern)
def cleanup():
    """Restore old mask if it existed"""
    # The user's original MASK was stashed as <TMP_NAME>_MASK; put it back.
    if RasterRow("{}_MASK".format(TMP_NAME), Mapset().name).exist():
        gscript.verbose("Restoring old mask...")
        if RasterRow("MASK", Mapset().name).exist():
            # Drop the mask created during processing first.
            gscript.run_command("r.mask", flags="r")
        gscript.run_command(
            "g.rename", rast="{}_MASK,MASK".format(TMP_NAME), quiet=True
        )
    # NOTE(review): this removes any active MASK, including the one just
    # restored above -- the original line structure was ambiguous; confirm
    # whether this branch was meant as an else of the first check.
    if RasterRow("MASK", Mapset().name).exist():
        gscript.run_command("r.mask", flags="r")
def setUpClass(cls):
    """Create a 1x1 test raster inside a temporary region."""
    cls.map = tempname(10)       # random raster name used by the tests
    cls.mapset = Mapset().name   # current mapset name
    cls.bandref = "The_Doors"    # band reference value used by the tests
    cls.use_temp_region()
    cls.runModule("g.region", n=1, s=0, e=1, w=0, res=1)
    cls.runModule("r.mapcalc", expression="{} = 1".format(cls.map))
def get_path(path, vect_name=None):
    """Return the full path to the database, expanding the GRASS
    substitution variables with their real values.

    :param path: the path with substitutional parameters
    :param vect_name: the name of the vector map, substituted for $MAP

    >>> from grass.script.core import gisenv
    >>> import os
    >>> path = '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db'
    >>> new_path = get_path(path)
    >>> new_path2 = os.path.join(gisenv()['GISDBASE'], gisenv()['LOCATION_NAME'],
    ...                          gisenv()['MAPSET'], 'sqlite', 'sqlite.db')
    >>> new_path.replace("//","/") == new_path2.replace("//","/")
    True
    >>> path = '$GISDBASE/$LOCATION_NAME/$MAPSET/vector/$MAP/sqlite.db'
    >>> new_path = get_path(path, "test")
    >>> new_path2 = os.path.join(gisenv()['GISDBASE'], gisenv()['LOCATION_NAME'],
    ...                          gisenv()['MAPSET'], 'vector', 'test', 'sqlite.db')
    >>> new_path.replace("//","/") == new_path2.replace("//","/")
    True
    """
    if "$" not in path:
        # Nothing to substitute.
        return path
    mapset = Mapset()
    substitutions = [('$GISDBASE', mapset.gisdbase),
                     ('$LOCATION_NAME', mapset.location),
                     ('$MAPSET', mapset.name)]
    if vect_name is not None:
        substitutions.append(('$MAP', vect_name))
    for placeholder, value in substitutions:
        path = path.replace(placeholder, value)
    return path
def setUpClass(cls):
    """Prepare signature directories and fake signature files.

    Creates sig/sigset directories in the current mapset and a second,
    randomly named mapset containing one fake 'sig' and one fake 'sigset'
    signature each.
    """
    cls.mpath = utils.decode(G_mapset_path())
    cls.mapset_name = Mapset().name
    cls.sigdirs = []
    # As signatures are created directly not via signature creation
    # tools, we must ensure signature directories exist
    os.makedirs(f"{cls.mpath}/signatures/sig/", exist_ok=True)
    os.makedirs(f"{cls.mpath}/signatures/sigset/", exist_ok=True)
    # A mapset with a random name
    cls.src_mapset_name = tempname(10)
    G_make_mapset(None, None, cls.src_mapset_name)
    cls.src_mapset_path = (
        cls.mpath.rsplit("/", maxsplit=1)[0] + "/" + cls.src_mapset_name
    )
    # Create fake signature files
    os.makedirs(f"{cls.src_mapset_path}/signatures/sig/")
    cls.src_sig = tempname(10)
    cls.src_sig_dir = f"{cls.src_mapset_path}/signatures/sig/{cls.src_sig}"
    os.makedirs(cls.src_sig_dir)
    cls.sigdirs.append(cls.src_sig_dir)
    # Context manager replaces the manual open()/close() pair so the
    # handle is released even if the write fails.
    with open(f"{cls.src_sig_dir}/sig", "w") as f:
        f.write("A sig file")
    os.makedirs(f"{cls.src_mapset_path}/signatures/sigset/")
    cls.src_sigset = tempname(10)
    cls.src_sigset_dir = (
        f"{cls.src_mapset_path}/signatures/sigset/{cls.src_sigset}"
    )
    os.makedirs(cls.src_sigset_dir)
    cls.sigdirs.append(cls.src_sigset_dir)
    with open(f"{cls.src_sigset_dir}/sig", "w") as f:
        f.write("A sigset file")
def setUpClass(cls):
    """Record mapset info and ensure the signature directories exist."""
    cls.mpath = utils.decode(G_mapset_path())  # path of the current mapset
    cls.mapset_name = Mapset().name
    cls.sigdirs = []                           # dirs to clean up later
    # As signatures are created directly not via signature creation
    # tools, we must ensure signature directories exist
    os.makedirs(f"{cls.mpath}/signatures/sig/", exist_ok=True)
    os.makedirs(f"{cls.mpath}/signatures/sigset/", exist_ok=True)
def main():
    """Write a 'name|start|end' timestamp line for every raster in the
    current mapset to the file given by options['output']."""
    mapset = Mapset()
    mapset.current()
    with open(options['output'], 'w') as fd:
        for rast in mapset.glist('raster'):
            items = rast.split('_')
            # Sensing date is encoded in the third component of the name.
            d = datetime.strptime(items[2], '%Y%m%dT%H%M%S')
            #fd.write("{0}|{1}{2}".format(rast, iso_date, os.linesep))
            ## workaround: registration needs a non-zero time interval
            dd = d + timedelta(seconds=1)
            fd.write("{0}|{1}|{2}{3}".format(rast,
                                             d.strftime('%Y-%m-%d %H:%M:%S'),
                                             dd.strftime('%Y-%m-%d %H:%M:%S'),
                                             os.linesep))
    return 0
def findmaps(type, pattern=None, mapset='', location='', gisdbase=''):
    """Return a list of tuple contining the names of the:

    * map
    * mapset,
    * location,
    * gisdbase

    """
    from grass.pygrass.gis import Gisdbase, Location, Mapset

    def _maps_of(mset):
        # One (map, mapset, location, gisdbase) tuple per listed map.
        return [(m, mset.name, mset.location, mset.gisdbase)
                for m in mset.glist(type, pattern)]

    def find_in_location(type, pattern, location):
        res = []
        for msetname in location.mapsets():
            res.extend(_maps_of(Mapset(msetname, location.name,
                                       location.gisdbase)))
        return res

    def find_in_gisdbase(type, pattern, gisdbase):
        res = []
        for loc in gisdbase.locations():
            res.extend(find_in_location(type, pattern,
                                        Location(loc, gisdbase.name)))
        return res

    # Dispatch on the most specific scope the caller provided.
    if gisdbase and location and mapset:
        return _maps_of(Mapset(mapset, location, gisdbase))
    if gisdbase and location:
        return find_in_location(type, pattern, Location(location, gisdbase))
    if gisdbase:
        return find_in_gisdbase(type, pattern, Gisdbase(gisdbase))
    if location:
        return find_in_location(type, pattern, Location(location))
    if mapset:
        return _maps_of(Mapset(mapset))
    return find_in_gisdbase(type, pattern, Gisdbase())
def main(): m1 = Mapset() #Getting data tree liste_viewshed = m1.glist('raster', pattern='vshed*') #Getting all viewshed created for map in liste_viewshed: output_string = options["output"] + "\\" + map + ".tif" run_command("r.out.gdal", input = map, output = output_string, format = "GTiff", overviews = 0) return 0
def main():
    """Turn every '*MSK_CLOUDS' vector into an inverted raster mask and
    write its timestamps to the file given by options['output']."""
    mapset = Mapset()
    mapset.current()
    with open(options['output'], 'w') as fd:
        for vect in mapset.glist('vector', pattern='*MSK_CLOUDS'):
            items = vect.split('_')
            # Sensing date is encoded in the second component of the name.
            d = datetime.strptime(items[1], '%Y%m%dT%H%M%S')
            ## workaround: registration needs a non-zero time interval
            dd = d + timedelta(seconds=1)
            # -i inverts the mask: cloud-free cells are kept.
            Module('r.mask', vector=vect, flags='i')
            # Keep the rasterized mask under the vector's name.
            Module('g.rename', raster=['MASK', vect])
            fd.write("{0}|{1}|{2}{3}".format(vect,
                                             d.strftime('%Y-%m-%d %H:%M:%S'),
                                             dd.strftime('%Y-%m-%d %H:%M:%S'),
                                             os.linesep))
    return 0
def prepare_horizon(elevation, step, bufferzone, maxdistance, resolution,
                    prefix):
    """Compute horizon rasters for '<prefix>_elev' unless a complete set
    already exists in the current mapset.

    :param elevation: unused here; the map '<prefix>_elev' is read instead
    :param step: angular step between horizon directions, in degrees
    :param bufferzone: r.horizon buffer around the region
    :param maxdistance: maximum search distance for r.horizon
    :param resolution: computational region resolution
    :param prefix: name prefix of the elevation and horizon maps
    """
    g_region(raster="{}_elev".format(prefix), res=resolution)
    mset = Mapset()
    maps = mset.glist("raster")
    hors = [h for h in maps if "{}_horizon".format(prefix) in h]
    # One horizon map is expected per angular step; recompute only when
    # the set is incomplete (inverts the original `if ...: pass / else`).
    if len(hors) != int(360 / float(step)):
        r_horizon(elevation="{}_elev".format(prefix), step=step,
                  bufferzone=bufferzone, maxdistance=maxdistance,
                  output="{}_horizon".format(prefix), overwrite=True)
def set_default(self):
    """Set the Region object to the default GRASS region.
    It works only in PERMANENT mapset.

    :raises GrassError: if the current mapset is not PERMANENT or the
        default region (DEFAULT_WIND file) cannot be written
    """
    from grass.pygrass.gis import Mapset
    mapset = Mapset()
    if mapset.name != 'PERMANENT':
        raise GrassError("ERROR: Unable to change default region. The " \
                         "current mapset is not <PERMANENT>.")
    self.adjust()
    if libgis.G_put_window(self.c_region) < 0:
        # Fixed typo in the original message: DEFAUL_WIND -> DEFAULT_WIND
        # (the actual name of the file G_put_window writes).
        raise GrassError("Cannot change region (DEFAULT_WIND file).")
def get_mapset(gisrc_src, gisrc_dst):
    """Get mapset from a GISRC source to a GISRC destination.

    :param gisrc_src: path to the GISRC source
    :type gisrc_src: str
    :param gisrc_dst: path to the GISRC destination
    :type gisrc_dst: str
    :returns: a tuple with Mapset(src), Mapset(dst)

    """
    src_mset, src_loc, src_gdb = read_gisrc(gisrc_src)
    dst_mset, dst_loc, dst_gdb = read_gisrc(gisrc_dst)
    dst_path = os.path.join(dst_gdb, dst_loc, dst_mset)
    if not os.path.isdir(dst_path):
        os.makedirs(dst_path)
    src_path = os.path.join(src_gdb, src_loc, src_mset)
    # Copy WIND/VAR and the other special per-mapset files across.
    copy_special_mapset_files(src_path, dst_path)
    src = Mapset(src_mset, src_loc, src_gdb)
    dst = Mapset(dst_mset, dst_loc, dst_gdb)
    # The destination sees everything the source saw, plus the source.
    dst.visible.extend(list(src.visible) + [src.name])
    return src, dst
def cleanup():
    """Remove temporary data created by the module."""
    # remove temporary region file
    grass.del_temp_region()
    try:
        grass.run_command('g.remove', flags='f', name=TMP_MAPS,
                          quiet=True, type=['vector', 'raster'],
                          stderr=os.devnull, stdout_=os.devnull)
    except Exception:
        # Best-effort removal; narrowed from a bare `except:` so that
        # SystemExit/KeyboardInterrupt still propagate.
        pass
    # NOTE(review): `stderr=` above is inconsistent with `stdout_=`; grass
    # script stream keywords end with '_' -- confirm intended.
    if RasterRow("MASK", Mapset().name).exist():
        grass.run_command("r.mask", flags="r", quiet=True)
    reset_mask()
def setUpClass(cls):
    """Create three 1x1 test rasters; label the first two semantically."""
    cls.libc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("c"))
    cls.mapset = Mapset().name
    cls.map1 = tempname(10)
    cls.semantic_label1 = "The_Doors"
    cls.map2 = tempname(10)
    cls.semantic_label2 = "The_Who"
    cls.map3 = tempname(10)
    cls.use_temp_region()
    cls.runModule("g.region", n=1, s=0, e=1, w=0, res=1)
    cls.runModule("r.mapcalc", expression=f"{cls.map1} = 1")
    cls.runModule("r.mapcalc", expression=f"{cls.map2} = 1")
    cls.runModule("r.mapcalc", expression=f"{cls.map3} = 1")
    # map3 intentionally gets no semantic label.
    Rast_write_semantic_label(cls.map1, cls.semantic_label1)
    Rast_write_semantic_label(cls.map2, cls.semantic_label2)
def setUpClass(cls):
    """Prepare signature directories here and in a random second mapset."""
    cls.list_ptr = ctypes.POINTER(ctypes.c_char_p)
    cls.mpath = utils.decode(G_mapset_path())
    cls.mapset_name = Mapset().name
    cls.sigdirs = []
    # As signatures are created directly not via signature creation
    # tools, we must ensure signature directories exist
    os.makedirs(f"{cls.mpath}/signatures/sig/", exist_ok=True)
    os.makedirs(f"{cls.mpath}/signatures/sigset/", exist_ok=True)
    # A mapset with a random name
    cls.rnd_mapset_name = tempname(10)
    G_make_mapset(None, None, cls.rnd_mapset_name)
    # Sibling of the current mapset inside the same location.
    cls.rnd_mapset_path = (cls.mpath.rsplit("/", maxsplit=1)[0] +
                           "/" + cls.rnd_mapset_name)
    os.makedirs(f"{cls.rnd_mapset_path}/signatures/sig/")
    os.makedirs(f"{cls.rnd_mapset_path}/signatures/sigset/")
def clean_location(self, location=None):
    """Remove all created mapsets.

    :param location: a Location instance where we are running the analysis
    :type location: Location object
    """
    if location is None:
        # The analysis may have run in the moved mapset; switch there so
        # Location() resolves correctly.
        if self.n_mset:
            self.n_mset.current()
        location = Location()
    # Tile mapsets share the prefix part of self.msetstr.
    mapsets = location.mapsets(self.msetstr.split('_')[0] + '_*')
    for mset in mapsets:
        Mapset(mset).delete()
    if self.n_mset and self.n_mset.is_current():
        # Restore the original mapset as the current one.
        self.mset.current()
def __init__(self, cmd, width=None, height=None, overlap=0, processes=None,
             split=False, debug=False, region=None, move=None, log=False,
             start_row=0, start_col=0, out_prefix='', mapset_prefix=None,
             *args, **kargs):
    """Set up a tiled, multiprocess run of a raster GRASS module.

    :param cmd: name of the GRASS module to run (e.g. 'r.slope.aspect')
    :param width: tile width, in pixels
    :param height: tile height, in pixels
    :param overlap: overlap between tiles, in pixels
    :param processes: number of worker processes (None = all cores)
    :param split: if True, pre-split all raster inputs with r.tile
    :param debug: if True, run tiles serially for easier debugging
    :param region: Region to tile; defaults to the current region
    :param move: path where a copy of the current mapset is created and
                 used for the computation (optional)
    :param log: if True, record output names in a temp directory
    :param start_row: row offset used when naming the tile mapsets
    :param start_col: column offset used when naming the tile mapsets
    :param out_prefix: prefix prepended to output map names
    :param mapset_prefix: explicit prefix for the tile mapset names;
                          derived from *cmd* when not given
    :param args: positional parameters forwarded to the module
    :param kargs: keyword parameters forwarded to the module
    """
    # Instantiate the module but do not execute it yet.
    kargs['run_'] = False
    self.mset = Mapset()
    self.module = Module(cmd, *args, **kargs)
    self.width = width
    self.height = height
    self.overlap = overlap
    self.processes = processes
    self.region = region if region else Region()
    self.start_row = start_row
    self.start_col = start_col
    self.out_prefix = out_prefix
    self.log = log
    self.move = move
    self.gisrc_src = os.environ['GISRC']
    self.n_mset, self.gisrc_dst = None, None
    if self.move:
        # Work in a copy of the current mapset: copy the mapset skeleton,
        # write a new GISRC pointing at it, then copy the input maps over.
        self.n_mset = copy_mapset(self.mset, self.move)
        self.gisrc_dst = write_gisrc(self.n_mset.gisdbase,
                                     self.n_mset.location,
                                     self.n_mset.name)
        rasters = [r for r in select(self.module.inputs, 'raster')]
        if rasters:
            copy_rasters(rasters, self.gisrc_src, self.gisrc_dst,
                         region=self.region)
        vectors = [v for v in select(self.module.inputs, 'vector')]
        if vectors:
            copy_vectors(vectors, self.gisrc_src, self.gisrc_dst)
        groups = [g for g in select(self.module.inputs, 'group')]
        if groups:
            copy_groups(groups, self.gisrc_src, self.gisrc_dst,
                        region=self.region)
    self.bboxes = split_region_tiles(region=region, width=width,
                                     height=height, overlap=overlap)
    # Use the explicit prefix when given, otherwise derive one from cmd,
    # e.g. 'r.slope.aspect' -> 'rslopeaspect_%03d_%03d'.
    if mapset_prefix:
        self.msetstr = mapset_prefix + "_%03d_%03d"
    else:
        self.msetstr = cmd.replace('.', '') + "_%03d_%03d"
    self.inlist = None
    if split:
        self.split()
    self.debug = debug
def cleanup():
    """Remove raster and vector maps stored in a list"""
    grass.run_command(
        "g.remove",
        flags="f",
        type="raster,vector",
        pattern="{}_*".format(TEMPNAME),  # everything this run created
        quiet=True,
        stderr=subprocess.PIPE,
    )
    # Reset mask if user MASK was present
    if (RasterRow("MASK", Mapset().name).exist()
            and RasterRow("MASK_{}".format(TEMPNAME)).exist()):
        grass.run_command("r.mask", flags="r", quiet=True)
    reset_mask()
def findmaps(type, pattern=None, mapset="", location="", gisdbase=""):
    """Return a list of tuples containing the names of the:

    * map
    * mapset,
    * location,
    * gisdbase

    """
    from grass.pygrass.gis import Gisdbase, Location, Mapset

    def find_in_location(type, pattern, location):
        # Collect matching maps from every mapset of one location.
        res = []
        for msetname in location.mapsets():
            mset = Mapset(msetname, location.name, location.gisdbase)
            res.extend(
                [
                    (m, mset.name, mset.location, mset.gisdbase)
                    for m in mset.glist(type, pattern)
                ]
            )
        return res

    def find_in_gisdbase(type, pattern, gisdbase):
        # Collect matching maps from every location of one gisdbase.
        res = []
        for loc in gisdbase.locations():
            res.extend(find_in_location(type, pattern, Location(loc, gisdbase.name)))
        return res

    # Dispatch on the most specific scope the caller provided.
    if gisdbase and location and mapset:
        mset = Mapset(mapset, location, gisdbase)
        return [
            (m, mset.name, mset.location, mset.gisdbase)
            for m in mset.glist(type, pattern)
        ]
    elif gisdbase and location:
        loc = Location(location, gisdbase)
        return find_in_location(type, pattern, loc)
    elif gisdbase:
        gis = Gisdbase(gisdbase)
        return find_in_gisdbase(type, pattern, gis)
    elif location:
        loc = Location(location)
        return find_in_location(type, pattern, loc)
    elif mapset:
        mset = Mapset(mapset)
        return [
            (m, mset.name, mset.location, mset.gisdbase)
            for m in mset.glist(type, pattern)
        ]
    else:
        # No scope given: search the whole current gisdbase.
        gis = Gisdbase()
        return find_in_gisdbase(type, pattern, gis)
def setUpClass(cls):
    """Create one fake 'sigset' and one fake 'sig' signature directory."""
    cls.mpath = utils.decode(G_mapset_path())
    cls.mapset_name = Mapset().name
    cls.sigdirs = []
    # As signatures are created directly not via signature creation
    # tools, we must ensure signature directories exist
    os.makedirs(f"{cls.mpath}/signatures/sig/", exist_ok=True)
    os.makedirs(f"{cls.mpath}/signatures/sigset/", exist_ok=True)
    cls.sig_name1 = tempname(10)
    cls.sig_dir1 = f"{cls.mpath}/signatures/sigset/{cls.sig_name1}"
    os.makedirs(cls.sig_dir1)
    cls.sigdirs.append(cls.sig_dir1)
    # Empty placeholder signature file.
    open(f"{cls.sig_dir1}/sig", "a").close()
    cls.sig_name2 = tempname(10)
    cls.sig_dir2 = f"{cls.mpath}/signatures/sig/{cls.sig_name2}"
    os.makedirs(cls.sig_dir2)
    cls.sigdirs.append(cls.sig_dir2)
    open(f"{cls.sig_dir2}/sig", "a").close()
def unset_mask():
    """Deactivate user mask"""
    if RasterRow("MASK", Mapset().name).exist():
        # Stash the user's MASK under a temporary name ...
        grass.run_command(
            "g.copy",
            quiet=True,
            raster="MASK,MASK_{}".format(TEMPNAME),
            stderr=subprocess.DEVNULL,
            errors="ignore",
        )
        # ... then drop the active MASK so processing sees the full region.
        grass.run_command(
            "g.remove",
            quiet=True,
            type="raster",
            name="MASK",
            stderr=subprocess.DEVNULL,
            flags="f",
            errors="ignore",
        )
def copy_mapset(mapset, path):
    """Copy mapset to another place without copying raster and vector data.

    Only the special per-mapset files (WIND, VAR, PROJ_*, ...) of both
    PERMANENT and the given mapset are copied, giving a usable skeleton.

    :param mapset: a Mapset instance to copy
    :type mapset: Mapset object
    :param path: path where the new mapset must be copied
    :type path: str
    :returns: the instance of the new Mapset.

    >>> from grass.script.core import gisenv
    >>> mname = gisenv()['MAPSET']
    >>> mset = Mapset()
    >>> mset.name == mname
    True
    >>> import tempfile as tmp
    >>> import os
    >>> path = os.path.join(tmp.gettempdir(), 'my_loc', 'my_mset')
    >>> copy_mapset(mset, path)                           # doctest: +ELLIPSIS
    Mapset(...)
    >>> sorted(os.listdir(path))                          # doctest: +ELLIPSIS
    [...'PERMANENT'...]
    >>> sorted(os.listdir(os.path.join(path, 'PERMANENT')))
    [u'DEFAULT_WIND', u'PROJ_EPSG', u'PROJ_INFO', u'PROJ_UNITS', u'VAR', u'WIND']
    >>> sorted(os.listdir(os.path.join(path, mname)))     # doctest: +ELLIPSIS
    [...u'SEARCH_PATH',...u'WIND']
    >>> import shutil
    >>> shutil.rmtree(path)

    """
    per_old = os.path.join(mapset.gisdbase, mapset.location, 'PERMANENT')
    per_new = os.path.join(path, 'PERMANENT')
    map_old = mapset.path()
    map_new = os.path.join(path, mapset.name)
    if not os.path.isdir(per_new):
        os.makedirs(per_new)
    if not os.path.isdir(map_new):
        os.mkdir(map_new)
    copy_special_mapset_files(per_old, per_new)
    copy_special_mapset_files(map_old, map_new)
    # The new location name is the last component of *path*.
    gisdbase, location = os.path.split(path)
    return Mapset(mapset.name, location, gisdbase)
def get_path(path):
    """Return the full path to the database, replacing the GRASS
    environment placeholders with their real values.

    >>> path = '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db'
    >>> new_path = get_path(path)
    >>> from grass.script.core import gisenv
    >>> import os
    >>> new_path2 = os.path.join(gisenv()['GISDBASE'], gisenv()['LOCATION_NAME'],
    ...                          gisenv()['MAPSET'], 'sqlite', 'sqlite.db')
    >>> new_path == new_path2
    True
    """
    if "$" not in path:
        # No placeholders to expand.
        return path
    mapset = Mapset()
    for placeholder, value in (('$GISDBASE', mapset.gisdbase),
                               ('$LOCATION_NAME', mapset.location),
                               ('$MAPSET', mapset.name)):
        path = path.replace(placeholder, value)
    return path
def main():
    """Write raster and cloud-mask timestamp registry files.

    options['output_ras'] receives one 'name|start|end' line per raster;
    options['output_vec'] receives one line per cloud mask, which is also
    rasterized (via r.mask + g.rename) along the way.
    """
    mapset = Mapset()
    mapset.current()  # set mapset to current mapset
    with open(options['output_ras'], 'w') as fd:
        for rast in mapset.glist('raster'):  # get all available raster data
            items = rast.split('_')
            d = datetime.strptime(
                items[0],
                '%Y%m%dT%H%M%S')  # retrieve sensing date from file name
            # workaround to create timespan
            dd = d + timedelta(seconds=1)
            fd.write("{0}|{1}|{2}{3}".format(  # write to timestamps text file
                rast, d.strftime('%Y-%m-%d %H:%M:%S'),
                dd.strftime('%Y-%m-%d %H:%M:%S'), os.linesep))
    with open(options['output_vec'], 'w') as fd:
        for rast in mapset.glist(
                'raster', pattern='*_B02.tif'
        ):  # retrieve sensing date only for one band per date
            items = rast.split('_')
            d = datetime.strptime(items[0], '%Y%m%dT%H%M%S')
            # workaround to create timespan
            dd = d + timedelta(seconds=1)
            vect = 'cloudmask_{}_mergedvector'.format(
                items[0])  # pattern of cloud mask file names
            Module('r.mask', vector=vect,
                   overwrite=True)  # create a mask out of cloud vector file
            Module('g.remove', flags='f', type='vector',
                   name=vect)  # remove original vector data
            Module('g.rename',
                   raster=['MASK', vect])  # rename mask to vector file name
            fd.write("{0}|{1}|{2}{3}".format(  # write to timestamps text file
                vect, d.strftime('%Y-%m-%d %H:%M:%S'),
                dd.strftime('%Y-%m-%d %H:%M:%S'), os.linesep))
    return 0
# Example script: run GRASS raster analysis on an external DEM inside a
# throwaway GrassSession and export the results as GeoTIFFs.
from mower import GrassSession

DEM = "/home/mperry/projects/shortcreek/dem/dem.img"

with GrassSession(DEM) as gs:
    # pygrass modules must be imported after the session is active.
    from grass.pygrass.modules.shortcuts import raster
    # Import/Link to External GDAL data
    raster.external(input=DEM, output="dem")
    # Perform calculations (convert elevation from meters to feet)
    raster.mapcalc(expression="demft=dem*3.28084")
    raster.slope_aspect(elevation="demft", slope="slope", aspect="aspect")
    # Export from GRASS to GDAL
    from grass.pygrass.gis import Mapset
    m = Mapset()
    for r in m.glist('rast'):
        if r == "dem":
            # don't save the original
            continue
        raster.out_gdal(r, format="GTiff",
                        output="/tmp/{}.tif".format(r), overwrite=True)
def add_color_table(self, cbrew=False, color=None, column=None, attr=None,
                    layer='1', nk=5, s=None, cramp=('Diverging', 'RdYlBu'),
                    method='Equal_Interval', invert_ramp=False,
                    *args, **kwargs):
    """Add/replace color table

    Keyword     Description
    ----------  ----------------------------------------------------------
    nk          number of classes
    column      layer column to color classify. If s is not provided,
                column is needed. Default=None
    s           pandas series or numpy array. If column is not provided,
                s is needed. Default=None
    cbrew       Boolean. If true, color scheme and color ramp use Color
                Brewer (http://colorbrewer2.org/)
    method      pysal.mapclassify methods. Options: Map_Classifier,
                Box_Plot, Equal_Interval, Fisher_Jenks, Jenks_Caspall,
                Jenks_Caspall_Forced, Jenks_Caspall_Sampled,
                Max_P_Classifier, Maximum_Breaks, Natural_Breaks,
                Quantiles, Percentiles, Std_Mean, User_Defined.
                Default User_Defined. See PySal documentation for valid
                parameters in each case
                (http://pysal.org/1.2/library/esda/mapclassify.html).
    invert_ramp Boolean. Default False.
    layer       Grass layer where rgb column will be added
    *args       Additional arguments for v.colors
    **kwargs    Additional arguments for rgb_classify. For example, if
                method='User_Defined' then the argument bins is required.
    ----------  ----------------------------------------------------------
    """
    #todo: allow editing if current mapset and mapset are PERMANENT
    #assert self.mapset is not 'PERMANENT',\
    #    '%r is in PERMANENT and is not writable by user' % self.name
    assert int(layer) <= self.dblinks.num_dblinks(), \
        'layer %r does not exist' % layer
    if cbrew:
        table_name = self.dblinks.by_layer(int(layer)).name
        sqlfilter = gtable.Table(name=table_name, connection=self._con)
        assert sqlfilter.exist(), \
            'table %r not found in mapset %r. Verify dblinks.' \
            % (sqlfilter.name, self.mapset)
        if hasattr(s, '__len__'):
            # Caller supplied the values directly: validate dtype and that
            # the series lines up one-to-one with the map categories.
            dtype = s.dtype.type
            assert (np.issubdtype(dtype, np.integer) or
                    np.issubdtype(dtype, np.float)), \
                'Numpy array dtype must be integer or float'
            sqlfilter.filters.select(sqlfilter.key)
            cur = sqlfilter.execute()
            cat = cur.fetchall()
            cat = np.array(cat)
            cat = cat.flatten()
            assert s.shape == cat.shape, 'Series does not match map key shape'
        if column:
            # Pull (cat, value) pairs from the attribute table instead.
            assert column in sqlfilter.columns.names(), \
                '%r has not column %r' % (table_name, column)
            sqlfilter.filters.select(sqlfilter.key, column)
            cur = sqlfilter.execute()
            col = cur.fetchall()
            col = np.array(col).T
            s = col[1]
            cat = col[0]
            dtype = s.dtype.type
            if not (np.issubdtype(dtype, np.integer) or
                    np.issubdtype(dtype, np.float)):
                # Text column: treat empty strings as NaN, then coerce.
                nulls = np.where(s == '')[0]
                # NOTE(review): `nulls.any` is a bound method and is always
                # truthy -- probably meant `nulls.any()`; confirm.
                if nulls.any:
                    s[s == ''] = np.nan
                s = s.astype(float)
        s = pd.Series(s)
        s_rgb = rgb_classify(s, nk, cramp=cramp, method=method,
                             invert_ramp=invert_ramp, **kwargs)
        #TODO convert this into a save_colr_file function
        # Write color table
        mapset = Mapset()
        mapset.current()
        # NOTE(review): identity comparison of strings -- `==` is the
        # reliable check here; `is` may fail for equal names.
        if self.mapset is mapset.name:
            filename = os.path.join('$GISDBASE', '$LOCATION_NAME',
                                    self.mapset, 'vector', self.name, 'colr')
        else:
            # Map owned by another mapset: write a local override file.
            filename = os.path.join('$GISDBASE', '$LOCATION_NAME',
                                    mapset.name, 'vcolr2', self.mapset,
                                    self.name)
        filename = gtable.get_path(filename)
        directory = os.path.dirname(filename)
        if not os.path.exists(directory):
            os.makedirs(directory)
        with open(filename, 'w') as f:
            # Header line: '% <min cat> <number of rows>'.
            f.write('% ' + '%r %r' % (int(min(cat)), self.table.n_rows())
                    + '\n')
            for i, item in enumerate(s_rgb):
                f.write(str(cat[i]) + ':' + item + '\n')
    else:
        assert color, 'color attribute is required'
        if column:
            v.colors(map=self.full_name, layer=layer, column=column,
                     color=color, *args)
        else:
            v.colors(map=self.full_name, color=color, *args)
class GridModule(object):
    # TODO maybe also i.* could be supported easily
    """Run GRASS raster commands in a multiprocessing mode.

    :param cmd: raster GRASS command, only command starting with r.* are valid.
    :type cmd: str
    :param width: width of the tile, in pixel
    :type width: int
    :param height: height of the tile, in pixel.
    :type height: int
    :param overlap: overlap between tiles, in pixel.
    :type overlap: int
    :param processes: number of threads, default value is equal to the number
                      of processor available.
    :param split: if True use r.tile to split all the inputs.
    :type split: bool
    :param run_: if False only instantiate the object
    :type run_: bool
    :param args: give all the parameters to the command
    :param kargs: give all the parameters to the command

    >>> grd = GridModule('r.slope.aspect',
    ...                  width=500, height=500, overlap=2,
    ...                  processes=None, split=False,
    ...                  elevation='elevation',
    ...                  slope='slope', aspect='aspect', overwrite=True)
    >>> grd.run()
    """

    def __init__(self, cmd, width=None, height=None, overlap=0,
                 processes=None, split=False, debug=False, region=None,
                 move=None, log=False, start_row=0, start_col=0,
                 out_prefix='', *args, **kargs):
        # Instantiate the module but do not execute it yet.
        kargs['run_'] = False
        self.mset = Mapset()
        self.module = Module(cmd, *args, **kargs)
        self.width = width
        self.height = height
        self.overlap = overlap
        self.processes = processes
        self.region = region if region else Region()
        self.start_row = start_row
        self.start_col = start_col
        self.out_prefix = out_prefix
        self.log = log
        self.move = move
        self.gisrc_src = os.environ['GISRC']
        self.n_mset, self.gisrc_dst = None, None
        if self.move:
            # Work in a copy of the current mapset: copy the skeleton,
            # write a GISRC pointing at it, then copy the input maps over.
            self.n_mset = copy_mapset(self.mset, self.move)
            self.gisrc_dst = write_gisrc(self.n_mset.gisdbase,
                                         self.n_mset.location,
                                         self.n_mset.name)
            rasters = [r for r in select(self.module.inputs, 'raster')]
            if rasters:
                copy_rasters(rasters, self.gisrc_src, self.gisrc_dst,
                             region=self.region)
            vectors = [v for v in select(self.module.inputs, 'vector')]
            if vectors:
                copy_vectors(vectors, self.gisrc_src, self.gisrc_dst)
            groups = [g for g in select(self.module.inputs, 'group')]
            if groups:
                copy_groups(groups, self.gisrc_src, self.gisrc_dst,
                            region=self.region)
        self.bboxes = split_region_tiles(region=region, width=width,
                                         height=height, overlap=overlap)
        # Template for the per-tile mapset names, e.g. 'rslopeaspect_000_001'.
        self.msetstr = cmd.replace('.', '') + "_%03d_%03d"
        self.inlist = None
        if split:
            self.split()
        self.debug = debug

    def __del__(self):
        if self.gisrc_dst:
            # remove GISRC file
            os.remove(self.gisrc_dst)

    def clean_location(self, location=None):
        """Remove all created mapsets.

        :param location: a Location instance where we are running the analysis
        :type location: Location object
        """
        if location is None:
            # Switch to the moved mapset (if any) so Location() resolves.
            if self.n_mset:
                self.n_mset.current()
            location = Location()
        # Tile mapsets share the prefix part of self.msetstr.
        mapsets = location.mapsets(self.msetstr.split('_')[0] + '_*')
        for mset in mapsets:
            Mapset(mset).delete()
        if self.n_mset and self.n_mset.is_current():
            # Restore the original mapset as the current one.
            self.mset.current()

    def split(self):
        """Split all the raster inputs using r.tile"""
        rtile = Module('r.tile')
        inlist = {}
        for inm in select(self.module.inputs, 'raster'):
            rtile(input=inm.value, output=inm.value,
                  width=self.width, height=self.height,
                  overlap=self.overlap)
            patt = '%s-*' % inm.value
            # Keep the per-input tile names sorted for positional lookup.
            inlist[inm.value] = sorted(self.mset.glist(type='rast',
                                                       pattern=patt))
        self.inlist = inlist

    def get_works(self):
        """Return a list of tuples with the parameters for cmd_exe function"""
        works = []
        reg = Region()
        if self.move:
            mdst, ldst, gdst = read_gisrc(self.gisrc_dst)
        else:
            ldst, gdst = self.mset.location, self.mset.gisdbase
        cmd = self.module.get_dict()
        groups = [g for g in select(self.module.inputs, 'group')]
        for row, box_row in enumerate(self.bboxes):
            for col, box in enumerate(box_row):
                inms = None
                if self.inlist:
                    # Map each input to its tile for this (row, col) cell.
                    inms = {}
                    cols = len(box_row)
                    for key in self.inlist:
                        indx = row * cols + col
                        inms[key] = "%s@%s" % (self.inlist[key][indx],
                                               self.mset.name)
                # set the computational region, prepare the region parameters
                # NOTE(review): box.items()[:-2] subscripts the items view --
                # valid on Python 2 only; Python 3 needs list(box.items()).
                bbox = dict([(k[0], str(v)) for k, v in box.items()[:-2]])
                bbox['nsres'] = '%f' % reg.nsres
                bbox['ewres'] = '%f' % reg.ewres
                # NOTE(review): the trailing comma makes new_mset a 1-tuple
                # that is then passed to write_gisrc -- probably unintended;
                # confirm against upstream.
                new_mset = self.msetstr % (self.start_row + row,
                                           self.start_col + col),
                works.append((bbox, inms,
                              self.gisrc_src,
                              write_gisrc(gdst, ldst, new_mset),
                              cmd, groups))
        return works

    def define_mapset_inputs(self):
        """Add the mapset information to the input maps
        """
        for inmap in self.module.inputs:
            inm = self.module.inputs[inmap]
            if inm.type in ('raster', 'vector') and inm.value:
                if '@' not in inm.value:
                    # Qualify the map name with the mapset it lives in.
                    mset = get_mapset_raster(inm.value)
                    inm.value = inm.value + '@%s' % mset

    def run(self, patch=True, clean=True):
        """Run the GRASS command

        :param patch: set False if you does not want to patch the results
        :type patch: bool
        :param clean: set False if you does not want to remove all the stuff
                      created by GridModule
        :type clean: bool
        """
        self.module.flags.overwrite = True
        self.define_mapset_inputs()
        if self.debug:
            # Serial execution makes failures easier to trace.
            for wrk in self.get_works():
                cmd_exe(wrk)
        else:
            pool = mltp.Pool(processes=self.processes)
            result = pool.map_async(cmd_exe, self.get_works())
            result.wait()
            if not result.successful():
                raise RuntimeError(_("Execution of subprocesses was not successful"))

        if patch:
            if self.move:
                # Patch in the moved mapset, then copy the outputs back.
                os.environ['GISRC'] = self.gisrc_dst
                self.n_mset.current()
                self.patch()
                os.environ['GISRC'] = self.gisrc_src
                self.mset.current()
                # copy the outputs from dst => src
                routputs = [self.out_prefix + o
                            for o in select(self.module.outputs, 'raster')]
                copy_rasters(routputs, self.gisrc_dst, self.gisrc_src)
            else:
                self.patch()

        if self.log:
            # record in the temp directory
            from grass.lib.gis import G_tempfile
            tmp, dummy = os.path.split(G_tempfile())
            tmpdir = os.path.join(tmp, self.module.name)
            for k in self.module.outputs:
                par = self.module.outputs[k]
                if par.typedesc == 'raster' and par.value:
                    # Touch an empty marker file per produced output map.
                    dirpath = os.path.join(tmpdir, par.name)
                    if not os.path.isdir(dirpath):
                        os.makedirs(dirpath)
                    fil = open(os.path.join(dirpath,
                                            self.out_prefix + par.value),
                               'w+')
                    fil.close()

        if clean:
            self.clean_location()
            self.rm_tiles()
            if self.n_mset:
                gisdbase, location = os.path.split(self.move)
                self.clean_location(Location(location, gisdbase))
                # rm temporary gis_rc
                os.remove(self.gisrc_dst)
                self.gisrc_dst = None
                sht.rmtree(os.path.join(self.move, 'PERMANENT'))
                sht.rmtree(os.path.join(self.move, self.mset.name))

    def patch(self):
        """Patch the final results."""
        bboxes = split_region_tiles(width=self.width, height=self.height)
        loc = Location()
        mset = loc[self.mset.name]
        # Make all tile mapsets visible so their rasters can be read.
        mset.visible.extend(loc.mapsets())
        for otmap in self.module.outputs:
            otm = self.module.outputs[otmap]
            if otm.typedesc == 'raster' and otm.value:
                rpatch_map(otm.value,
                           self.mset.name, self.msetstr, bboxes,
                           self.module.flags.overwrite,
                           self.start_row, self.start_col, self.out_prefix)

    def rm_tiles(self):
        """Remove all the tiles."""
        # if split, remove tiles
        if self.inlist:
            grm = Module('g.remove')
            for key in self.inlist:
                grm(flags='f', type='rast', pattern=self.inlist[key])
def main():
    """Compute (buffered) raster statistics for every geometry of the
    input vector map and write them to the attribute table, to stdout
    (``output == "-"``) or to a file.

    Reads all parameters from the module-level ``options``/``flags``
    dictionaries filled by the GRASS parser.
    """
    # NOTE(review): `unicode` below implies Python 2; under Python 3 the
    # `map()` result for `percentile` would be a one-shot iterator and the
    # repeated iterations over it would silently yield nothing — confirm.
    in_vector = options["input"].split("@")[0]
    if len(options["input"].split("@")) > 1:
        in_mapset = options["input"].split("@")[1]
    else:
        in_mapset = None
    raster_maps = options["raster"].split(",")  # raster file(s) to extract from
    output = options["output"]
    methods = tuple(options["methods"].split(","))
    percentile = (None if options["percentile"] == "" else map(
        float, options["percentile"].split(",")))
    column_prefix = tuple(options["column_prefix"].split(","))
    buffers = options["buffers"].split(",")
    types = options["type"].split(",")
    layer = options["layer"]
    sep = options["separator"]
    update = flags["u"]
    tabulate = flags["t"]
    percent = flags["p"]
    remove = flags["r"]
    use_label = flags["l"]

    empty_buffer_warning = (
        "No data in raster map {} within buffer {} around geometry {}")

    # Do checks using pygrass
    for rmap in raster_maps:
        r_map = RasterAbstractBase(rmap)
        if not r_map.exist():
            grass.fatal("Could not find raster map {}.".format(rmap))

    # remember whether the user had a MASK so it can be combined with the
    # per-geometry MASK and restored afterwards
    user_mask = False
    m_map = RasterAbstractBase("MASK", Mapset().name)
    if m_map.exist():
        grass.warning("Current MASK is temporarily renamed.")
        user_mask = True
        unset_mask()

    invect = VectorTopo(in_vector)
    if not invect.exist():
        grass.fatal("Vector file {} does not exist".format(in_vector))

    if output:
        if output == "-":
            out = None
        else:
            out = open(output, "w")

    # Check if input map is in current mapset (and thus editable)
    # NOTE(review): `.format(output)` below has no placeholder in the
    # message — confirm whether the map name was meant to be interpolated.
    if in_mapset and unicode(in_mapset) != unicode(Mapset()):
        grass.fatal(
            "Input vector map is not in current mapset and cannot be modified. \
Please consider copying it to current mapset.".format(
                output))

    buffers = []
    for buf in options["buffers"].split(","):
        try:
            b = float(buf)
            if b.is_integer():
                buffers.append(int(b))
            else:
                buffers.append(b)
        # NOTE(review): bare except with an empty fatal message hides the
        # offending value, and `b` is referenced below even when float()
        # failed — confirm intended behavior.
        except:
            grass.fatal("")
        if b < 0:
            grass.fatal("Negative buffer distance not supported!")

    ### Define column types depenting on statistic, map type and
    ### DB backend (SQLite supports only double and not real)
    # int: statistic produces allways integer precision
    # double: statistic produces allways floating point precision
    # map_type: precision f statistic depends on map type
    # value: (index in r.univar -g output, column type, column suffix)
    int_dict = {
        "number": (0, "int", "n"),
        "number_null": (1, "int", "null_cells"),
        "minimum": (3, "map_type", "min"),
        "maximum": (4, "map_type", "max"),
        "range": (5, "map_type", "range"),
        "average": (6, "double", "mean"),
        "average_abs": (7, "double", "mean_of_abs"),
        "stddev": (8, "double", "stddev"),
        "variance": (9, "double", "variance"),
        "coeff_var": (10, "double", "coeff_var"),
        "sum": (11, "map_type", "sum"),
        "first_quartile": (12, "map_type", "first_quartile"),
        "median": (13, "map_type", "median"),
        "third_quartile": (14, "map_type", "third_quartile"),
        "percentile": (15, "map_type", "percentile"),
    }

    if len(raster_maps) != len(column_prefix):
        grass.fatal(
            "Number of maps and number of column prefixes has to be equal!")

    # Generate list of required column names and types
    col_names = []
    valid_labels = []
    col_types = []
    for p in column_prefix:
        rmaptype, val_lab, rcats = raster_type(
            raster_maps[column_prefix.index(p)], tabulate, use_label)
        valid_labels.append(val_lab)

        for b in buffers:
            b_str = str(b).replace(".", "_")
            if tabulate:
                if rmaptype == "double precision":
                    grass.fatal(
                        "{} has floating point precision. \
Can only tabulate integer maps"
                        .format(raster_maps[column_prefix.index(p)]))
                col_names.append("{}_{}_b{}".format(p, "ncats", b_str))
                col_types.append("int")
                col_names.append("{}_{}_b{}".format(p, "mode", b_str))
                col_types.append("int")
                col_names.append("{}_{}_b{}".format(p, "null", b_str))
                col_types.append("double precision")
                col_names.append("{}_{}_b{}".format(p, "area_tot", b_str))
                col_types.append("double precision")

                # one column per raster category (or its label)
                for rcat in rcats:
                    if use_label and valid_labels:
                        rcat = rcat[0].replace(" ", "_")
                    else:
                        rcat = rcat[1]
                    col_names.append("{}_{}_b{}".format(p, rcat, b_str))
                    col_types.append("double precision")
            else:
                for m in methods:
                    col_names.append("{}_{}_b{}".format(
                        p, int_dict[m][2], b_str))
                    col_types.append(rmaptype if int_dict[m][1] == "map_type"
                                     else int_dict[m][1])
                if percentile:
                    for perc in percentile:
                        col_names.append("{}_percentile_{}_b{}".format(
                            p, int(perc) if (perc).is_integer() else perc,
                            b_str))
                        # NOTE(review): `m` leaks from the loop above —
                        # confirm the intended type lookup for percentiles.
                        col_types.append(rmaptype
                                         if int_dict[m][1] == "map_type"
                                         else int_dict[m][1])

    # Open input vector map
    in_vect = VectorTopo(in_vector, layer=layer)
    in_vect.open(mode="r")

    # Get name for temporary map
    # NOTE(review): `tmp_map` is a module-level global defined elsewhere
    # in the file — confirm.
    global TMP_MAPS
    TMP_MAPS.append(tmp_map)

    # Setup stats collectors
    if tabulate:
        # Collector for raster category statistics
        stats = Module("r.stats", run_=False, stdout_=PIPE)
        stats.inputs.sort = "desc"
        stats.inputs.null_value = "null"
        stats.flags.quiet = True
        stats.flags.l = True
        if percent:
            stats.flags.p = True
            stats.flags.n = True
        else:
            stats.flags.a = True
    else:
        # Collector for univariat statistics
        univar = Module("r.univar", run_=False, stdout_=PIPE)
        univar.inputs.separator = sep
        univar.flags.g = True
        univar.flags.quiet = True

        # Add extended statistics if requested
        if set(methods).intersection(
                set(["first_quartile", "median", "third_quartile"])):
            univar.flags.e = True

        if percentile is not None:
            univar.flags.e = True
            univar.inputs.percentile = percentile

    # Check if attribute table exists
    if not output:
        if not in_vect.table:
            grass.fatal(
                "No attribute table \
found for vector map {}".format(in_vect))

        # Modify table as needed
        tab = in_vect.table
        tab_name = tab.name
        tab_cols = tab.columns

        # Add required columns
        existing_cols = list(set(tab_cols.names()).intersection(col_names))
        if len(existing_cols) > 0:
            if not update:
                in_vect.close()
                grass.fatal(
                    "Column(s) {} already exist! Please use the u-flag \
if you want to update values in those columns".
                    format(",".join(existing_cols)))
            else:
                grass.warning("Column(s) {} already exist!".format(
                    ",".join(existing_cols)))
            # keep existing columns out of the ALTER TABLE below
            for e in existing_cols:
                idx = col_names.index(e)
                del col_names[idx]
                del col_types[idx]
        tab_cols.add(col_names, col_types)

        conn = tab.conn
        cur = conn.cursor()

        sql_str_start = "UPDATE {} SET ".format(tab_name)

    elif output == "-":
        print("cat{0}raster_map{0}buffer{0}statistic{0}value".format(sep))
    else:
        out.write("cat{0}raster_map{0}buffer{0}statistic{0}value{1}".format(
            sep, os.linesep))

    # Get computational region
    grass.use_temp_region()
    r = Region()
    r.read()

    # Adjust region extent to buffer around geometry
    # reg = deepcopy(r)

    # Create iterator for geometries of all selected types
    # NOTE(review): each iteration REPLACES `geoms` instead of chaining,
    # so only the last geometry type with features is iterated while
    # `geoms_n` counts all of them — confirm chain(geoms, ...) was intended.
    geoms = chain()
    geoms_n = 0
    n_geom = 1
    for geom_type in types:
        geoms_n += in_vect.number_of(geom_type)
        if in_vect.number_of(geom_type) > 0:
            geoms = chain(in_vect.viter(geom_type))

    # Loop over geometries
    for geom in geoms:
        # Get cat
        cat = geom.cat

        # Add where clause to UPDATE statement
        sql_str_end = " WHERE cat = {};".format(cat)

        # Loop over ser provided buffer distances
        for buf in buffers:
            b_str = str(buf).replace(".", "_")
            # Buffer geometry
            if buf <= 0:
                buffer_geom = geom
            else:
                buffer_geom = geom.buffer(buf)
            # Create temporary vector map with buffered geometry
            tmp_vect = VectorTopo(tmp_map, quiet=True)
            tmp_vect.open(mode="w")
            tmp_vect.write(Boundary(points=buffer_geom[0].to_list()))
            # , c_cats=int(cat), set_cats=True
            # buffer_geom[1] may be a centroid or a callable producing one
            if callable(buffer_geom[1]):
                tmp_vect.write(Centroid(x=buffer_geom[1]().x,
                                        y=buffer_geom[1]().y), cat=int(cat))
            else:
                tmp_vect.write(Centroid(x=buffer_geom[1].x,
                                        y=buffer_geom[1].y), cat=int(cat))

            #################################################
            # How to silence VectorTopo???
            #################################################

            # Save current stdout
            # original = sys.stdout

            # f = open(os.devnull, 'w')
            # with open('output.txt', 'w') as f:
            # sys.stdout = io.BytesIO()
            # sys.stdout.fileno() = os.devnull
            # sys.stderr = f
            # os.environ.update(dict(GRASS_VERBOSE='0'))
            tmp_vect.close(build=False)
            grass.run_command("v.build", map=tmp_map, quiet=True)
            # os.environ.update(dict(GRASS_VERBOSE='1'))

            # reg = Region()
            # reg.read()
            # r.from_vect(tmp_map)
            r = align_current(r, buffer_geom[0].bbox())
            r.write()

            # Check if the following is needed
            # needed specially with r.stats -p
            # grass.run_command('g.region', vector=tmp_map, flags='a')

            # Create a MASK from buffered geometry
            if user_mask:
                # combine the per-geometry mask with the renamed user MASK
                grass.run_command(
                    "v.to.rast",
                    input=tmp_map,
                    output=tmp_map,
                    use="val",
                    value=int(cat),
                    quiet=True,
                )
                mc_expression = (
                    "MASK=if(!isnull({0}) && !isnull({0}_MASK), {1}, null())".
                    format(tmp_map, cat))
                grass.run_command("r.mapcalc",
                                  expression=mc_expression,
                                  quiet=True)
            else:
                grass.run_command(
                    "v.to.rast",
                    input=tmp_map,
                    output="MASK",
                    use="val",
                    value=int(cat),
                    quiet=True,
                )

            # reg.write()

            updates = []
            # Compute statistics for every raster map
            for rm, rmap in enumerate(raster_maps):
                # rmap = raster_maps[rm]
                prefix = column_prefix[rm]

                if tabulate:
                    # Get statistics on occurrence of raster categories
                    # within buffer
                    stats.inputs.input = rmap
                    stats.run()
                    t_stats = (stats.outputs["stdout"].value.rstrip(
                        os.linesep).replace(" ", " ").replace(
                            "no data", "no_data").replace(
                                " ",
                                "_b{} = ".format(b_str)).split(os.linesep))
                    if t_stats == [""]:
                        grass.warning(
                            empty_buffer_warning.format(rmap, buf, cat))
                        continue
                    # mode = most frequent category (output is sorted desc);
                    # skip a leading "null" line unless it is the only one
                    if (t_stats[0].split(
                            "_b{} = ".format(b_str))[0].split("_")[-1] !=
                            "null"):
                        mode = (t_stats[0].split(
                            "_b{} = ".format(b_str))[0].split("_")[-1])
                    elif len(t_stats) == 1:
                        mode = "NULL"
                    else:
                        mode = (t_stats[1].split(
                            "_b{} = ".format(b_str))[0].split("_")[-1])

                    if not output:
                        updates.append("\t{}_{}_b{} = {}".format(
                            prefix, "ncats", b_str, len(t_stats)))
                        updates.append("\t{}_{}_b{} = {}".format(
                            prefix, "mode", b_str, mode))

                        area_tot = 0
                        for l in t_stats:
                            # check if raster maps has category or not
                            if len(l.split("=")) == 2:
                                updates.append("\t{}_{}".format(
                                    prefix, l.rstrip("%")))
                            elif not l.startswith("null"):
                                vals = l.split("=")
                                updates.append("\t{}_{} = {}".format(
                                    prefix, vals[-2].strip()
                                    if valid_labels[rm] else vals[0].strip(),
                                    vals[-1].strip().rstrip("%"),
                                ))
                            if not l.startswith("null"):
                                area_tot += float(
                                    l.rstrip("%").split("= ")[-1])
                        if not percent:
                            updates.append("\t{}_{}_b{} = {}".format(
                                prefix, "area_tot", b_str, area_tot))
                    else:
                        out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                            sep, cat, prefix, buf, "ncats", len(t_stats),
                            os.linesep)
                        out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                            sep, cat, prefix, buf, "mode", mode, os.linesep)
                        area_tot = 0
                        for l in t_stats:
                            rcat = (l.split("= ")[1].rstrip(
                                "_b{} = ".format(b_str))
                                    if valid_labels[rm] else l.split("_")[0])
                            area = l.split("= ")[-1]
                            out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                                sep,
                                cat,
                                prefix,
                                buf,
                                "area {}".format(rcat),
                                area,
                                os.linesep,
                            )
                            if rcat != "null":
                                area_tot = area_tot + float(
                                    l.rstrip("%").split("= ")[-1])
                        if not percent:
                            out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                                sep,
                                cat,
                                prefix,
                                buf,
                                "area total",
                                area_tot,
                                os.linesep,
                            )

                        if output == "-":
                            print(out_str.rstrip(os.linesep))
                        else:
                            out.write(out_str)

                else:
                    # Get univariate statistics within buffer
                    univar.inputs.map = rmap
                    univar.run()
                    u_stats = (univar.outputs["stdout"].value.rstrip(
                        os.linesep).replace(
                            "=", "_b{} = ".format(b_str)).split(os.linesep))

                    # Test if u_stats is empty and give warning
                    # Needs to be adjusted to number of requested stats?
                    if ((percentile and len(u_stats) < 14) or
                            (univar.flags.e and len(u_stats) < 13) or
                            len(u_stats) < 12):
                        grass.warning(
                            empty_buffer_warning.format(rmap, buf, cat))
                        break

                    # Extract statistics for selected methods
                    for m in methods:
                        if not output:
                            # Add to list of UPDATE statements
                            updates.append("\t{}_{}".format(
                                prefix,
                                u_stats[int_dict[m][0]] if is_number(
                                    u_stats[int_dict[m][0]].split(" = ")[1])
                                else " = ".join([
                                    u_stats[int_dict[m][0]].split(" = ")[0],
                                    "NULL",
                                ]),
                            ))
                        else:
                            out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}".format(
                                sep,
                                cat,
                                prefix,
                                buf,
                                m,
                                u_stats[int_dict[m][0]].split("= ")[1],
                            )
                            if output == "-":
                                print(out_str)
                            else:
                                out.write("{}{}".format(out_str, os.linesep))

                    if percentile:
                        perc_count = 0
                        for perc in percentile:
                            if not output:
                                # NOTE(review): `p` here is the loop variable
                                # left over from the column-name generation
                                # above, not `prefix` — confirm intended.
                                updates.append(
                                    "{}_percentile_{}_b{} = {}".format(
                                        p,
                                        int(perc) if
                                        (perc).is_integer() else perc,
                                        b_str,
                                        u_stats[15 +
                                                perc_count].split("= ")[1],
                                    ))
                            else:
                                out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}".format(
                                    sep,
                                    cat,
                                    prefix,
                                    buf,
                                    "percentile_{}".format(
                                        int(perc) if (
                                            perc).is_integer() else perc),
                                    u_stats[15 + perc_count].split("= ")[1],
                                )
                                if output == "-":
                                    print(out_str)
                                else:
                                    out.write(out_str)
                            perc_count = perc_count + 1

            if not output and len(updates) > 0:
                cur.execute("{}{}{}".format(sql_str_start,
                                            ",\n".join(updates),
                                            sql_str_end))

            # Remove temporary maps
            # , stderr=os.devnull, stdout_=os.devnull)
            grass.run_command("g.remove",
                              flags="f",
                              type="raster",
                              name="MASK",
                              quiet=True)
            grass.run_command("g.remove",
                              flags="f",
                              type="vector",
                              name=tmp_map,
                              quiet=True)

        # Give progress information
        grass.percent(n_geom, geoms_n, 1)
        n_geom = n_geom + 1

    if not output:
        conn.commit()

    # Close cursor and DB connection
    if not output and not output == "-":
        cur.close()
        conn.close()
        # Update history
        grass.vector.vector_history(in_vector)
    elif output != "-":
        # write results to file
        out.close()

    # Drop columns that received no value at all
    if remove and not output:
        dropcols = []
        selectnum = "select count({}) from {}"
        for i in col_names:
            thisrow = grass.read_command("db.select",
                                         flags="c",
                                         sql=selectnum.format(i, in_vector))
            if int(thisrow) == 0:
                dropcols.append(i)
        grass.debug("Columns to delete: {}".format(", ".join(dropcols)),
                    debug=2)
        if dropcols:
            grass.run_command("v.db.dropcolumn",
                              map=in_vector,
                              columns=dropcols)
def unset_mask():
    """Deactivate user mask

    If a MASK raster exists in the current mapset, move it out of the
    way by renaming it to "<tmp_map>_MASK"; otherwise do nothing.
    """
    mask = RasterRow("MASK", Mapset().name)
    if not mask.exist():
        return
    grass.run_command("g.rename", quiet=True,
                      raster="MASK,{}_MASK".format(tmp_map))
def export_png_in_projection(src_mapset_name, map_name, output_file,
                             epsg_code, routpng_flags, compression,
                             wgs84_file, use_region=True):
    """Reproject a raster into a temporary location with the given EPSG
    code and export it there as PNG (plus, optionally, a file with the
    extent in WGS84 coordinates).

    :param src_mapset_name: name of the mapset containing *map_name*
    :param map_name: raster map to export (same name is used in the
                     temporary target location)
    :param output_file: path of the PNG file to write
    :param epsg_code: EPSG code of the target projection
    :param routpng_flags: flags passed through to r.out.png
    :param compression: compression level passed through to r.out.png
    :param wgs84_file: if truthy, path of a file to fill with the exported
                       extent reprojected to LL WGS 84
    :param use_region: use computation region and not map extent
    """
    if use_region:
        src_region = get_region()
        src_proj_string = get_location_proj_string()

    # TODO: change only location and not gisdbase?
    # we rely on the tmp dir having enough space for our map
    tgt_gisdbase = tempfile.mkdtemp()
    # this is not needed if we use mkdtemp but why not
    tgt_location = 'r.out.png.proj_location_%s' % epsg_code
    # because we are using PERMANENT we don't have to create mapset
    # explicitly
    tgt_mapset_name = 'PERMANENT'

    src_mapset = Mapset(src_mapset_name)

    # get source (old) and set target (new) GISRC environmental variable
    # TODO: set environ only for child processes could be enough and it
    # would enable (?) parallel runs
    src_gisrc = os.environ['GISRC']
    tgt_gisrc = gsetup.write_gisrc(tgt_gisdbase,
                                   tgt_location, tgt_mapset_name)
    os.environ['GISRC'] = tgt_gisrc
    # stash any active temporary region so the target location starts clean
    if os.environ.get('WIND_OVERRIDE'):
        old_temp_region = os.environ['WIND_OVERRIDE']
        del os.environ['WIND_OVERRIDE']
    else:
        old_temp_region = None
    # these lines looks good but anyway when developing the module
    # switching location seemed fragile and on some errors (while running
    # unfinished module) location was switched in the command line
    try:
        # the function itself is not safe for other (background) processes
        # (e.g. GUI), however we already switched GISRC for us
        # and child processes, so we don't influence others
        gcore.create_location(dbase=tgt_gisdbase,
                              location=tgt_location,
                              epsg=epsg_code,
                              datum=None,
                              datum_trans=None)

        # Mapset object cannot be created if the real mapset does not exists
        tgt_mapset = Mapset(gisdbase=tgt_gisdbase, location=tgt_location,
                            mapset=tgt_mapset_name)
        # set the current mapset in the library
        # we actually don't need to switch when only calling modules
        # (right GISRC is enough for them)
        tgt_mapset.current()

        # setting region
        if use_region:
            # respecting computation region of the src location
            # by previous use g.region in src location
            # and m.proj and g.region now
            # respecting MASK of the src location would be hard
            # null values in map are usually enough
            tgt_proj_string = get_location_proj_string()
            tgt_region = reproject_region(src_region,
                                          from_proj=src_proj_string,
                                          to_proj=tgt_proj_string)
            # uses g.region thus and sets region only for child processes
            # which is enough now
            set_region(tgt_region)
        else:
            # find out map extent to import everything
            # using only classic API because of some problems with pygrass
            # on ms windows
            rproj_out = gcore.read_command('r.proj', input=map_name,
                                           dbase=src_mapset.gisdbase,
                                           location=src_mapset.location,
                                           mapset=src_mapset.name,
                                           output=map_name, flags='g')
            a = gcore.parse_key_val(rproj_out, sep='=', vsep=' ')
            gcore.run_command('g.region', **a)

        # map import
        gcore.run_command('r.proj', input=map_name,
                          dbase=src_mapset.gisdbase,
                          location=src_mapset.location,
                          mapset=src_mapset.name, output=map_name)

        # actual export
        gcore.run_command('r.out.png', input=map_name, output=output_file,
                          compression=compression, flags=routpng_flags)

        # outputting file with WGS84 coordinates
        if wgs84_file:
            gcore.message("Projecting coordinates to LL WGS 84...")
            with open(wgs84_file, 'w') as data_file:
                if use_region:
                    # map which is smaller than region is imported in its
                    # own small extent, but we export image in region, so
                    # we need bounds to be for region, not map
                    # hopefully this is consistent with r.out.png behavior
                    data_file.write(
                        map_extent_to_file_content(
                            proj_to_wgs84(get_region())) + '\n')
                else:
                    # use map to get extent
                    # the result is actually the same as using map
                    # if region is the same as map (use_region == False)
                    data_file.write(
                        map_extent_to_file_content(
                            get_map_extent_for_location(map_name))
                        + '\n')
    finally:
        # just in case we need to do something in the old location
        # our callers probably do
        os.environ['GISRC'] = src_gisrc
        if old_temp_region:
            os.environ['WIND_OVERRIDE'] = old_temp_region
        # set current in library
        src_mapset.current()

        # delete the whole gisdbase
        # delete file by file to ensure that we are deleting only our
        # things; exception will be raised when removing non-empty
        # directory
        tgt_location_path = Location(gisdbase=tgt_gisdbase,
                                     location=tgt_location).path()
        tgt_mapset.delete()
        os.rmdir(tgt_location_path)
        # dir created by tempfile.mkdtemp() needs to be removed manually
        os.rmdir(tgt_gisdbase)
        # we have to remove file created by tempfile.mkstemp function
        # in write_gisrc function
        os.remove(tgt_gisrc)